Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 4 additions & 16 deletions consensus/ethash/algorithm.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ package ethash

import (
"encoding/binary"
"hash"
"math/big"
"reflect"
"runtime"
Expand Down Expand Up @@ -94,25 +93,14 @@ func calcDatasetSize(epoch int) uint64 {
// reused between hash runs instead of requiring new ones to be created.
type hasher func(dest []byte, data []byte)

// makeHasher creates a repetitive hasher, allowing the same hash data structures
// to be reused between hash runs instead of requiring new ones to be created.
// The returned function is not thread safe!
func makeHasher(h hash.Hash) hasher {
return func(dest []byte, data []byte) {
h.Write(data)
h.Sum(dest[:0])
h.Reset()
}
}

// seedHash is the seed to use for generating a verification cache and the mining
// dataset.
func seedHash(block uint64) []byte {
seed := make([]byte, 32)
if block < epochLength {
return seed
}
keccak256 := makeHasher(sha3.NewKeccak256())
keccak256 := sha3.KeccakFast256
for i := 0; i < int(block/epochLength); i++ {
keccak256(seed, seed)
}
Expand Down Expand Up @@ -166,7 +154,7 @@ func generateCache(dest []uint32, epoch uint64, seed []byte) {
}
}()
// Create a hasher to reuse between invocations
keccak512 := makeHasher(sha3.NewKeccak512())
keccak512 := sha3.KeccakFast512

// Sequentially produce the initial dataset
keccak512(cache, seed)
Expand Down Expand Up @@ -299,7 +287,7 @@ func generateDataset(dest []uint32, epoch uint64, cache []uint32) {
defer pend.Done()

// Create a hasher to reuse between invocations
keccak512 := makeHasher(sha3.NewKeccak512())
keccak512 := sha3.KeccakFast512

// Calculate the data segment this thread should generate
batch := uint32((size + hashBytes*uint64(threads) - 1) / (hashBytes * uint64(threads)))
Expand Down Expand Up @@ -373,7 +361,7 @@ func hashimoto(hash []byte, nonce uint64, size uint64, lookup func(index uint32)
// in-memory cache) in order to produce our final value for a particular header
// hash and nonce.
func hashimotoLight(size uint64, cache []uint32, hash []byte, nonce uint64) ([]byte, []byte) {
keccak512 := makeHasher(sha3.NewKeccak512())
keccak512 := sha3.KeccakFast512

lookup := func(index uint32) []uint32 {
rawData := generateDatasetItem(cache, index, keccak512)
Expand Down
59 changes: 59 additions & 0 deletions crypto/sha3/sha3.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@

package sha3

import "encoding/binary"

// spongeDirection indicates the direction bytes are flowing through the sponge.
type spongeDirection int

Expand Down Expand Up @@ -190,3 +192,60 @@ func (d *state) Sum(in []byte) []byte {
dup.Read(hash)
return append(in, hash...)
}



// keccakFast computes a legacy Keccak hash (pre-NIST Keccak with 0x01
// padding, NOT NIST SHA-3 which pads with 0x06) of data and writes bits/8
// bytes of digest into out. It absorbs into a stack-allocated sponge state,
// avoiding the allocations and interface overhead of the generic hash.Hash
// based implementation.
//
// Preconditions (not checked): out must be at least bits/8 bytes long, and
// bits must be a multiple of 64 so the digest is a whole number of words
// (true for the 256/512 exported wrappers).
func keccakFast(out []byte, bits int, data []byte) {
	const wordSize = 8 // bytes per 64-bit sponge word

	hashSize := bits / 8             // digest length in bytes
	blockSize := (1600 - bits*2) / 8 // sponge rate in bytes (capacity = 2*bits)

	// 1600-bit Keccak state, zero-initialized.
	var state [25]uint64

	dataIndex := 0
	dataLen := len(data)

	// Absorb all complete rate-sized blocks, permuting after each.
	for dataLen >= blockSize {
		for i := 0; i < blockSize/wordSize; i++ {
			state[i] ^= binary.LittleEndian.Uint64(data[dataIndex:])
			dataIndex += wordSize
		}
		keccakF1600(&state)
		dataLen -= blockSize
	}

	// Absorb the remaining whole 64-bit words of the final partial block.
	stateIndex := 0
	for dataLen >= wordSize {
		state[stateIndex] ^= binary.LittleEndian.Uint64(data[dataIndex:])
		stateIndex++
		dataIndex += wordSize
		dataLen -= wordSize
	}

	// Gather the trailing bytes and append the 0x01 padding byte directly
	// after the message (legacy Keccak domain/padding byte).
	var lastWord [8]byte
	lastWordIndex := 0
	for dataLen > 0 {
		lastWord[lastWordIndex] = data[dataIndex]
		lastWordIndex++
		dataIndex++
		dataLen--
	}
	lastWord[lastWordIndex] = 0x01
	state[stateIndex] ^= binary.LittleEndian.Uint64(lastWord[:])

	// Final bit of the pad10*1 padding goes into the last word of the rate.
	state[blockSize/wordSize-1] ^= 0x8000000000000000

	keccakF1600(&state)

	// Squeeze: for 256/512-bit digests the output fits in a single block.
	for i := 0; i < hashSize/wordSize; i++ {
		binary.LittleEndian.PutUint64(out[i*8:], state[i])
	}
}

// KeccakFast256 computes the legacy Keccak-256 hash of data and writes the
// 32-byte digest into dest. dest must be at least 32 bytes long.
func KeccakFast256(dest []byte, data []byte) {
	keccakFast(dest, 256, data)
}

// KeccakFast512 computes the legacy Keccak-512 hash of data and writes the
// 64-byte digest into dest. dest must be at least 64 bytes long.
func KeccakFast512(dest []byte, data []byte) {
	keccakFast(dest, 512, data)
}