@@ -28,6 +28,7 @@ def _get_cache(seed, n):
     for i in range(1, n):
         o.append(sha3_512(o[-1]))
 
+    # Use a low-round version of randmemohash
     for _ in range(CACHE_ROUNDS):
         for i in range(n):
             v = o[i][0] % n
@@ -39,9 +40,11 @@ def _get_cache(seed, n):
 def calc_dataset_item(cache, i):
     n = len(cache)
     r = HASH_BYTES // WORD_BYTES
+    # initialize the mix
     mix = copy.copy(cache[i % n])
     mix[0] ^= i
     mix = sha3_512(mix)
+    # fnv it with a lot of random cache nodes based on i
     for j in range(DATASET_PARENTS):
         cache_index = fnv(i ^ j, mix[j % r])
         mix = list(map(fnv, mix, cache[cache_index % n]))
@@ -64,16 +67,19 @@ def hashimoto(header, nonce, full_size, dataset_lookup):
     n = full_size // HASH_BYTES
     w = MIX_BYTES // WORD_BYTES
     mixhashes = MIX_BYTES // HASH_BYTES
+    # combine header+nonce into a 64 byte seed
     s = sha3_512(header + nonce[::-1])
     mix = []
     for _ in range(MIX_BYTES // HASH_BYTES):
         mix.extend(s)
+    # mix in random dataset nodes
     for i in range(ACCESSES):
         p = fnv(i ^ s[0], mix[i % w]) % (n // mixhashes) * mixhashes
         newdata = []
         for j in range(mixhashes):
             newdata.extend(dataset_lookup(p + j))
         mix = list(map(fnv, mix, newdata))
+    # compress mix
     cmix = []
     for i in range(0, len(mix), 4):
         cmix.append(fnv(fnv(fnv(mix[i], mix[i+1]), mix[i+2]), mix[i+3]))
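Context for readers, not part of this commit: the new comments reference the fnv mixer and several constants that are defined earlier in the same spec file but fall outside these hunks. A minimal sketch follows, using the values from the published ethash spec; verify them against this revision of the file before relying on them.

# Sketch of the helpers/constants assumed by the hunks above (not shown in this diff):
WORD_BYTES = 4         # bytes per word
HASH_BYTES = 64        # width of a sha3_512 node
MIX_BYTES = 128        # width of the hashimoto mix
CACHE_ROUNDS = 3       # rounds of the randmemohash pass
DATASET_PARENTS = 256  # cache parents combined into each dataset item
ACCESSES = 64          # dataset reads per hashimoto run

FNV_PRIME = 0x01000193

def fnv(v1, v2):
    # 32-bit FNV-1-style combine used throughout the spec
    return ((v1 * FNV_PRIME) ^ v2) % 2**32

Note that in the spec sha3_512 is a wrapper that takes and returns lists of 32-bit words (and is Keccak-512 rather than finalized SHA-3), which is why expressions such as mix[0] ^= i and map(fnv, mix, ...) operate word-wise.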
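For completeness, the verification entry points defined later in the spec wire these pieces together roughly as follows (a sketch, also not part of this commit):

# Light verification: derive each needed dataset item from the cache on the fly.
def hashimoto_light(full_size, cache, header, nonce):
    return hashimoto(header, nonce, full_size,
                     lambda x: calc_dataset_item(cache, x))

# Full evaluation: read items from a precomputed dataset (a list of hash words).
def hashimoto_full(full_size, dataset, header, nonce):
    return hashimoto(header, nonce, full_size, lambda x: dataset[x])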