Skip to content

Commit 3706393

Browse files
authored
Merge pull request #12 from kobigurk/master
Adds support for blake2x by allowing max_depth 0
2 parents a312979 + 1ee274d commit 3706393

File tree

5 files changed

+129
-8
lines changed

5 files changed

+129
-8
lines changed

blake2b/src/lib.rs

+1-2
Original file line number · Diff line number · Diff line change
@@ -287,10 +287,9 @@ impl Params {
287287
self
288288
}
289289

290-
/// From 1 (the default, meaning sequential) to 255 (meaning unlimited).
290+
/// From 0 (meaning BLAKE2X B2 hashes), through 1 (the default, meaning sequential) to 255 (meaning unlimited).
291291
#[inline]
292292
pub fn max_depth(&mut self, depth: u8) -> &mut Self {
293-
assert!(depth != 0, "Bad max depth: {}", depth);
294293
self.max_depth = depth;
295294
self
296295
}

blake2b/src/test.rs

+1-2
Original file line number · Diff line number · Diff line change
@@ -172,8 +172,7 @@ fn test_long_personal_panics() {
172172
}
173173

174174
#[test]
175-
#[should_panic]
176-
fn test_zero_max_depth_panics() {
175+
fn test_zero_max_depth_supported() {
177176
Params::new().max_depth(0);
178177
}
179178

blake2s/src/lib.rs

+1-2
Original file line number · Diff line number · Diff line change
@@ -278,10 +278,9 @@ impl Params {
278278
self
279279
}
280280

281-
/// From 1 (the default, meaning sequential) to 255 (meaning unlimited).
281+
/// From 0 (meaning BLAKE2X B2 hashes), through 1 (the default, meaning sequential) to 255 (meaning unlimited).
282282
#[inline]
283283
pub fn max_depth(&mut self, depth: u8) -> &mut Self {
284-
assert!(depth != 0, "Bad max depth: {}", depth);
285284
self.max_depth = depth;
286285
self
287286
}

blake2s/src/test.rs

+1-2
Original file line number · Diff line number · Diff line change
@@ -168,8 +168,7 @@ fn test_long_personal_panics() {
168168
}
169169

170170
#[test]
171-
#[should_panic]
172-
fn test_zero_max_depth_panics() {
171+
fn test_zero_max_depth_supported() {
173172
Params::new().max_depth(0);
174173
}
175174

tests/vector_tests.rs

+125
Original file line number · Diff line number · Diff line change
@@ -149,3 +149,128 @@ fn blake2sp_vectors() {
149149
// count.
150150
assert_eq!(512, test_num);
151151
}
152+
153+
fn blake2x_test<F: Fn(&[u8], &[u8], u64) -> Vec<u8>, F2: Fn(&[u8], u64, usize) -> Vec<u8>>(
154+
h0_hasher: F,
155+
b2_hasher: F2,
156+
variant_hash_length: usize,
157+
variant_name: &str,
158+
) {
159+
let mut test_num = 0u64;
160+
for case in TEST_CASES.iter() {
161+
if &case.hash == variant_name {
162+
test_num += 1;
163+
164+
let input_bytes = hex::decode(&case.in_).unwrap();
165+
let key = if !case.key.is_empty() {
166+
hex::decode(&case.key).unwrap()
167+
} else {
168+
vec![]
169+
};
170+
171+
let output_length = case.out.len() / 2;
172+
173+
// BLAKE2X divides the underlying hash node_offset into two parts - node_offset
174+
// and xof_digest_length. This is the encoding of xof_digest_length in the
175+
// correct position in the node_offset.
176+
let combined_node_offset_xof_length = (output_length as u64) << 32;
177+
let h0 = h0_hasher(&input_bytes, &key, combined_node_offset_xof_length);
178+
179+
let mut buf = vec![];
180+
let mut b2_hash_index = 0;
181+
while buf.len() < output_length {
182+
let hash_length = {
183+
// Is this the last hash and the digest length doesn't divide the output
184+
// length?
185+
if output_length - buf.len() < variant_hash_length
186+
&& (output_length % variant_hash_length) != 0
187+
{
188+
output_length % variant_hash_length
189+
} else {
190+
variant_hash_length
191+
}
192+
};
193+
194+
let b2_out = b2_hasher(
195+
&h0,
196+
(b2_hash_index as u64) | combined_node_offset_xof_length,
197+
hash_length,
198+
);
199+
buf.extend_from_slice(&b2_out);
200+
b2_hash_index += 1;
201+
}
202+
assert_eq!(case.out, hex::encode(&buf[..output_length]));
203+
}
204+
}
205+
206+
// Make sure we don't accidentally skip all the tests somehow. If the
207+
// number of test vectors changes in the future, we'll need to update this
208+
// count.
209+
assert_eq!(512, test_num);
210+
}
211+
212+
#[test]
213+
fn blake2xs_vectors() {
214+
let blake2xs_h0_hasher =
215+
|input_bytes: &[u8], key: &[u8], combined_node_offset_xof_length: u64| -> Vec<u8> {
216+
let mut params = blake2s_simd::Params::new();
217+
let h0 = params
218+
.key(key)
219+
.hash_length(32)
220+
.node_offset(combined_node_offset_xof_length)
221+
.hash(&input_bytes)
222+
.as_bytes()
223+
.to_vec();
224+
h0
225+
};
226+
let blake2xs_b2_hasher =
227+
|input_bytes: &[u8], combined_node_offset_xof_length: u64, hash_length: usize| -> Vec<u8> {
228+
let mut params = blake2s_simd::Params::new();
229+
let b2_out = params
230+
.hash_length(hash_length)
231+
.max_leaf_length(32)
232+
.inner_hash_length(32)
233+
.fanout(0)
234+
.max_depth(0)
235+
.node_offset(combined_node_offset_xof_length)
236+
.hash(&input_bytes)
237+
.as_bytes()
238+
.to_vec();
239+
b2_out
240+
};
241+
242+
blake2x_test(blake2xs_h0_hasher, blake2xs_b2_hasher, 32, "blake2xs");
243+
}
244+
245+
#[test]
246+
fn blake2xb_vectors() {
247+
let blake2xb_h0_hasher =
248+
|input_bytes: &[u8], key: &[u8], combined_node_offset_xof_length: u64| -> Vec<u8> {
249+
let mut params = blake2b_simd::Params::new();
250+
let h0 = params
251+
.key(key)
252+
.hash_length(64)
253+
.node_offset(combined_node_offset_xof_length)
254+
.hash(&input_bytes)
255+
.as_bytes()
256+
.to_vec();
257+
h0
258+
};
259+
let blake2xb_b2_hasher =
260+
|input_bytes: &[u8], combined_node_offset_xof_length: u64, hash_length: usize| -> Vec<u8> {
261+
let mut params = blake2b_simd::Params::new();
262+
let b2_out = params
263+
.hash_length(hash_length)
264+
.max_leaf_length(64)
265+
.inner_hash_length(64)
266+
.fanout(0)
267+
.max_depth(0)
268+
.node_offset(combined_node_offset_xof_length)
269+
.hash(&input_bytes)
270+
.as_bytes()
271+
.to_vec();
272+
b2_out
273+
};
274+
275+
blake2x_test(blake2xb_h0_hasher, blake2xb_b2_hasher, 64, "blake2xb");
276+
}

0 commit comments

Comments
 (0)