diff --git a/Cargo.lock b/Cargo.lock index 7da141b60175a..3ce37a852f47e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5129,6 +5129,7 @@ dependencies = [ "cumulus-primitives-core", "parity-scale-codec", "polkadot-primitives", + "proptest", "sp-consensus-babe", "sp-core 28.0.0", "sp-runtime", diff --git a/cumulus/test/relay-sproof-builder/Cargo.toml b/cumulus/test/relay-sproof-builder/Cargo.toml index 553210a044ac0..2ae2d9bf00034 100644 --- a/cumulus/test/relay-sproof-builder/Cargo.toml +++ b/cumulus/test/relay-sproof-builder/Cargo.toml @@ -27,6 +27,9 @@ polkadot-primitives = { workspace = true } # Cumulus cumulus-primitives-core = { workspace = true } +[dev-dependencies] +proptest = { workspace = true } + [features] default = ["std"] std = [ diff --git a/cumulus/test/relay-sproof-builder/src/lib.rs b/cumulus/test/relay-sproof-builder/src/lib.rs index ad3998e99cf05..ee8cd92fd9d93 100644 --- a/cumulus/test/relay-sproof-builder/src/lib.rs +++ b/cumulus/test/relay-sproof-builder/src/lib.rs @@ -306,7 +306,7 @@ pub fn build_relay_parent_descendants( let mut previous_hash = None; - for block_number in 0..=num_headers as u32 { + for block_number in 0..num_headers as u32 { let mut header = Header { number: block_number, parent_hash: previous_hash.unwrap_or_default(), @@ -329,3 +329,254 @@ pub fn build_relay_parent_descendants( headers } + +#[cfg(test)] +mod tests { + use super::*; + use codec::Decode; + use proptest::prelude::*; + use sp_consensus_babe::{ + digests::{PreDigest, PrimaryPreDigest}, + AuthorityId, AuthorityPair, BabeAuthorityWeight, + }; + use sp_core::{crypto::Pair, sr25519::Signature, H256}; + use sp_runtime::{ + generic::{Digest, Header}, + DigestItem, + }; + + /// Tests for `generate_authority_pairs` + #[test] + fn test_generate_authority_pairs_count() { + // Test case 1: Zero authorities + assert_eq!(generate_authority_pairs(0).len(), 0); + + // Test case 2: A small number of authorities + assert_eq!(generate_authority_pairs(5).len(), 5); + + // Test case 
3: A larger number of authorities + assert_eq!(generate_authority_pairs(100).len(), 100); + + // Test case 4: Uniqueness of generated authorities + let pairs = generate_authority_pairs(10); + let public_keys: std::collections::HashSet<_> = + pairs.iter().map(|pair| pair.public()).collect(); + + assert_eq!(pairs.len(), public_keys.len()); + } + + /// Tests for `convert_to_authority_weight_pair` + #[test] + fn test_convert_to_authority_weight_pair() { + let num_authorities = 3; + let authorities = generate_authority_pairs(num_authorities); + let converted_pairs = convert_to_authority_weight_pair(&authorities); + + // Check the count is correct + assert_eq!(converted_pairs.len(), num_authorities as usize); + + for (i, (authority_id, weight)) in converted_pairs.iter().enumerate() { + // Check that the AuthorityId is derived correctly from the public key + let expected_id: AuthorityId = authorities[i].public().into(); + assert_eq!(*authority_id, expected_id); + + // Check that the weight is the default (usually 1) + assert_eq!(*weight, BabeAuthorityWeight::default()); + } + } + + /// Tests for `add_babe_pre_digest` + #[test] + fn test_add_babe_pre_digest() { + let mut header = Header { + number: 0, + parent_hash: H256::default(), + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + let authority_index = 42; + let block_number = 100; + + add_babe_pre_digest(&mut header, authority_index, block_number); + + // Ensure exactly one digest item was added + assert_eq!(header.digest().logs.len(), 1); + + // Check if the added digest item is the correct type and data + let digest_item = &header.digest().logs[0]; + + let pre_digest_data = match digest_item { + DigestItem::PreRuntime(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID => + PreDigest::decode(&mut &data[..]).unwrap(), + _ => panic!("Expected a BABE pre-digest"), + }; + + match pre_digest_data { + PreDigest::Primary(PrimaryPreDigest { + authority_index: auth_idx, + 
slot, + vrf_signature: _, + }) => { + assert_eq!(auth_idx, authority_index); + assert_eq!(slot, relay_chain::Slot::from(block_number)); + }, + _ => panic!("Expected a Primary PreDigest"), + } + } + + proptest! { + // Proptest for `build_relay_parent_descendants` to ensure general properties hold. + #[test] + fn prop_test_build_relay_parent_descendants( + num_headers in 1..20u64, // Test a reasonable range of headers + seed_bytes: [u8; 32], + num_authorities in 1..5u64, + ) { + let state_root = H256::from(seed_bytes); + let authorities = generate_authority_pairs(num_authorities); + + // Skip test if no authorities are generated (proptest range ensures at least 1) + if authorities.is_empty() { + return Ok(()); + } + + let headers = build_relay_parent_descendants(num_headers, state_root, authorities.clone()); + + // 1. Check the correct number of headers are generated + prop_assert_eq!(headers.len(), num_headers as usize); + + let mut previous_hash: Option<H256> = None; + + for (i, header) in headers.iter().enumerate() { + let block_number = i as u32; + let expected_authority_index = block_number % (num_authorities as u32); + let authority_pair = &authorities[expected_authority_index as usize]; + + // 2. Check block number and parent hash linkage + prop_assert_eq!(header.number, block_number); + prop_assert_eq!(header.parent_hash, previous_hash.unwrap_or_default()); + prop_assert_eq!(header.state_root, state_root); + + // 3. Check for the presence of Babe Pre-Digest and Seal (should be exactly 2 items) + prop_assert_eq!(header.digest().logs.len(), 2); + + let pre_digest_item = &header.digest().logs[0]; + let seal_item = &header.digest().logs[1]; + + // 4. 
Validate Pre-Digest content + let pre_digest_data = match pre_digest_item { + DigestItem::PreRuntime(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID => { + PreDigest::decode(&mut &data[..]).unwrap() + } + _ => panic!("Expected a BABE pre-digest"), + }; + + if let PreDigest::Primary(PrimaryPreDigest { authority_index, slot, .. }) = pre_digest_data { + prop_assert_eq!(authority_index, expected_authority_index); + prop_assert_eq!(slot, relay_chain::Slot::from(block_number as u64)); + } else { + panic!("Pre-Digest should be Primary"); + } + + // 5. Validate Seal content (check signature) + let signature = match seal_item { + DigestItem::Seal(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID => { + let raw_sig = Signature::decode(&mut &data[..]).expect("Valid signature"); + sp_consensus_babe::AuthoritySignature::from(raw_sig) + } + _ => panic!("Expected a BABE seal"), + }; + + // The signature must be valid for the header's hash without the seal, signed by the expected authority + // We need to create a copy of the header without the seal to get the correct hash for verification. 
+ let mut header_without_seal = header.clone(); + header_without_seal.digest_mut().pop(); // Remove the seal + let header_hash_for_verification = header_without_seal.hash(); + prop_assert!(AuthorityPair::verify(&signature, header_hash_for_verification.as_bytes(), &authority_pair.public())); + + let header_hash = header.hash(); + + previous_hash = Some(header_hash); + } + } + } + + /// Test to ensure that when num_authorities is populated, the authorities are included in the + /// proof + #[test] + fn test_authorities_included_in_proof() { + let mut builder = RelayStateSproofBuilder::default(); + builder.num_authorities = 3; + + let (state_root, proof) = builder.into_state_root_and_proof(); + + // Verify that the proof contains the authorities keys + let authorities_key = relay_chain::well_known_keys::AUTHORITIES; + let next_authorities_key = relay_chain::well_known_keys::NEXT_AUTHORITIES; + + // At minimum, we should be able to verify that authorities data exists in the storage + // by reconstructing the storage and checking if the keys exist + use sp_runtime::traits::HashingFor; + use sp_state_machine::{Backend, TrieBackendBuilder}; + let db = proof.into_memory_db::<HashingFor<relay_chain::Block>>(); + let backend = TrieBackendBuilder::new(db, state_root).build(); + + // Verify authorities key exists and contains 3 authorities + let authorities_data = backend.storage(authorities_key).unwrap().unwrap(); + let authorities: Vec<(AuthorityId, BabeAuthorityWeight)> = + codec::Decode::decode(&mut &authorities_data[..]).unwrap(); + assert_eq!(authorities.len(), 3); + + // Verify next_authorities key exists and contains the same 3 authorities + let next_authorities_data = backend.storage(next_authorities_key).unwrap().unwrap(); + let next_authorities: Vec<(AuthorityId, BabeAuthorityWeight)> = + codec::Decode::decode(&mut &next_authorities_data[..]).unwrap(); + assert_eq!(next_authorities.len(), 3); + + // Verify they are the same authorities + assert_eq!(authorities, next_authorities); + } + + /// Test 
to ensure into_state_root_proof_and_descendants generates relay_parent_offset+1 headers + #[test] + fn test_into_state_root_proof_and_descendants_generates_correct_number_of_headers() { + let mut builder = RelayStateSproofBuilder::default(); + builder.num_authorities = 2; + + // Test with different relay_parent_offsets + let test_cases = vec![0, 1, 5, 10]; + + for relay_parent_offset in test_cases { + let builder_clone = builder.clone(); + let (state_root, _proof, descendants) = + builder_clone.into_state_root_proof_and_descendants(relay_parent_offset); + + // Should generate relay_parent_offset + 1 headers + let expected_num_headers = relay_parent_offset + 1; + assert_eq!( + descendants.len(), + expected_num_headers as usize, + "Failed for relay_parent_offset {}: expected {} headers, got {}", + relay_parent_offset, + expected_num_headers, + descendants.len() + ); + + // Verify the headers are properly linked + for (i, header) in descendants.iter().enumerate() { + assert_eq!(header.number, i as u32); + assert_eq!(header.state_root, state_root.into()); + } + + // Verify each header has proper digest items (pre-digest and seal) + for header in &descendants { + assert_eq!( + header.digest().logs.len(), + 2, + "Each header should have pre-digest and seal" + ); + } + } + } +} diff --git a/prdoc/pr_10616.prdoc b/prdoc/pr_10616.prdoc new file mode 100644 index 0000000000000..e003fec43977b --- /dev/null +++ b/prdoc/pr_10616.prdoc @@ -0,0 +1,8 @@ +title: 'relay-sproof-builder/test: Fix num of descendants' +doc: +- audience: Node Dev + description: |- + This PR fixes a testing off-by-one error which causes the `sproof-builder` to build an extra descendant. +crates: +- name: cumulus-test-relay-sproof-builder + bump: patch