Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -44,38 +44,53 @@ pub(crate) unconstrained fn attempt_note_nonce_discovery<Env>(

// We need to find nonces (typically just one) that result in a note hash that, once siloed into a unique note
// hash, is one of the note hashes created by the transaction.
unique_note_hashes_in_tx.for_eachi(|i, expected_unique_note_hash| {
// Nonces are computed by hashing the first nullifier in the transaction with the index of the note in the new
// note hashes array. We therefore know for each note in every transaction what its nonce is.
let candidate_nonce = compute_note_hash_nonce(first_nullifier_in_tx, i);

// Given note nonce, note content and metadata, we can compute the note hash and silo it to check if it matches
// the note hash at the array index we're currently processing. TODO(#11157): handle failed
// note_hash_and_nullifier computation
// The nonce is meant to be derived from the index of the note hash in the transaction effects array. However, due
// to an issue in the kernels the nonce might actually use any of the possible note hash indices - not necessarily
// the one that corresponds to the note hash. Hence, we need to try them all.
for i in 0..MAX_NOTE_HASHES_PER_TX {
let nonce_for_i = compute_note_hash_nonce(first_nullifier_in_tx, i);

// Given note nonce, note content and metadata, we can compute the note hash and silo it to check if
// the resulting unique note matches any in the transaction.
// TODO(#11157): handle failed note_hash_and_nullifier computation
let hashes = compute_note_hash_and_nullifier(
packed_note,
owner,
storage_slot,
note_type_id,
contract_address,
randomness,
candidate_nonce,
nonce_for_i,
)
.expect(f"Failed to compute a note hash for note type {note_type_id}");

let siloed_note_hash = compute_siloed_note_hash(contract_address, hashes.note_hash);
let unique_note_hash = compute_unique_note_hash(candidate_nonce, siloed_note_hash);
let siloed_note_hash_for_i = compute_siloed_note_hash(contract_address, hashes.note_hash);
let unique_note_hash_for_i = compute_unique_note_hash(nonce_for_i, siloed_note_hash_for_i);

if unique_note_hash == expected_unique_note_hash {
// Note that while we did check that the note hash is the preimage of the expected unique note hash, we
// perform no validations on the nullifier - we fundamentally cannot, since only the application knows how
// to compute nullifiers. We simply trust it to have provided the correct one: if it hasn't, then PXE may
// fail to realize that a given note has been nullified already, and calls to the application could result
// in invalid transactions (with duplicate nullifiers). This is not a concern because an application
// already has more direct means of making a call to it fail the transaction.
let matching_notes = bvec_filter(
unique_note_hashes_in_tx,
|unique_note_hash_in_tx| unique_note_hash_in_tx == unique_note_hash_for_i,
);
if matching_notes.len() > 1 {
let identical_note_hashes = matching_notes.len();
// Note that we don't actually check that the note hashes array contains unique values, only that the note
// we found is unique. We don't expect this to ever happen (it'd indicate a malicious node or PXE, which
// are both assumed to be cooperative) so testing for it just in case is unnecessary, but we _do_ need to
// handle it if we find a duplicate.
panic(
f"Received {identical_note_hashes} identical note hashes for a transaction - these should all be unique",
)
} else if matching_notes.len() == 1 {
// Note that while we did check that the note hash is the preimage of a unique note hash, we perform no
// validations on the nullifier - we fundamentally cannot, since only the application knows how to compute
// nullifiers. We simply trust it to have provided the correct one: if it hasn't, then PXE may fail to
// realize that a given note has been nullified already, and calls to the application could result in
// invalid transactions (with duplicate nullifiers). This is not a concern because an application already
// has more direct means of making a call to it fail the transaction.
discovered_notes.push(
DiscoveredNoteInfo {
note_nonce: candidate_nonce,
note_nonce: nonce_for_i,
note_hash: hashes.note_hash,
// TODO: The None case will be handled in a followup PR.
// https://linear.app/aztec-labs/issue/F-265/store-external-notes
Expand All @@ -89,13 +104,29 @@ pub(crate) unconstrained fn attempt_note_nonce_discovery<Env>(
// multiple times in the same transaction with different nonces. This typically doesn't happen due to notes
// containing random values in order to hide their contents.
}
});
}

aztecnr_debug_log_format!("Found valid nonces for a total of {0} notes")([discovered_notes.len() as Field]);

*discovered_notes
}

// There is no BoundedVec::filter in the stdlib, so we use this until that is implemented.
// Returns a new BoundedVec containing, in order, the elements of `bvec` for which `filter` returns true.
unconstrained fn bvec_filter<Env, T, let MAX_LEN: u32>(
    bvec: BoundedVec<T, MAX_LEN>,
    filter: fn[Env](T) -> bool,
) -> BoundedVec<T, MAX_LEN> {
    let mut kept: BoundedVec<T, MAX_LEN> = BoundedVec::new();

    // A dynamic loop bound is fine here since this function is unconstrained. Iterating by index (instead
    // of `for_each` with a capturing closure) lets us push into a plain mutable local directly.
    for i in 0..bvec.len() {
        let value = bvec.get(i);
        if filter(value) {
            kept.push(value);
        }
    }

    kept
}

mod test {
use crate::{
messages::{discovery::NoteHashAndNullifier, logs::note::MAX_NOTE_PACKED_LEN},
Expand Down Expand Up @@ -307,4 +338,96 @@ mod test {
& (discovered_note.inner_nullifier == second_note_and_data.inner_nullifier)
}));
}

// Discovery must try every possible note hash index when deriving candidate nonces (due to the kernel issue
// described in `attempt_note_nonce_discovery`), so a note whose nonce was derived from an index that does
// NOT match its actual position in the tx's note hash array must still be found.
#[test]
unconstrained fn single_note_misaligned_nonce() {
    // The nonce is derived from index 2, but the unique note hash is placed at index 3 below.
    let note_index_in_tx = 2;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    // Fill the tx's note hash array with unrelated random values.
    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The note is not at the correct index
    unique_note_hashes_in_tx.set(note_index_in_tx + 1, note_and_data.unique_note_hash);

    let discovered_notes = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );

    // Despite the index/nonce mismatch, exactly the one note should be discovered, with the nonce, hash
    // and nullifier originally used to construct it.
    assert_eq(discovered_notes.len(), 1);
    let discovered_note = discovered_notes.get(0);

    assert_eq(discovered_note.note_nonce, note_and_data.note_nonce);
    assert_eq(discovered_note.note_hash, note_and_data.note_hash);
    assert_eq(discovered_note.inner_nullifier, note_and_data.inner_nullifier);
}

// Discovery iterates candidate nonce indices up to MAX_NOTE_HASHES_PER_TX, not just up to the number of
// note hashes actually present in the tx. A note whose nonce was derived from an index beyond the array's
// length must therefore still be found.
#[test]
unconstrained fn single_note_nonce_with_index_past_note_hashes_in_tx() {
    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The nonce is computed with an index that does not exist in the tx
    let note_index_in_tx = unique_note_hashes_in_tx.len() + 5;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    // The note is inserted at an arbitrary index - its true index is out of the array's bounds
    unique_note_hashes_in_tx.set(2, note_and_data.unique_note_hash);

    let discovered_notes = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );

    // Exactly one note should be discovered, carrying the out-of-bounds-index nonce it was built with.
    assert_eq(discovered_notes.len(), 1);
    let discovered_note = discovered_notes.get(0);

    assert_eq(discovered_note.note_nonce, note_and_data.note_nonce);
    assert_eq(discovered_note.note_hash, note_and_data.note_hash);
    assert_eq(discovered_note.inner_nullifier, note_and_data.inner_nullifier);
}

// If the tx's note hash array contains the same unique note hash more than once (which indicates a
// malicious or broken node/PXE), discovery must panic rather than silently pick one of the duplicates.
// The should_fail_with string matches the panic raised by `attempt_note_nonce_discovery`.
#[test(should_fail_with = "identical note hashes for a transaction")]
unconstrained fn duplicate_unique_note_hashes() {
    let note_index_in_tx = 2;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The same unique note hash is present in two indices in the array, which is not allowed. Note that we don't
    // test all note hashes for uniqueness, only those that we actually find.
    unique_note_hashes_in_tx.set(note_index_in_tx, note_and_data.unique_note_hash);
    unique_note_hashes_in_tx.set(note_index_in_tx + 1, note_and_data.unique_note_hash);

    // Return value discarded: the call is expected to panic before producing a result.
    let _ = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );
}
}
3 changes: 1 addition & 2 deletions noir-projects/aztec-nr/aztec/src/note/note_metadata.nr
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,7 @@ impl NoteMetadata {
}

/// Returns `true` if the note is settled, i.e. if it's been created in a prior transaction and is therefore
/// already in the note hash tree.
pub fn is_settled(self) -> bool {
    self.stage == NoteStage.SETTLED
}
Expand Down
Loading