Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -43,38 +43,53 @@ pub unconstrained fn attempt_note_nonce_discovery<Env>(

// We need to find nonces (typically just one) that result in a note hash that, once siloed into a unique note
// hash, is one of the note hashes created by the transaction.
unique_note_hashes_in_tx.for_eachi(|i, expected_unique_note_hash| {
// Nonces are computed by hashing the first nullifier in the transaction with the index of the note in the new
// note hashes array. We therefore know for each note in every transaction what its nonce is.
let candidate_nonce = compute_note_hash_nonce(first_nullifier_in_tx, i);

// Given note nonce, note content and metadata, we can compute the note hash and silo it to check if it matches
// the note hash at the array index we're currently processing. TODO(#11157): handle failed
// note_hash_and_nullifier computation
// The nonce is meant to be derived from the index of the note hash in the transaction effects array. However, due
// to an issue in the kernels the nonce might actually use any of the possible note hash indices - not necessarily
// the one that corresponds to the note hash. Hence, we need to try them all.
for i in 0..MAX_NOTE_HASHES_PER_TX {
let nonce_for_i = compute_note_hash_nonce(first_nullifier_in_tx, i);

// Given note nonce, note content and metadata, we can compute the note hash and silo it to check if
// the resulting unique note matches any in the transaction.
// TODO(#11157): handle failed note_hash_and_nullifier computation
let hashes = compute_note_hash_and_nullifier(
packed_note,
owner,
storage_slot,
note_type_id,
contract_address,
randomness,
candidate_nonce,
nonce_for_i,
)
.expect(f"Failed to compute a note hash for note type {note_type_id}");

let siloed_note_hash = compute_siloed_note_hash(contract_address, hashes.note_hash);
let unique_note_hash = compute_unique_note_hash(candidate_nonce, siloed_note_hash);
let siloed_note_hash_for_i = compute_siloed_note_hash(contract_address, hashes.note_hash);
let unique_note_hash_for_i = compute_unique_note_hash(nonce_for_i, siloed_note_hash_for_i);

if unique_note_hash == expected_unique_note_hash {
// Note that while we did check that the note hash is the preimage of the expected unique note hash, we
// perform no validations on the nullifier - we fundamentally cannot, since only the application knows how
// to compute nullifiers. We simply trust it to have provided the correct one: if it hasn't, then PXE may
// fail to realize that a given note has been nullified already, and calls to the application could result
// in invalid transactions (with duplicate nullifiers). This is not a concern because an application
// already has more direct means of making a call to it fail the transaction.
let matching_notes = bvec_filter(
unique_note_hashes_in_tx,
|unique_note_hash_in_tx| unique_note_hash_in_tx == unique_note_hash_for_i,
);
if matching_notes.len() > 1 {
let identical_note_hashes = matching_notes.len();
// Note that we don't actually check that the note hashes array contains unique values, only that the note
// we found is unique. We don't expect for this to ever happen (it'd indicate a malicious node or PXE,
// which
// are both assumed to be cooperative) so testing for it just in case is unnecessary, but we _do_ need to
// handle it if we find a duplicate.
panic(
f"Received {identical_note_hashes} identical note hashes for a transaction - these should all be unique",
)
} else if matching_notes.len() == 1 {
// Note that while we did check that the note hash is the preimage of a unique note hash, we perform no
// validations on the nullifier - we fundamentally cannot, since only the application knows how to compute
// nullifiers. We simply trust it to have provided the correct one: if it hasn't, then PXE may fail to
// realize that a given note has been nullified already, and calls to the application could result in
// invalid transactions (with duplicate nullifiers). This is not a concern because an application already
// has more direct means of making a call to it fail the transaction.
discovered_notes.push(
DiscoveredNoteInfo {
note_nonce: candidate_nonce,
note_nonce: nonce_for_i,
note_hash: hashes.note_hash,
// TODO: The None case will be handled in a followup PR.
// https://linear.app/aztec-labs/issue/F-265/store-external-notes
Expand All @@ -88,7 +103,7 @@ pub unconstrained fn attempt_note_nonce_discovery<Env>(
// multiple times in the same transaction with different nonces. This typically doesn't happen due to notes
// containing random values in order to hide their contents.
}
});
}

debug_log_format(
"Found valid nonces for a total of {0} notes",
Expand All @@ -98,6 +113,22 @@ pub unconstrained fn attempt_note_nonce_discovery<Env>(
*discovered_notes
}

// There is no BoundedVec::filter in the stdlib, so we use this until that is implemented.
// Returns a new BoundedVec containing only the elements of `bvec` for which `filter` returns
// true, preserving their original relative order. The result has the same MAX_LEN as the
// input, so the push below can never overflow regardless of how many elements match.
unconstrained fn bvec_filter<Env, T, let MAX_LEN: u32>(
    bvec: BoundedVec<T, MAX_LEN>,
    filter: fn[Env](T) -> bool,
) -> BoundedVec<T, MAX_LEN> {
    // Accumulate into a mutable reference so the closure passed to for_each can push into it
    // from its captured environment.
    let filtered = &mut BoundedVec::new();

    bvec.for_each(|value| {
        if filter(value) {
            filtered.push(value);
        }
    });

    *filtered
}

mod test {
use crate::{
messages::{discovery::NoteHashAndNullifier, logs::note::MAX_NOTE_PACKED_LEN},
Expand Down Expand Up @@ -309,4 +340,96 @@ mod test {
& (discovered_note.inner_nullifier == second_note_and_data.inner_nullifier)
}));
}

// The note's nonce is derived from one index, but the note sits at a different index in the
// transaction's note hash array. Discovery must try every candidate nonce and still find it.
#[test]
unconstrained fn single_note_misaligned_nonce() {
    let note_index_in_tx = 2;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    // Fill the array with random values so that only the injected hash can possibly match.
    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The note is not at the correct index
    unique_note_hashes_in_tx.set(note_index_in_tx + 1, note_and_data.unique_note_hash);

    let discovered_notes = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );

    // Exactly one note is expected, carrying the constructed note's nonce, hash and nullifier.
    assert_eq(discovered_notes.len(), 1);
    let discovered_note = discovered_notes.get(0);

    assert_eq(discovered_note.note_nonce, note_and_data.note_nonce);
    assert_eq(discovered_note.note_hash, note_and_data.note_hash);
    assert_eq(discovered_note.inner_nullifier, note_and_data.inner_nullifier);
}

// The note's nonce is derived from an index larger than the number of note hashes actually in
// the transaction. Discovery iterates over all MAX_NOTE_HASHES_PER_TX candidate indices, so it
// must still locate the note.
#[test]
unconstrained fn single_note_nonce_with_index_past_note_hashes_in_tx() {
    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The nonce is computed with an index that does not exist in the tx
    let note_index_in_tx = unique_note_hashes_in_tx.len() + 5;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    // The note is inserted at an arbitrary index - its true index is out of the array's bounds
    unique_note_hashes_in_tx.set(2, note_and_data.unique_note_hash);

    let discovered_notes = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );

    // Exactly one note is expected, carrying the constructed note's nonce, hash and nullifier.
    assert_eq(discovered_notes.len(), 1);
    let discovered_note = discovered_notes.get(0);

    assert_eq(discovered_note.note_nonce, note_and_data.note_nonce);
    assert_eq(discovered_note.note_hash, note_and_data.note_hash);
    assert_eq(discovered_note.inner_nullifier, note_and_data.inner_nullifier);
}

// If the same unique note hash appears at more than one index of the transaction's note hash
// array, discovery must panic rather than silently pick one of the duplicates.
#[test(should_fail_with = "identical note hashes for a transaction")]
unconstrained fn duplicate_unique_note_hashes() {
    let note_index_in_tx = 2;
    let note_and_data = construct_note(VALUE, note_index_in_tx);

    let mut unique_note_hashes_in_tx = BoundedVec::from_array([
        random(), random(), random(), random(), random(), random(), random(),
    ]);

    // The same unique note hash is present in two indices in the array, which is not allowed. Note that we don't
    // test all note hashes for uniqueness, only those that we actually find.
    unique_note_hashes_in_tx.set(note_index_in_tx, note_and_data.unique_note_hash);
    unique_note_hashes_in_tx.set(note_index_in_tx + 1, note_and_data.unique_note_hash);

    // Return value intentionally ignored: the call is expected to panic before returning.
    let _ = attempt_note_nonce_discovery(
        unique_note_hashes_in_tx,
        FIRST_NULLIFIER_IN_TX,
        compute_note_hash_and_nullifier,
        CONTRACT_ADDRESS,
        OWNER,
        STORAGE_SLOT,
        RANDOMNESS,
        MockNote::get_id(),
        BoundedVec::from_array(note_and_data.note.pack()),
    );
}
}
4 changes: 2 additions & 2 deletions noir-projects/aztec-nr/aztec/src/note/note_metadata.nr
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,8 @@ impl NoteMetadata {
self.stage == NoteStage.PENDING_PREVIOUS_PHASE
}

/// Returns true if the note is settled, i.e. if it's been created in a prior transaction and is therefore already
/// in the note hash tree.
/// Returns `true` if the note is settled, i.e. if it's been created in a prior transaction and is therefore
/// already in the note hash tree.
pub fn is_settled(self) -> bool {
self.stage == NoteStage.SETTLED
}
Expand Down
4 changes: 4 additions & 0 deletions spartan/scripts/deploy_network.sh
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,11 @@ PROVER_FAILED_PROOF_STORE=${PROVER_FAILED_PROOF_STORE:-}
SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-1}
SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-null}
SEQ_MAX_TX_PER_CHECKPOINT=${SEQ_MAX_TX_PER_CHECKPOINT:-8}
# NOTE(review): an unresolved merge conflict was committed here; kept the HEAD default of 2.
# The origin/v4 side left this variable unset (empty default) — confirm the intended default.
SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER=${SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER:-2}
SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-}
SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT=${SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT:-}
SEQ_BUILD_CHECKPOINT_IF_EMPTY=${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-}
Expand Down
13 changes: 13 additions & 0 deletions yarn-project/.claude/rules/bash-no-echo-exit.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Bash Command Rules

**NEVER append `; echo "EXIT: $?"` or similar exit-code suffixes to any command.** The Bash tool already reports exit codes directly. Adding these suffixes is redundant and causes unnecessary permission prompts.

Bad:
```bash
yarn test src/file.test.ts > /tmp/out.log 2>&1; echo "EXIT: $?"
```

Good:
```bash
yarn test src/file.test.ts > /tmp/out.log 2>&1
```
6 changes: 3 additions & 3 deletions yarn-project/archiver/src/archiver-store.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import { EventEmitter } from 'events';
import { type MockProxy, mock } from 'jest-mock-extended';

import { Archiver, type ArchiverEmitter } from './archiver.js';
import { InitialBlockNumberNotSequentialError } from './errors.js';
import { BlockNumberNotSequentialError } from './errors.js';
import type { ArchiverInstrumentation } from './modules/instrumentation.js';
import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
import { KVArchiverDataStore } from './store/kv_archiver_store.js';
Expand Down Expand Up @@ -265,7 +265,7 @@ describe('Archiver Store', () => {
await archiver.addBlock(block1);

// Block 3 should be rejected because block 2 is missing
await expect(archiver.addBlock(block3)).rejects.toThrow(InitialBlockNumberNotSequentialError);
await expect(archiver.addBlock(block3)).rejects.toThrow(BlockNumberNotSequentialError);
});

it('rejects blocks with duplicate block numbers', async () => {
Expand All @@ -276,7 +276,7 @@ describe('Archiver Store', () => {
await archiver.addBlock(block2);

// Adding block 2 again should be rejected
await expect(archiver.addBlock(block2)).rejects.toThrow(InitialBlockNumberNotSequentialError);
await expect(archiver.addBlock(block2)).rejects.toThrow(BlockNumberNotSequentialError);
});

it('rejects first block if not starting from block 1', async () => {
Expand Down
10 changes: 7 additions & 3 deletions yarn-project/archiver/src/archiver-sync.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1327,7 +1327,7 @@ describe('Archiver Sync', () => {
expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1));
const blockAlreadySyncedFromCheckpoint = cp1.blocks[cp1.blocks.length - 1];

// Now try and add one of the blocks via the addProposedBlocks method. It should throw
// Now try and add one of the blocks via the addProposedBlock method. It should throw
await expect(archiver.addBlock(blockAlreadySyncedFromCheckpoint)).rejects.toThrow();
}, 10_000);

Expand Down Expand Up @@ -1428,8 +1428,12 @@ describe('Archiver Sync', () => {
const { checkpoint: cp3 } = await fake.addCheckpoint(CheckpointNumber(3), { l1BlockNumber: 5010n });

// Add blocks from BOTH checkpoints locally (matching the L1 checkpoints)
await archiverStore.addProposedBlocks(cp2.blocks, { force: true });
await archiverStore.addProposedBlocks(cp3.blocks, { force: true });
for (const block of cp2.blocks) {
await archiverStore.addProposedBlock(block, { force: true });
}
for (const block of cp3.blocks) {
await archiverStore.addProposedBlock(block, { force: true });
}

// Verify all blocks are visible locally
const lastBlockInCheckpoint3 = cp3.blocks[cp3.blocks.length - 1].number;
Expand Down
9 changes: 7 additions & 2 deletions yarn-project/archiver/src/archiver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import {
import { type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';

import { type ArchiverConfig, mapArchiverConfig } from './config.js';
import { NoBlobBodiesFoundError } from './errors.js';
import { BlockAlreadyCheckpointedError, NoBlobBodiesFoundError } from './errors.js';
import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
import { ArchiverDataSourceBase } from './modules/data_source_base.js';
import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js';
Expand Down Expand Up @@ -242,10 +242,15 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
}

try {
await this.updater.addProposedBlocks([block]);
await this.updater.addProposedBlock(block);
this.log.debug(`Added block ${block.number} to store`);
resolve();
} catch (err: any) {
if (err instanceof BlockAlreadyCheckpointedError) {
this.log.debug(`Proposed block ${block.number} matches already checkpointed block, ignoring late proposal`);
resolve();
continue;
}
this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
reject(err);
}
Expand Down
34 changes: 10 additions & 24 deletions yarn-project/archiver/src/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,24 +6,9 @@ export class NoBlobBodiesFoundError extends Error {
}
}

export class InitialBlockNumberNotSequentialError extends Error {
constructor(
public readonly newBlockNumber: number,
public readonly previousBlockNumber: number | undefined,
) {
super(
`Cannot insert new block ${newBlockNumber} given previous block number in store is ${
previousBlockNumber ?? 'undefined'
}`,
);
}
}

export class BlockNumberNotSequentialError extends Error {
constructor(newBlockNumber: number, previous: number | undefined) {
super(
`Cannot insert new block ${newBlockNumber} given previous block number in batch is ${previous ?? 'undefined'}`,
);
super(`Cannot insert new block ${newBlockNumber} given previous block number is ${previous ?? 'undefined'}`);
}
}

Expand All @@ -48,14 +33,6 @@ export class CheckpointNumberNotSequentialError extends Error {
}
}

export class CheckpointNumberNotConsistentError extends Error {
constructor(newCheckpointNumber: number, previous: number | undefined) {
super(
`Cannot insert block for new checkpoint ${newCheckpointNumber} given previous block was checkpoint ${previous ?? 'undefined'}`,
);
}
}

export class BlockIndexNotSequentialError extends Error {
constructor(newBlockIndex: number, previousBlockIndex: number | undefined) {
super(
Expand Down Expand Up @@ -89,6 +66,15 @@ export class BlockNotFoundError extends Error {
}
}

/**
 * Thrown when a proposed block matches a block that was already checkpointed with the same
 * content. This is expected for late proposals, so callers may catch this error type and treat
 * it as benign rather than as a sync failure.
 */
export class BlockAlreadyCheckpointedError extends Error {
  constructor(public readonly blockNumber: number) {
    super(`Block ${blockNumber} has already been checkpointed with the same content`);
    // Set an explicit name so logs and instanceof-style matching show the subclass, not 'Error'.
    this.name = 'BlockAlreadyCheckpointedError';
  }
}

/** Thrown when a proposed block conflicts with an already checkpointed block (different content). */
export class CannotOverwriteCheckpointedBlockError extends Error {
constructor(
public readonly blockNumber: number,
Expand Down
Loading
Loading