Skip to content
17 changes: 17 additions & 0 deletions noir-projects/aztec-nr/aztec/src/messages/processing/mod.nr
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,10 @@ global EVENT_VALIDATION_REQUESTS_ARRAY_BASE_SLOT: Field = sha256_to_field(
"AZTEC_NR::EVENT_VALIDATION_REQUESTS_ARRAY_BASE_SLOT".as_bytes(),
);

global NOTE_BOUNDED_VEC_CAPACITY_SLOT: Field = sha256_to_field("AZTEC_NR::NOTE_BOUNDED_VEC_CAPACITY_SLOT".as_bytes());

global EVENT_BOUNDED_VEC_CAPACITY_SLOT: Field = sha256_to_field("AZTEC_NR::EVENT_BOUNDED_VEC_CAPACITY_SLOT".as_bytes());

global LOG_RETRIEVAL_REQUESTS_ARRAY_BASE_SLOT: Field = sha256_to_field(
"AZTEC_NR::LOG_RETRIEVAL_REQUESTS_ARRAY_BASE_SLOT".as_bytes(),
);
Expand Down Expand Up @@ -150,6 +154,19 @@ pub unconstrained fn enqueue_event_for_validation(
///
/// This automatically clears both validation request queues, so no further work needs to be done by the caller.
pub unconstrained fn validate_and_store_enqueued_notes_and_events(contract_address: AztecAddress) {
// Store BoundedVec capacities so PXE knows the serialization layout. Contracts that don't store these will
// cause PXE to fall back to default capacities.
oracle::capsules::store(
contract_address,
NOTE_BOUNDED_VEC_CAPACITY_SLOT,
[MAX_NOTE_PACKED_LEN as Field],
);
oracle::capsules::store(
contract_address,
EVENT_BOUNDED_VEC_CAPACITY_SLOT,
[MAX_EVENT_SERIALIZED_LEN as Field],
Comment on lines +164 to +167
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

waaaaat? Doesn't the PR description say that we can just pass these values as params to the oracle? that seems much saner.

);

oracle::message_processing::validate_and_store_enqueued_notes_and_events(
contract_address,
NOTE_VALIDATION_REQUESTS_ARRAY_BASE_SLOT,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,23 @@ import { TxHash } from '@aztec/stdlib/tx';
import { EventValidationRequest } from './event_validation_request.js';

describe('EventValidationRequest', () => {
it('output of Noir serialization deserializes as expected', () => {
it('deserializes with default capacity when no capacity is given', () => {
// 11 storage fields = default BoundedVec capacity (default)
const serialized = [
1, // contract_address
2, // event_type_id
3, // randomness
4, // serialized_event[0]
5, // serialized_event[1]
0, // serialized_event padding start
4,
5,
0,
0,
0,
0,
0,
0,
0, // serialized_event padding end
0,
0,
0, // 11 storage fields
2, // bounded_vec_len
6, // event_commitment
7, // tx_hash
Expand All @@ -37,4 +39,59 @@ describe('EventValidationRequest', () => {
expect(request.txHash).toEqual(TxHash.fromBigInt(7n));
expect(request.recipient).toEqual(AztecAddress.fromBigInt(8n));
});

it('deserializes with explicit capacity matching current capacity', () => {
  // Layout: 3 header fields, a 10-field storage area (current BoundedVec
  // capacity), the vec length, then 3 footer fields.
  const capacity = 10;
  const storage = [4, 5, ...Array(capacity - 2).fill(0)]; // 10 storage fields
  const serialized = [
    1, // contract_address
    2, // event_type_id
    3, // randomness
    ...storage,
    2, // bounded_vec_len
    6, // event_commitment
    7, // tx_hash
    8, // recipient
  ].map(n => new Fr(n));

  const request = EventValidationRequest.fromFields(serialized, capacity);

  expect(request.contractAddress).toEqual(AztecAddress.fromBigInt(1n));
  expect(request.serializedEvent).toEqual([new Fr(4), new Fr(5)]);
  expect(request.eventCommitment).toEqual(new Fr(6));
});

it('throws if capacity does not match actual field count (reader not exhausted)', () => {
  // The data carries 11 storage fields, but capacity=10 is claimed, so one
  // field is left unconsumed and deserialization must fail.
  const header = [1, 2, 3];
  const storage = [10, 11, ...Array(9).fill(0)]; // 11 storage fields
  const footer = [6, 7, 8];
  const serialized = [...header, ...storage, 2 /* bounded_vec_len */, ...footer].map(n => new Fr(n));

  expect(() => EventValidationRequest.fromFields(serialized, 10)).toThrow(/did not consume all fields/);
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,9 @@ import { EventSelector } from '@aztec/stdlib/abi';
import { AztecAddress } from '@aztec/stdlib/aztec-address';
import { TxHash } from '@aztec/stdlib/tx';

// TODO(#14617): should we compute this from constants? This value is aztec-nr specific.
const MAX_EVENT_SERIALIZED_LEN = 10;
// Default BoundedVec storage capacity for contracts that don't explicitly store their capacity.
// TODO(F-380): remove once all contracts store capacity explicitly.
export const DEFAULT_EVENT_BOUNDED_VEC_CAPACITY = 11;

/**
* Intermediate struct used to perform batch event validation by PXE. The `utilityValidateAndStoreEnqueuedNotesAndEvents` oracle
Expand All @@ -22,22 +23,28 @@ export class EventValidationRequest {
public recipient: AztecAddress,
) {}

static fromFields(fields: Fr[] | FieldReader): EventValidationRequest {
static fromFields(fields: Fr[], capacity: number = DEFAULT_EVENT_BOUNDED_VEC_CAPACITY): EventValidationRequest {
const reader = FieldReader.asReader(fields);

const contractAddress = AztecAddress.fromField(reader.readField());
const eventTypeId = EventSelector.fromField(reader.readField());

const randomness = reader.readField();

const eventStorage = reader.readFieldArray(MAX_EVENT_SERIALIZED_LEN);
const eventStorage = reader.readFieldArray(capacity);
const eventLen = reader.readField().toNumber();
const serializedEvent = eventStorage.slice(0, eventLen);

const eventCommitment = reader.readField();
const txHash = TxHash.fromField(reader.readField());
const recipient = AztecAddress.fromField(reader.readField());

if (reader.remainingFields() !== 0) {
throw new Error(
`EventValidationRequest deserialization did not consume all fields: ${reader.remainingFields()} remaining (capacity=${capacity}).`,
);
}

return new EventValidationRequest(
contractAddress,
eventTypeId,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,7 @@ import { range } from '@aztec/foundation/array';
import { Fr } from '@aztec/foundation/curves/bn254';
import type { TxHash } from '@aztec/stdlib/tx';

import { MAX_NOTE_PACKED_LEN } from './note_validation_request.js';

const MAX_PUBLIC_LOG_LEN_FOR_NOTE_COMPLETION = MAX_NOTE_PACKED_LEN;
const MAX_LOG_CONTENT_LEN = Math.max(MAX_PUBLIC_LOG_LEN_FOR_NOTE_COMPLETION, PRIVATE_LOG_CIPHERTEXT_LEN);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I believe it didn't make sense to have this. MAX_NOTE_PACKED_LEN is calculated in aztec-nr from PRIVATE_LOG_CIPHERTEXT_LEN, and it should always be lower than it. But please correct me if I'm wrong

const MAX_LOG_CONTENT_LEN = PRIVATE_LOG_CIPHERTEXT_LEN;

/**
* Intermediate struct used to perform batch log retrieval by PXE. The `utilityBulkRetrieveLogs` oracle stores values of this
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,27 +5,29 @@ import { TxHash } from '@aztec/stdlib/tx';
import { NoteValidationRequest } from './note_validation_request.js';

describe('NoteValidationRequest', () => {
it('output of Noir serialization deserializes as expected', () => {
it('deserializes with default capacity when no capacity is given', () => {
// 9 storage fields = old BoundedVec capacity (default)
const serialized = [
'0x0000000000000000000000000000000000000000000000000000000000000001', // contract address
'0x0000000000000000000000000000000000000000000000000000000000000032', // owner
'0x0000000000000000000000000000000000000000000000000000000000000002', // storage slot
'0x000000000000000000000000000000000000000000000000000000000000002a', // randomness
'0x0000000000000000000000000000000000000000000000000000000000000003', // note nonce
'0x0000000000000000000000000000000000000000000000000000000000000004', // content begin: note content 1
'0x0000000000000000000000000000000000000000000000000000000000000005', // note content 2
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000', // content end (MAX_NOTE_PACKED_LEN = 8)
'0x0000000000000000000000000000000000000000000000000000000000000002', // content length
'0x0000000000000000000000000000000000000000000000000000000000000006', // note hash
'0x0000000000000000000000000000000000000000000000000000000000000007', // nullifier
'0x0000000000000000000000000000000000000000000000000000000000000008', // tx hash
'0x0000000000000000000000000000000000000000000000000000000000000009', // recipient
].map(Fr.fromHexString);
1, // contract address
50, // owner
2, // storage slot
42, // randomness
3, // note nonce
4,
5,
0,
0,
0,
0,
0,
0,
0, // 9 storage fields
2, // content length
6, // note hash
7, // nullifier
8, // tx hash
9, // recipient
].map(n => new Fr(n));

const request = NoteValidationRequest.fromFields(serialized);

Expand All @@ -41,32 +43,60 @@ describe('NoteValidationRequest', () => {
expect(request.recipient).toEqual(AztecAddress.fromBigInt(9n));
});

it('throws if fed more fields than expected', () => {
it('deserializes with explicit capacity matching current capacity', () => {
// 8 storage fields = current BoundedVec capacity
const serialized = [
'0x0000000000000000000000000000000000000000000000000000000000000001', // contract address
'0x0000000000000000000000000000000000000000000000000000000000000032', // owner
'0x0000000000000000000000000000000000000000000000000000000000000002', // storage slot
'0X000000000000000000000000000000000000000000000000000000000000002a', // randomness
'0x0000000000000000000000000000000000000000000000000000000000000003', // note nonce
'0x0000000000000000000000000000000000000000000000000000000000000004', // content begin: note content 1
'0x0000000000000000000000000000000000000000000000000000000000000005', // note content 2
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000000000000000000000000000000', // content end (MAX_NOTE_PACKED_LEN = 8)
'0x0000000000000000000000000000000000000000000000000000000000000000', // extra field beyond MAX_NOTE_PACKED_LEN, this is a malformed serialization
'0x0000000000000000000000000000000000000000000000000000000000000002', // content length
'0x0000000000000000000000000000000000000000000000000000000000000006', // note hash
'0x0000000000000000000000000000000000000000000000000000000000000007', // nullifier
'0x0000000000000000000000000000000000000000000000000000000000000008', // tx hash
'0x0000000000000000000000000000000000000000000000000000000000000009', // recipient
].map(Fr.fromHexString);
1, // contract address
50, // owner
2, // storage slot
42, // randomness
3, // note nonce
4,
5,
0,
0,
0,
0,
0,
0, // 8 storage fields
2, // content length
6, // note hash
7, // nullifier
8, // tx hash
9, // recipient
].map(n => new Fr(n));

expect(() => NoteValidationRequest.fromFields(serialized)).toThrow(
/Error converting array of fields to NoteValidationRequest/,
);
const request = NoteValidationRequest.fromFields(serialized, 8);

expect(request.contractAddress).toEqual(AztecAddress.fromBigInt(1n));
expect(request.content).toEqual([new Fr(4), new Fr(5)]);
expect(request.noteHash).toEqual(new Fr(6));
});

it('throws if capacity does not match actual field count (reader not exhausted)', () => {
  // The data carries 9 storage fields, but capacity=8 is claimed, so one
  // field is left unconsumed and deserialization must fail.
  const header = [1, 2, 3, 4, 5];
  const storage = [10, 11, ...Array(7).fill(0)]; // 9 storage fields
  const footer = [6, 7, 8, 9];
  const serialized = [...header, ...storage, 2 /* content length */, ...footer].map(n => new Fr(n));

  expect(() => NoteValidationRequest.fromFields(serialized, 8)).toThrow(/did not consume all fields/);
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@ import { FieldReader } from '@aztec/foundation/serialize';
import { AztecAddress } from '@aztec/stdlib/aztec-address';
import { TxHash } from '@aztec/stdlib/tx';

// TODO(#14617): should we compute this from constants? This value is aztec-nr specific.
export const MAX_NOTE_PACKED_LEN = 8;
// Default BoundedVec storage capacity for contracts that don't explicitly store their capacity.
// TODO(F-380): remove once all contracts store capacity explicitly.
export const DEFAULT_NOTE_BOUNDED_VEC_CAPACITY = 9;

/**
* Intermediate struct used to perform batch note validation by PXE. The `utilityValidateAndStoreEnqueuedNotesAndEvents` oracle
Expand All @@ -24,7 +25,7 @@ export class NoteValidationRequest {
public recipient: AztecAddress,
) {}

static fromFields(fields: Fr[] | FieldReader): NoteValidationRequest {
static fromFields(fields: Fr[], capacity: number = DEFAULT_NOTE_BOUNDED_VEC_CAPACITY): NoteValidationRequest {
const reader = FieldReader.asReader(fields);

const contractAddress = AztecAddress.fromField(reader.readField());
Expand All @@ -33,7 +34,7 @@ export class NoteValidationRequest {
const randomness = reader.readField();
const noteNonce = reader.readField();

const contentStorage = reader.readFieldArray(MAX_NOTE_PACKED_LEN);
const contentStorage = reader.readFieldArray(capacity);
const contentLen = reader.readField().toNumber();
const content = contentStorage.slice(0, contentLen);

Expand All @@ -44,7 +45,7 @@ export class NoteValidationRequest {

if (reader.remainingFields() !== 0) {
throw new Error(
`Error converting array of fields to NoteValidationRequest. Hint: check that MAX_NOTE_PACKED_LEN is consistent with private_notes::MAX_NOTE_PACKED_LEN in Aztec-nr.`,
`NoteValidationRequest deserialization did not consume all fields: ${reader.remainingFields()} remaining (capacity=${capacity}).`,
);
}

Expand Down
Loading
Loading