diff --git a/docs/docs-developers/docs/resources/migration_notes.md b/docs/docs-developers/docs/resources/migration_notes.md index 259002aaa815..9a342de45a2f 100644 --- a/docs/docs-developers/docs/resources/migration_notes.md +++ b/docs/docs-developers/docs/resources/migration_notes.md @@ -9,6 +9,16 @@ Aztec is in active development. Each version may introduce breaking changes that ## TBD +### [Aztec.nr] Made `compute_note_hash_for_nullification` unconstrained + +This function shouldn't have been constrained in the first place, as constrained computation of `HintedNote` nullifiers is dangerous (constrained computation of nullifiers can be performed only on the `ConfirmedNote` type). If you were calling this from a constrained function, consider using `compute_confirmed_note_hash_for_nullification` instead. Unconstrained usage is safe. + +### [Aztec.nr] Changes to standard note hash computation + +Note hashes used to be computed with the storage slot being the last value of the preimage, it is now the first. This is to make it easier to ensure all note hashes have proper domain separation. + +This change requires no input from your side unless you were testing or relying on hardcoded note hashes. + ### [Aztec.js] `getPublicEvents` now returns an object instead of an array `getPublicEvents` now returns a `GetPublicEventsResult` object with `events` and `maxLogsHit` fields instead of a plain array. This enables pagination through large result sets using the new `afterLog` filter option. diff --git a/noir-projects/aztec-nr/aztec/src/history/nullifier.nr b/noir-projects/aztec-nr/aztec/src/history/nullifier.nr index 329a6c1b33bb..697e82d926dd 100644 --- a/noir-projects/aztec-nr/aztec/src/history/nullifier.nr +++ b/noir-projects/aztec-nr/aztec/src/history/nullifier.nr @@ -26,7 +26,7 @@ mod test; /// /// ## Cost /// -/// This function performs a single merkle tree inclusion proof, which is in the order of 4k gates. 
+/// This function performs a single merkle tree inclusion proof, which is ~4k gates. /// /// If you don't need to assert existence at a _specific_ past block, consider using /// [`PrivateContext::assert_nullifier_exists`](crate::context::PrivateContext::assert_nullifier_exists) instead, which @@ -82,7 +82,7 @@ pub fn assert_nullifier_existed_by(block_header: BlockHeader, siloed_nullifier: /// /// ## Cost /// -/// This function performs a single merkle tree inclusion proof, which is in the order of 4k gates. +/// This function performs a single merkle tree inclusion proof, which is ~4k gates. pub fn assert_nullifier_did_not_exist_by(block_header: BlockHeader, siloed_nullifier: Field) { // 1) Get the membership witness of a low nullifier of the nullifier. // Safety: The witness is only used as a "magical value" that makes the proof below pass. Hence it's safe. diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes.nr b/noir-projects/aztec-nr/aztec/src/macros/notes.nr index b2895f9fc3c9..2836182df312 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes.nr @@ -78,8 +78,8 @@ comptime fn generate_note_hash_trait_impl(s: TypeDefinition) -> Quoted { quote { impl aztec::note::note_interface::NoteHash for $name { fn compute_note_hash(self, owner: aztec::protocol::address::AztecAddress, storage_slot: Field, randomness: Field) -> Field { - let inputs = aztec::protocol::traits::Packable::pack(self).concat( [aztec::protocol::traits::ToField::to_field(owner), storage_slot, randomness]); - aztec::protocol::hash::poseidon2_hash_with_separator(inputs, aztec::protocol::constants::DOM_SEP__NOTE_HASH) + let data = aztec::protocol::traits::Packable::pack(self).concat([aztec::protocol::traits::ToField::to_field(owner), randomness]); + aztec::note::utils::compute_note_hash(storage_slot, data) } fn compute_nullifier( @@ -93,10 +93,7 @@ comptime fn generate_note_hash_trait_impl(s: TypeDefinition) -> Quoted { // in the quote 
to avoid "trait not in scope" compiler warnings. let owner_npk_m_hash = aztec::protocol::traits::Hash::hash(owner_npk_m); let secret = context.request_nhk_app(owner_npk_m_hash); - aztec::protocol::hash::poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - aztec::protocol::constants::DOM_SEP__NOTE_NULLIFIER as Field, - ) + aztec::note::utils::compute_note_nullifier(note_hash_for_nullification, [secret]) } unconstrained fn compute_nullifier_unconstrained( @@ -113,10 +110,7 @@ comptime fn generate_note_hash_trait_impl(s: TypeDefinition) -> Quoted { // in the quote to avoid "trait not in scope" compiler warnings. let owner_npk_m_hash = aztec::protocol::traits::Hash::hash(owner_npk_m); let secret = aztec::keys::getters::get_nhk_app(owner_npk_m_hash); - aztec::protocol::hash::poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - aztec::protocol::constants::DOM_SEP__NOTE_NULLIFIER as Field, - ) + aztec::note::utils::compute_note_nullifier(note_hash_for_nullification, [secret]) }) } } diff --git a/noir-projects/aztec-nr/aztec/src/note/utils.nr b/noir-projects/aztec-nr/aztec/src/note/utils.nr index c649b2fb0455..0e8008d64651 100644 --- a/noir-projects/aztec-nr/aztec/src/note/utils.nr +++ b/noir-projects/aztec-nr/aztec/src/note/utils.nr @@ -1,6 +1,40 @@ use crate::{context::NoteExistenceRequest, note::{ConfirmedNote, HintedNote, note_interface::NoteHash}}; -use crate::protocol::hash::{compute_siloed_note_hash, compute_unique_note_hash}; +use crate::protocol::{ + constants::{DOM_SEP__NOTE_HASH, DOM_SEP__NOTE_NULLIFIER}, + hash::{compute_siloed_note_hash, compute_unique_note_hash, poseidon2_hash_with_separator}, +}; + +/// Computes a domain-separated note hash. +/// +/// Receives the `storage_slot` of the [`crate::state_vars::StateVariable`] that holds the note, plus any arbitrary +/// note `data`. This typically includes randomness, owner, and domain specific values (e.g. numeric amount, address, +/// id, etc.). 
+/// +/// Usage of this function guarantees that different state variables will never produce colliding note hashes, even if +/// their underlying notes have different implementations. +pub fn compute_note_hash<let N: u32>(storage_slot: Field, data: [Field; N]) -> Field { + // All state variables have different storage slots, so by placing this at a fixed first position in the preimage + // we prevent collisions. + poseidon2_hash_with_separator([storage_slot].concat(data), DOM_SEP__NOTE_HASH) +} + +/// Computes a domain-separated note nullifier. +/// +/// Receives the `note_hash_for_nullification` of the note (usually returned by +/// [`compute_confirmed_note_hash_for_nullification`]), plus any arbitrary note `data`. This typically includes +/// secrets, such as the app-siloed nullifier hiding key of the note's owner. +/// +/// Usage of this function guarantees that different state variables will never produce colliding note nullifiers, even +/// if their underlying notes have different implementations. +pub fn compute_note_nullifier<let N: u32>(note_hash_for_nullification: Field, data: [Field; N]) -> Field { + // All notes have different note hashes for nullification (i.e. transient or settled), so by placing this at a + // fixed first position in the preimage we prevent collisions. + poseidon2_hash_with_separator( + [note_hash_for_nullification].concat(data), + DOM_SEP__NOTE_NULLIFIER, + ) +} /// Returns the [`NoteExistenceRequest`] used to prove a note exists. pub fn compute_note_existence_request(hinted_note: HintedNote) -> NoteExistenceRequest @@ -26,21 +60,27 @@ where } } -/// Returns the note hash that must be used to compute a note's nullifier when calling `NoteHash::compute_nullifier` or -/// `NoteHash::compute_nullifier_unconstrained`. -pub fn compute_note_hash_for_nullification(hinted_note: HintedNote) -> Field +/// Unconstrained variant of [`compute_confirmed_note_hash_for_nullification`]. 
+pub unconstrained fn compute_note_hash_for_nullification(hinted_note: HintedNote) -> Field where Note: NoteHash, { + // Creating a ConfirmedNote like we do here is typically unsafe, as we've not confirmed existence. We can do it + // here because this is an unconstrained function, so the returned value should not make its way to a constrained + // function. This lets us reuse the `compute_confirmed_note_hash_for_nullification` implementation. compute_confirmed_note_hash_for_nullification(ConfirmedNote::new( hinted_note, compute_note_existence_request(hinted_note).note_hash(), )) } -/// Same as `compute_note_hash_for_nullification`, except it takes the note hash used in a read request (i.e. what -/// `compute_note_existence_request` would return). This is useful in scenarios where that hash has already been -/// computed to reduce constraints by reusing this value. +/// Returns the note hash to use when computing its nullifier. +/// +/// The `note_hash_for_nullification` parameter [`NoteHash::compute_nullifier`] takes depends on the note's stage, e.g. +/// settled notes use the unique note hash, but pending notes cannot as they have no nonce. This function returns the +/// correct note hash to use. +/// +/// Use [`compute_note_hash_for_nullification`] when computing this value in unconstrained functions. pub fn compute_confirmed_note_hash_for_nullification(confirmed_note: ConfirmedNote) -> Field { // There is just one instance in which the note hash for nullification does not match the note hash used for a read // request, which is when dealing with pending previous phase notes. 
These had their existence proven using their diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/delayed_public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/delayed_public_mutable.nr index df53449b4ee2..6c4ee7cb35fb 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/delayed_public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/delayed_public_mutable.nr @@ -401,6 +401,10 @@ where { /// Returns the current value. /// + /// This sets the transaction's `expiration_timestamp` (see + /// [`PrivateContext::set_expiration_timestamp`](crate::context::PrivateContext::set_expiration_timestamp)), + /// invalidating the transaction if not included before that time. See the 'Privacy' section below to learn more. + /// /// If [`schedule_value_change`](DelayedPublicMutable::schedule_value_change) has never been called, then this /// returns the default empty public storage value, which is all zeroes - equivalent to `let t = /// T::unpack(std::mem::zeroed());`. @@ -410,18 +414,16 @@ where /// /// ## Privacy /// - /// This function does leak some privacy, though in a subtle way. Understanding this is key to understanding how to - /// use `DelayedPublicMutable` in a privacy-preserving way. + /// This function leaks information via the transaction's expiration timestamp. The degree of leakage depends on + /// the delay that is set, and on whether a value change is currently scheduled. /// /// Private reads are based on a historical public storage read at the anchor block (i.e. /// [`crate::history::storage::public_storage_historical_read`]). `DelayedPublicMutable` is able to provide - /// guarantees about values read in the past remaining the state variable's current value into the future due to + /// guarantees that values read in the past remain the state variable's current value into the future due to /// the existence of delays when scheduling writes. 
It then sets the `expiration_timestamp` property of the current - /// transaction (see - /// [`PrivateContext::set_expiration_timestamp`](crate::context::PrivateContext::set_expiration_timestamp)) to - /// ensure that the transaction can only be included in a block **prior** to the state variable's value changing. - /// In other words, it knows some facts about the near future up until some time horizon, and then makes sure that - /// it doesn't act on this knowledge past said moment. + /// transaction to ensure that the transaction can only be included in a block **prior** to the state variable's + /// value changing. In other words, it knows some facts about the near future up until some time horizon, and then + /// makes sure that it doesn't act on this knowledge past said moment. /// /// Because the `expiration_timestamp` property is part of the transaction's public information, any mutation to /// this value could result in transaction fingerprinting. Note that multiple contracts may set this value during a @@ -470,7 +472,7 @@ where /// /// ## Cost /// - /// This function performs a historical public storage read (which is in the order of 4k gates), **regardless of + /// This function performs a historical public storage read (which is ~4k gates), **regardless of /// `T`'s packed length**. This is because [`DelayedPublicMutable::schedule_value_change`] stores not just the /// value but also its hash: this function obtains the preimage from an oracle and proves that it matches the hash /// from public storage. 
diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr index 92d61704fa49..a1ea5468c434 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr @@ -2,6 +2,8 @@ use crate::context::{PublicContext, UtilityContext}; use crate::protocol::traits::Packable; use crate::state_vars::StateVariable; +mod test; + /// Mutable public values. /// /// This is one of the most basic public state variables. It is equivalent to a non-`immutable` non-`constant` Solidity diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable/test.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable/test.nr index 29154c42d466..3c07e5bf1fbc 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable/test.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable/test.nr @@ -1,4 +1,4 @@ -use crate::{context::{PublicContext, UtilityContext}, state_vars::PublicMutable}; +use crate::{context::{PublicContext, UtilityContext}, state_vars::{PublicMutable, StateVariable}}; use crate::test::{helpers::test_environment::TestEnvironment, mocks::MockStruct}; use std::mem::zeroed; diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/single_use_claim.nr b/noir-projects/aztec-nr/aztec/src/state_vars/single_use_claim.nr index 4945119afbc7..28f0ac6f860d 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/single_use_claim.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/single_use_claim.nr @@ -1,5 +1,6 @@ use crate::protocol::{ - address::AztecAddress, constants::DOM_SEP__NOTE_HASH, hash::poseidon2_hash_with_separator, traits::Hash, + address::AztecAddress, constants::DOM_SEP__SINGLE_USE_CLAIM_NULLIFIER, hash::poseidon2_hash_with_separator, + traits::Hash, }; use crate::{ @@ -62,7 +63,6 @@ mod test; /// /// Public effects you emit alongside a claim (e.g. 
a public function call to update a tally) may still let observers /// infer who likely exercised the claim, so consider that when designing flows. -/// ``` pub struct SingleUseClaim { context: Context, storage_slot: Field, @@ -82,8 +82,10 @@ impl SingleUseClaim { /// This function is primarily used internally by functions [`SingleUseClaim::claim`], /// [`SingleUseClaim::assert_claimed`] and [`SingleUseClaim::has_claimed`] to coherently write and read state. fn compute_nullifier(self, owner_nhk_app: Field) -> Field { - // TODO(F-180): make sure we follow the nullifier convention - poseidon2_hash_with_separator([owner_nhk_app, self.storage_slot], DOM_SEP__NOTE_HASH) + poseidon2_hash_with_separator( + [owner_nhk_app, self.storage_slot], + DOM_SEP__SINGLE_USE_CLAIM_NULLIFIER, + ) } } diff --git a/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr b/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr index fabceb778c14..bee037c40061 100644 --- a/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr +++ b/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr @@ -5,11 +5,9 @@ use crate::{ note::{HintedNote, note_interface::{NoteHash, NoteType}, note_metadata::NoteMetadata}, }; -use crate::protocol::{ - address::AztecAddress, - constants::{DOM_SEP__NOTE_HASH, DOM_SEP__NOTE_NULLIFIER}, - hash::poseidon2_hash_with_separator, - traits::{Packable, ToField}, +use crate::{ + note::utils::{compute_note_hash, compute_note_nullifier}, + protocol::{address::AztecAddress, traits::{Packable, ToField}}, }; #[derive(Eq, Packable)] @@ -26,8 +24,8 @@ impl NoteType for MockNote { impl NoteHash for MockNote { fn compute_note_hash(self, owner: AztecAddress, storage_slot: Field, randomness: Field) -> Field { - let input = self.pack().concat([owner.to_field(), storage_slot, randomness]); - poseidon2_hash_with_separator(input, DOM_SEP__NOTE_HASH) + let data = self.pack().concat([owner.to_field(), randomness]); + compute_note_hash(storage_slot, data) } fn compute_nullifier( @@ 
-38,10 +36,7 @@ impl NoteHash for MockNote { ) -> Field { // We don't use any kind of secret here since this is only a mock note and having it here would make tests more // cumbersome - poseidon2_hash_with_separator( - [note_hash_for_nullification], - DOM_SEP__NOTE_NULLIFIER as Field, - ) + compute_note_nullifier(note_hash_for_nullification, []) } unconstrained fn compute_nullifier_unconstrained( @@ -51,12 +46,7 @@ impl NoteHash for MockNote { ) -> Option { // We don't use any kind of secret here since this is only a mock note and having it here would make tests more // cumbersome - Option::some( - poseidon2_hash_with_separator( - [note_hash_for_nullification], - DOM_SEP__NOTE_NULLIFIER as Field, - ), - ) + Option::some(compute_note_nullifier(note_hash_for_nullification, [])) } } diff --git a/noir-projects/aztec-nr/uint-note/src/uint_note.nr b/noir-projects/aztec-nr/uint-note/src/uint_note.nr index 0fb84e2515c5..00d62a9ea45f 100644 --- a/noir-projects/aztec-nr/uint-note/src/uint_note.nr +++ b/noir-projects/aztec-nr/uint-note/src/uint_note.nr @@ -4,14 +4,11 @@ use aztec::{ keys::getters::{get_nhk_app, get_public_keys, try_get_public_keys}, macros::notes::custom_note, messages::logs::partial_note::compute_partial_note_private_content_log, - note::note_interface::{NoteHash, NoteType}, + note::{note_interface::{NoteHash, NoteType}, utils::compute_note_nullifier}, oracle::random::random, protocol::{ address::AztecAddress, - constants::{ - DOM_SEP__NOTE_HASH, DOM_SEP__NOTE_NULLIFIER, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT, - PRIVATE_LOG_SIZE_IN_FIELDS, - }, + constants::{DOM_SEP__NOTE_HASH, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT, PRIVATE_LOG_SIZE_IN_FIELDS}, hash::{compute_siloed_nullifier, poseidon2_hash_with_separator}, traits::{Deserialize, FromField, Hash, Packable, Serialize, ToField}, }, @@ -65,10 +62,7 @@ impl NoteHash for UintNote { let owner_npk_m = get_public_keys(owner).npk_m; let owner_npk_m_hash = owner_npk_m.hash(); let secret = 
context.request_nhk_app(owner_npk_m_hash); - poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - DOM_SEP__NOTE_NULLIFIER, - ) + compute_note_nullifier(note_hash_for_nullification, [secret]) } unconstrained fn compute_nullifier_unconstrained( @@ -80,10 +74,7 @@ impl NoteHash for UintNote { let owner_npk_m = public_keys.npk_m; let owner_npk_m_hash = owner_npk_m.hash(); let secret = get_nhk_app(owner_npk_m_hash); - poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - DOM_SEP__NOTE_NULLIFIER, - ) + compute_note_nullifier(note_hash_for_nullification, [secret]) }) } } diff --git a/noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr index 3d022fc8b76f..79257cb8b8a9 100644 --- a/noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr @@ -7,7 +7,10 @@ pub contract Claim { // docs:end:history_import use aztec::{ macros::{functions::{external, initializer}, storage::storage}, - note::{HintedNote, note_interface::NoteHash, utils::compute_note_hash_for_nullification}, + note::{ + HintedNote, note_interface::NoteHash, + utils::compute_confirmed_note_hash_for_nullification, + }, protocol::address::AztecAddress, state_vars::PublicImmutable, }; @@ -48,7 +51,7 @@ pub contract Claim { // 3) Prove that the note hash exists in the note hash tree // docs:start:prove_note_inclusion let header = self.context.get_anchor_block_header(); - let _ = assert_note_existed_by(header, hinted_note); + let confirmed_note = assert_note_existed_by(header, hinted_note); // docs:end:prove_note_inclusion // 4) Compute and emit a nullifier which is unique to the note and this contract to ensure the reward can be @@ -58,7 +61,8 @@ pub contract Claim { // the address of a contract it was emitted from. 
// TODO(#7775): manually computing the hash and passing it to compute_nullifier func is not great as note could // handle it on its own or we could make assert_note_existed_by return note_hash_for_nullification. - let note_hash_for_nullification = compute_note_hash_for_nullification(hinted_note); + let note_hash_for_nullification = + compute_confirmed_note_hash_for_nullification(confirmed_note); let nullifier = hinted_note.note.compute_nullifier( self.context, hinted_note.owner, diff --git a/noir-projects/noir-contracts/contracts/app/nft_contract/src/types/nft_note.nr b/noir-projects/noir-contracts/contracts/app/nft_contract/src/types/nft_note.nr index 52c59fba9592..0e780fd41c34 100644 --- a/noir-projects/noir-contracts/contracts/app/nft_contract/src/types/nft_note.nr +++ b/noir-projects/noir-contracts/contracts/app/nft_contract/src/types/nft_note.nr @@ -3,13 +3,11 @@ use aztec::{ keys::getters::{get_nhk_app, get_public_keys, try_get_public_keys}, macros::notes::custom_note, messages::logs::partial_note::compute_partial_note_private_content_log, - note::note_interface::{NoteHash, NoteType}, + note::{note_interface::{NoteHash, NoteType}, utils::compute_note_nullifier}, oracle::random::random, protocol::{ address::AztecAddress, - constants::{ - DOM_SEP__NOTE_HASH, DOM_SEP__NOTE_NULLIFIER, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT, - }, + constants::{DOM_SEP__NOTE_HASH, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT}, hash::poseidon2_hash_with_separator, traits::{Deserialize, Hash, Packable, Serialize, ToField}, }, @@ -66,10 +64,7 @@ impl NoteHash for NFTNote { let owner_npk_m = get_public_keys(owner).npk_m; let owner_npk_m_hash = owner_npk_m.hash(); let secret = context.request_nhk_app(owner_npk_m_hash); - poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - DOM_SEP__NOTE_NULLIFIER, - ) + compute_note_nullifier(note_hash_for_nullification, [secret]) } unconstrained fn compute_nullifier_unconstrained( @@ -81,10 +76,7 @@ impl NoteHash for NFTNote { let 
owner_npk_m = public_keys.npk_m; let owner_npk_m_hash = owner_npk_m.hash(); let secret = get_nhk_app(owner_npk_m_hash); - poseidon2_hash_with_separator( - [note_hash_for_nullification, secret], - DOM_SEP__NOTE_NULLIFIER, - ) + compute_note_nullifier(note_hash_for_nullification, [secret]) }) } } diff --git a/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/main.nr index 55757d6289d0..201954b1c4af 100644 --- a/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/main.nr @@ -145,9 +145,9 @@ pub contract Orderbook { // Nullify the order such that it cannot be fulfilled again. We emit a nullifier instead of deleting the order // from public storage because we get no refund for resetting public storage to zero and just emitting - // a nullifier is cheaper (1 Field in DA instead of multiple Fields for the order). We use the `order_id` - // itself as the nullifier because this contract does not work with notes and hence there is no risk of - // colliding with a real note nullifier. + // a nullifier is cheaper (1 Field in DA instead of multiple Fields for the order). + // TODO(F-399): pushing a raw nullifier with no domain separator like we do here is unsafe: we should instead + // use something like a singleton `SingleUseClaim`. 
self.context.push_nullifier(order_id); // Enqueue the fulfillment to finalize both partial notes diff --git a/noir-projects/noir-contracts/contracts/app/token_blacklist_contract/src/types/transparent_note.nr b/noir-projects/noir-contracts/contracts/app/token_blacklist_contract/src/types/transparent_note.nr index 805e63a72ccd..85f941e9fcdb 100644 --- a/noir-projects/noir-contracts/contracts/app/token_blacklist_contract/src/types/transparent_note.nr +++ b/noir-projects/noir-contracts/contracts/app/token_blacklist_contract/src/types/transparent_note.nr @@ -1,13 +1,8 @@ use aztec::{ context::PrivateContext, macros::notes::custom_note, - note::note_interface::NoteHash, - protocol::{ - address::AztecAddress, - constants::{DOM_SEP__NOTE_HASH, DOM_SEP__NOTE_NULLIFIER}, - hash::poseidon2_hash_with_separator, - traits::Packable, - }, + note::{note_interface::NoteHash, utils::{compute_note_hash, compute_note_nullifier}}, + protocol::{address::AztecAddress, traits::Packable}, }; use std::mem::zeroed; @@ -29,8 +24,8 @@ impl NoteHash for TransparentNote { storage_slot: Field, randomness: Field, ) -> Field { - let inputs = self.pack().concat([storage_slot, randomness]); - poseidon2_hash_with_separator(inputs, DOM_SEP__NOTE_HASH) + let data = self.pack().concat([randomness]); + compute_note_hash(storage_slot, data) } // Computing a nullifier in a transparent note is not guarded by making secret a part of the nullifier preimage (as @@ -47,10 +42,7 @@ impl NoteHash for TransparentNote { _owner: AztecAddress, note_hash_for_nullification: Field, ) -> Field { - poseidon2_hash_with_separator( - [note_hash_for_nullification], - DOM_SEP__NOTE_NULLIFIER as Field, - ) + compute_note_nullifier(note_hash_for_nullification, []) } unconstrained fn compute_nullifier_unconstrained( diff --git a/noir-projects/noir-contracts/contracts/protocol/contract_class_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/protocol/contract_class_registry_contract/src/main.nr index 
629497482dca..89791e564fff 100644 --- a/noir-projects/noir-contracts/contracts/protocol/contract_class_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/protocol/contract_class_registry_contract/src/main.nr @@ -93,6 +93,7 @@ pub contract ContractClassRegistry { // Emit the contract class id as a nullifier: // - to demonstrate that this contract class hasn't been published before // - to enable apps to read that this contract class has been published. + // We use no domain separators because these are the only nullifiers this contract uses. context.push_nullifier(contract_class_id.to_field()); // Broadcast class info including public bytecode diff --git a/noir-projects/noir-contracts/contracts/protocol/contract_instance_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/protocol/contract_instance_registry_contract/src/main.nr index eb48b949d35a..bc9a5eed0b94 100644 --- a/noir-projects/noir-contracts/contracts/protocol/contract_instance_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/protocol/contract_instance_registry_contract/src/main.nr @@ -139,6 +139,7 @@ pub contract ContractInstanceRegistry { let address = AztecAddress::compute(public_keys, partial_address); // Emit address as nullifier: prevents duplicate deployment and proves publication. + // We use no domain separators because these are the only nullifiers this contract uses. context.push_nullifier(address.to_field()); // Broadcast deployment event. 
Uses non-standard serialization (2 fields per point, diff --git a/noir-projects/noir-contracts/contracts/test/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/spam_contract/src/main.nr index 9d26399f290e..7519a67fd2f2 100644 --- a/noir-projects/noir-contracts/contracts/test/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/spam_contract/src/main.nr @@ -6,12 +6,10 @@ pub contract Spam { use aztec::{ macros::{functions::{external, only_self}, storage::storage}, messages::message_delivery::MessageDelivery, - protocol::{ - constants::{ - DOM_SEP__NOTE_NULLIFIER, MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - }, - hash::poseidon2_hash_with_separator, + note::utils::compute_note_nullifier, + protocol::constants::{ + MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, }, state_vars::{Map, Owned, PublicMutable}, }; @@ -34,10 +32,7 @@ pub contract Spam { for i in 0..MAX_NULLIFIERS_PER_CALL { if (i < nullifier_count) { - self.context.push_nullifier(poseidon2_hash_with_separator( - [nullifier_seed, i as Field], - DOM_SEP__NOTE_NULLIFIER as Field, - )); + self.context.push_nullifier(compute_note_nullifier(nullifier_seed, [i as Field])); } } diff --git a/noir-projects/noir-contracts/contracts/test/test_contract/src/test_note.nr b/noir-projects/noir-contracts/contracts/test/test_contract/src/test_note.nr index a0d4d6b00dad..a7a7a6260547 100644 --- a/noir-projects/noir-contracts/contracts/test/test_contract/src/test_note.nr +++ b/noir-projects/noir-contracts/contracts/test/test_contract/src/test_note.nr @@ -1,11 +1,8 @@ use aztec::{ context::PrivateContext, macros::notes::custom_note, - note::note_interface::NoteHash, - protocol::{ - address::AztecAddress, constants::DOM_SEP__NOTE_HASH, hash::poseidon2_hash_with_separator, - traits::Packable, - }, + note::{note_interface::NoteHash, utils::compute_note_hash}, + protocol::{address::AztecAddress, 
traits::Packable}, }; /// A note used only for testing purposes. @@ -24,8 +21,8 @@ impl NoteHash for TestNote { ) -> Field { // The note is inserted into the state in the Test contract so we provide a real compute_note_hash // implementation. - let inputs = self.pack().concat([storage_slot, randomness]); - poseidon2_hash_with_separator(inputs, DOM_SEP__NOTE_HASH) + let data = self.pack().concat([randomness]); + compute_note_hash(storage_slot, data) } fn compute_nullifier( diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 38f209bb0006..a49e33607a70 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -634,16 +634,49 @@ pub global MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS: u32 = 21; // See ./constants_tests.nr for how the values are generated. // --------------------------------------------------------------- -// Note hash generator index which can be used by custom implementations of NoteHash::compute_note_hash +/// Domain separator for note hashes. +/// +/// This is not technically a protocol constant as note hashes are computed by each contract. pub global DOM_SEP__NOTE_HASH: u32 = 116501019; -pub global DOM_SEP__NOTE_HASH_NONCE: u32 = 1721808740; -pub global DOM_SEP__UNIQUE_NOTE_HASH: u32 = 226850429; + +/// Domain separator for siloed note hashes. +/// +/// Used by [`crate::hash::compute_siloed_note_hash`]. pub global DOM_SEP__SILOED_NOTE_HASH: u32 = 3361878420; +/// Domain separator for unique note hashes. +/// +/// Used by [`crate::hash::compute_unique_note_hash`]. +pub global DOM_SEP__UNIQUE_NOTE_HASH: u32 = 226850429; + +/// Domain separator for nonces. +/// +/// Used by [`crate::hash::compute_note_hash_nonce`]. +pub global DOM_SEP__NOTE_HASH_NONCE: u32 = 1721808740; + +/// Domain separator for `SingleUseClaim` nullifiers. 
+/// +/// This is not technically a protocol constant as these nullifiers are computed by each contract. +pub global DOM_SEP__SINGLE_USE_CLAIM_NULLIFIER: u32 = 1465998995; + +/// Domain separator for note nullifiers. +/// +/// This is not technically a protocol constant as note nullifiers are computed by each contract. pub global DOM_SEP__NOTE_NULLIFIER: u32 = 50789342; -pub global DOM_SEP__MESSAGE_NULLIFIER: u32 = 3754509616; + +/// Domain separator for siloed nullifiers. +/// +/// Used by [`crate::hash::compute_siloed_nullifier`]. pub global DOM_SEP__SILOED_NULLIFIER: u32 = 57496191; +/// Domain separator for L1 to L2 message nullifiers. +/// +/// This is not technically a protocol constant as message nullifiers are computed by each contract. +pub global DOM_SEP__MESSAGE_NULLIFIER: u32 = 3754509616; + +/// Domain separator for private log tags. +/// +/// Used by [`crate::hash::compute_siloed_private_log_first_field`]. pub global DOM_SEP__PRIVATE_LOG_FIRST_FIELD: u32 = 2769976252; pub global DOM_SEP__PUBLIC_LEAF_SLOT: u32 = 1247650290; @@ -710,11 +743,29 @@ pub global DOM_SEP__CIPHERTEXT_FIELD_MASK: u32 = 1870492847; pub global DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT: u32 = 623934423; // --------------------------------------------------------------- -// TODO: consider moving these to aztec-nr +// TODO(F-397): move these to aztec-nr, along with note hash, note nullifier, message nullifier and single use claim +// nullifier. +/// Domain separator for state variable initialization. +/// +/// Should not be reused for a given storage slot. pub global DOM_SEP__INITIALIZATION_NULLIFIER: u32 = 1653084894; + +/// Domain separator for L1 to L2 message secret hashes. pub global DOM_SEP__SECRET_HASH: u32 = 4199652938; + +/// Domain separator for transaction nullifiers. +/// +/// Used to produce cancellable (replaceable) transactions. +/// +/// This is not technically a protocol constant as cancellable transactions are an account contract feature. 
pub global DOM_SEP__TX_NULLIFIER: u32 = 1025801951; + +/// Domain separator for account contract payloads. +/// +/// Used to check for authorization to execute a payload. +/// +/// This is not technically a protocol constant as payload authorization is an account contract feature. pub global DOM_SEP__SIGNATURE_PAYLOAD: u32 = 463525807; // --------------------------------------------------------------- diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants_tests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants_tests.nr index 6f7c9dee1cc1..a3a9f80fe9d5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants_tests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants_tests.nr @@ -17,9 +17,10 @@ use crate::{ DOM_SEP__PUBLIC_BYTECODE, DOM_SEP__PUBLIC_CALLDATA, DOM_SEP__PUBLIC_KEYS_HASH, DOM_SEP__PUBLIC_LEAF_SLOT, DOM_SEP__PUBLIC_STORAGE_MAP_SLOT, DOM_SEP__PUBLIC_TX_HASH, DOM_SEP__SECRET_HASH, DOM_SEP__SIGNATURE_PAYLOAD, DOM_SEP__SILOED_NOTE_HASH, - DOM_SEP__SILOED_NULLIFIER, DOM_SEP__SYMMETRIC_KEY, DOM_SEP__SYMMETRIC_KEY_2, DOM_SEP__TSK_M, - DOM_SEP__TX_NULLIFIER, DOM_SEP__TX_REQUEST, DOM_SEP__UNIQUE_NOTE_HASH, - NULL_MSG_SENDER_CONTRACT_ADDRESS, SIDE_EFFECT_MASKING_ADDRESS, TX_START_PREFIX, + DOM_SEP__SILOED_NULLIFIER, DOM_SEP__SINGLE_USE_CLAIM_NULLIFIER, DOM_SEP__SYMMETRIC_KEY, + DOM_SEP__SYMMETRIC_KEY_2, DOM_SEP__TSK_M, DOM_SEP__TX_NULLIFIER, DOM_SEP__TX_REQUEST, + DOM_SEP__UNIQUE_NOTE_HASH, NULL_MSG_SENDER_CONTRACT_ADDRESS, SIDE_EFFECT_MASKING_ADDRESS, + TX_START_PREFIX, }, hash::poseidon2_hash_bytes, traits::{FromField, ToField}, @@ -129,7 +130,7 @@ impl HashedValueTester::new(); + let mut tester = HashedValueTester::<51, 44>::new(); // ----------------- // Domain separators @@ -139,6 +140,10 @@ fn hashed_values_match_derived() { tester.assert_dom_sep_matches_derived(DOM_SEP__UNIQUE_NOTE_HASH, "unique_note_hash"); tester.assert_dom_sep_matches_derived(DOM_SEP__SILOED_NOTE_HASH, 
"siloed_note_hash"); tester.assert_dom_sep_matches_derived(DOM_SEP__NOTE_NULLIFIER, "note_nullifier"); + tester.assert_dom_sep_matches_derived( + DOM_SEP__SINGLE_USE_CLAIM_NULLIFIER, + "single_use_claim_nullifier", + ); tester.assert_dom_sep_matches_derived(DOM_SEP__SILOED_NULLIFIER, "siloed_nullifier"); tester.assert_dom_sep_matches_derived( DOM_SEP__PRIVATE_LOG_FIRST_FIELD, diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 8ea062dcafd2..df0af1e38d73 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -61,6 +61,7 @@ "@aztec/validator-ha-signer": "workspace:^", "@aztec/wallets": "workspace:^", "@aztec/world-state": "workspace:^", + "@iarna/toml": "^2.2.5", "@types/chalk": "^2.2.0", "abitype": "^0.8.11", "chalk": "^5.3.0", diff --git a/yarn-project/aztec/scripts/aztec.sh b/yarn-project/aztec/scripts/aztec.sh index 45ebe59f7ef3..16e64d62cd4f 100755 --- a/yarn-project/aztec/scripts/aztec.sh +++ b/yarn-project/aztec/scripts/aztec.sh @@ -20,6 +20,9 @@ function aztec { case $cmd in test) + # Attempt to compile, no-op if there are no changes + node --no-warnings "$script_dir/../dest/bin/index.js" compile + export LOG_LEVEL="${LOG_LEVEL:-"error;trace:contract_log"}" aztec start --txe --port 8081 & server_pid=$! diff --git a/yarn-project/aztec/src/cli/cmds/compile.ts b/yarn-project/aztec/src/cli/cmds/compile.ts index a4b2af7c1b30..dc7ddf87b25f 100644 --- a/yarn-project/aztec/src/cli/cmds/compile.ts +++ b/yarn-project/aztec/src/cli/cmds/compile.ts @@ -6,6 +6,7 @@ import { readFile, writeFile } from 'fs/promises'; import { join } from 'path'; import { readArtifactFiles } from './utils/artifacts.js'; +import { needsRecompile } from './utils/needs_recompile.js'; import { run } from './utils/spawn.js'; /** Returns paths to contract artifacts in the target directory. 
*/ @@ -137,6 +138,11 @@ async function checkNoTestsInContracts(nargo: string, log: LogFn): Promise /** Compiles Aztec Noir contracts and postprocesses artifacts. */ async function compileAztecContract(nargoArgs: string[], log: LogFn): Promise { + if (!(await needsRecompile())) { + log('No source changes detected, skipping compilation.'); + return; + } + const nargo = process.env.NARGO ?? 'nargo'; const bb = process.env.BB ?? 'bb'; diff --git a/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts new file mode 100644 index 000000000000..9f2755898be6 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts @@ -0,0 +1,278 @@ +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { mkdir, rm, utimes, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { needsRecompile } from './needs_recompile.js'; + +/** Create a file (if needed) and set its timestamp to the given value (seconds since epoch). */ +async function touch(filePath: string, timeSec: number) { + // we apply the 'a' flag to mimic the behavior of the touch command, which does not change the contents of a file if it already + // exists + await writeFile(filePath, '', { flag: 'a' }); + await utimes(filePath, timeSec, timeSec); +} + +/** Create a directory (recursively). */ +async function mkdirp(dir: string) { + await mkdir(dir, { recursive: true }); +} + +describe('needsRecompile', () => { + let tempDir: string; + let originalCwd: string; + + beforeEach(async () => { + originalCwd = process.cwd(); + // Create a unique temp directory and chdir into it so needsRecompile() + // resolves its relative paths ('target', '.') against our test fixtures. 
+ tempDir = join(tmpdir(), `needs-recompile-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await mkdirp(tempDir); + process.chdir(tempDir); + }); + + afterEach(async () => { + process.chdir(originalCwd); + await rm(tempDir, { recursive: true, force: true }); + }); + + it('returns true when target directory does not exist', async () => { + // No target/ at all — always needs recompile. + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when target directory is empty (no .json artifacts)', async () => { + await mkdirp('target'); + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when target has only non-json files', async () => { + await mkdirp('target'); + await touch(join('target', 'something.txt'), 1000); + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns false when artifacts are newer than all sources', async () => { + // Source files at t=1000, artifact at t=2000. 
+ await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(false); + }); + + it('returns true when a .nr source file is newer than the newest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('target', 'artifact.json'), 2000); + await touch(join('src', 'main.nr'), 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when Nargo.toml is newer than the newest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 3000, 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('follows path-based dependencies and detects newer sources in them', async () => { + // Main project depends on a local library via path. + const libDir = join(tempDir, 'lib', 'my_dep'); + await mkdirp(join(libDir, 'src')); + await mkdirp('src'); + await mkdirp('target'); + + // Main project Nargo.toml with a path dependency. + const mainToml = `[package] +name = "test" +type = "contract" + +[dependencies] +my_dep = { path = "lib/my_dep" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + // Dependency Nargo.toml + await writeFile(join(libDir, 'Nargo.toml'), '[package]\nname = "my_dep"\ntype = "lib"\n'); + await utimes(join(libDir, 'Nargo.toml'), 1000, 1000); + + // Source files — all old. 
+ await touch(join('src', 'main.nr'), 1000); + await touch(join(libDir, 'src', 'lib.nr'), 1000); + + // Artifact is newer than all sources. + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(false); + + // Now update a source file in the dependency. + await utimes(join(libDir, 'src', 'lib.nr'), 3000, 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('ignores git-based dependencies (no path field)', async () => { + await mkdirp('src'); + await mkdirp('target'); + + // Nargo.toml with a git dependency only. + const toml = `[package] +name = "test" +type = "contract" + +[dependencies] +aztec = { git = "https://github.com/example/repo", tag = "v1.0" } +`; + await writeFile('Nargo.toml', toml); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Should return false and not error out because of invalid links — git deps are not searched through since they + // are fixed to a tag in Nargo.toml (and if Nargo.toml got modified we would detect it). + expect(await needsRecompile()).toBe(false); + }); + + it('skips target/ directories when scanning for source files', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Place a newer .nr file inside a nested target/ directory. + // This should be ignored. 
+ await mkdirp(join('src', 'target')); + await touch(join('src', 'target', 'cached.nr'), 5000); + + expect(await needsRecompile()).toBe(false); + }); + + it('compares against the oldest artifact when multiple exist', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 2500); + // Two artifacts: one old, one very new. + await touch(join('target', 'old_artifact.json'), 2000); + await touch(join('target', 'new_artifact.json'), 3000); + + // Source (2500) is newer than the oldest artifact (2000), so recompile. + expect(await needsRecompile()).toBe(true); + }); + + it('returns false when all sources are older than the oldest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'old_artifact.json'), 2000); + await touch(join('target', 'new_artifact.json'), 3000); + + // Source (1000) < oldest artifact (2000), no recompile. + expect(await needsRecompile()).toBe(false); + }); + + it('handles deeply nested .nr source files', async () => { + await mkdirp('src/nested/deep'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'nested', 'deep', 'module.nr'), 3000); + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(true); + }); + + it('throws when a path dependency resolves to a file instead of a directory', async () => { + await mkdirp('src'); + await mkdirp('target'); + + // Create a file where the dependency path points. 
+ await writeFile('not_a_dir', 'I am a file'); + + const mainToml = `[package] +name = "test" +type = "contract" + +[dependencies] +bad_dep = { path = "not_a_dir" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + await expect(needsRecompile()).rejects.toThrow('which is not a directory'); + }); + + it('does not follow circular path dependencies', async () => { + // Two projects that depend on each other via path. + const libDir = join(tempDir, 'lib'); + await mkdirp(join(libDir, 'src')); + await mkdirp('src'); + await mkdirp('target'); + + const mainToml = `[package] +name = "main" +type = "contract" + +[dependencies] +lib = { path = "lib" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + // lib depends back on the main project. + const libToml = `[package] +name = "lib" +type = "lib" + +[dependencies] +main = { path = ".." } +`; + await writeFile(join(libDir, 'Nargo.toml'), libToml); + await utimes(join(libDir, 'Nargo.toml'), 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join(libDir, 'src', 'lib.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Should not infinite-loop; should return false since all sources are old. 
+ expect(await needsRecompile()).toBe(false); + }); +}); diff --git a/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts new file mode 100644 index 000000000000..c66bfc5bee08 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts @@ -0,0 +1,139 @@ +import TOML from '@iarna/toml'; +import { readFile, readdir, stat } from 'fs/promises'; +import { join, resolve } from 'path'; + +/** + * Returns true if recompilation is needed: either no artifacts exist in target/ or any .nr or Nargo.toml source file + * (including path-based dependencies) is newer than the oldest artifact. We compare against the oldest artifact so + * that a source change between the oldest and newest compilation (e.g. in a multi-contract workspace) still triggers + * a recompile. + * + * Note: The above implies that if there is a random json file in the target dir we would always be recompiling. + */ +export async function needsRecompile(): Promise { + const oldestArtifactMs = await getOldestArtifactModificationTime('target'); + if (oldestArtifactMs === undefined) { + return true; + } + + const crateDirs = await collectCrateDirs('.'); + return hasNewerSourceFile(crateDirs, oldestArtifactMs); +} + +/** + * Returns the last modification time (timestamp in ms) of the oldest .json artifact in targetDir, or undefined if + * none exist. 
+ */ +async function getOldestArtifactModificationTime(targetDir: string): Promise { + let entries: string[]; + try { + entries = (await readdir(targetDir)).filter(f => f.endsWith('.json')); + } catch (err: any) { + if (err?.code === 'ENOENT') { + return undefined; + } + throw err; + } + + if (entries.length === 0) { + return undefined; + } + + let oldest = Infinity; + for (const entry of entries) { + const s = await stat(join(targetDir, entry)); + if (s.mtimeMs < oldest) { + oldest = s.mtimeMs; + } + } + return oldest; +} + +/** + * Recursively collects crate directories starting from startCrateDir by following path-based dependencies declared in + * Nargo.toml files. Git-based deps are ignored (they only change when Nargo.toml itself is modified since the deps are + * tagged). + */ +async function collectCrateDirs(startCrateDir: string): Promise { + // We have a set of visited dirs we check against when entering a new dir because we could stumble upon a directory + // multiple times in case multiple deps shared a dep (e.g. dep A and dep B both sharing dep C). + const visited = new Set(); + + async function visit(crateDir: string): Promise { + const absDir = resolve(crateDir); + if (visited.has(absDir)) { + return; + } + visited.add(absDir); + + // Every dep is its own crate and every crate needs to have Nargo.toml defined in the root so we try to load it and + // error out if it's not the case. + const tomlPath = join(absDir, 'Nargo.toml'); + const content = await readFile(tomlPath, 'utf-8').catch(() => { + throw new Error(`Incorrectly defined dependency. Nargo.toml not found in ${absDir}`); + }); + + // We parse and iterate over the dependencies + const parsed = TOML.parse(content) as Record; + const deps = (parsed.dependencies as Record) ?? 
{}; + for (const dep of Object.values(deps)) { + if (dep && typeof dep === 'object' && typeof dep.path === 'string') { + const depPath = resolve(absDir, dep.path); + const s = await stat(depPath); + if (!s.isDirectory()) { + throw new Error( + `Dependency path "${dep.path}" in ${tomlPath} resolves to ${depPath} which is not a directory`, + ); + } + await visit(depPath); + } + } + } + + await visit(startCrateDir); + return [...visited]; +} + +/** + * Walks crate dirs looking for .nr and Nargo.toml files newer than thresholdMs. Short-circuits on the first match. + */ +async function hasNewerSourceFile(crateDirs: string[], thresholdMs: number): Promise { + // Returns true if it finds a file newer than thresholdMs, false otherwise + async function walkForNewer(dir: string): Promise { + let entries; + try { + entries = await readdir(dir, { withFileTypes: true }); + } catch { + return false; + } + + // We iterate over the entries in the dir + for (const entry of entries) { + const fullPath = join(dir, entry.name); + if (entry.isDirectory()) { + // If the entry is a dir and it's not called `target` we recursively enter it + if (entry.name === 'target') { + continue; + } + if (await walkForNewer(fullPath)) { + return true; + } + } else if (entry.name === 'Nargo.toml' || entry.name.endsWith('.nr')) { + // The entry is a Nargo.toml file or *.nr file so we check the timestamp + const s = await stat(fullPath); + if (s.mtimeMs > thresholdMs) { + return true; + } + } + } + return false; + } + + // We search through the crate dirs + for (const dir of crateDirs) { + if (await walkForNewer(dir)) { + return true; + } + } + return false; +} diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts index 02e5f378c460..3ce458889e16 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts +++ 
b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts @@ -275,7 +275,7 @@ describe('Private Execution test suite', () => { const computeNoteHash = (note: Note, owner: AztecAddress, storageSlot: Fr, randomness: Fr) => { // We're assuming here that the note hash function is the default one injected by the #[note] macro. return poseidon2HashWithSeparator( - [...note.items, owner.toField(), storageSlot, randomness], + [storageSlot, ...note.items, owner.toField(), randomness], DomainSeparator.NOTE_HASH, ); }; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 49d3009e27f0..c41f37ce40a2 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -910,6 +910,7 @@ __metadata: "@aztec/validator-ha-signer": "workspace:^" "@aztec/wallets": "workspace:^" "@aztec/world-state": "workspace:^" + "@iarna/toml": "npm:^2.2.5" "@jest/globals": "npm:^30.0.0" "@types/chalk": "npm:^2.2.0" "@types/jest": "npm:^30.0.0"