diff --git a/docs/docs-developers/docs/aztec-js/aztec_js_reference.md b/docs/docs-developers/docs/aztec-js/aztec_js_reference.md index 9ae2605af87f..3010573e9e88 100644 --- a/docs/docs-developers/docs/aztec-js/aztec_js_reference.md +++ b/docs/docs-developers/docs/aztec-js/aztec_js_reference.md @@ -4830,7 +4830,7 @@ Helper type that represents all methods that can be batched. ```typescript export type BatchableMethods = Pick< Wallet, - 'registerContract' | 'sendTx' | 'registerSender' | 'simulateUtility' | 'simulateTx' + 'registerContract' | 'sendTx' | 'registerSender' | 'executeUtility' | 'simulateTx' >; ``` @@ -5040,7 +5040,7 @@ export type Wallet = { secretKey?: Fr, ): Promise; simulateTx(exec: ExecutionPayload, opts: SimulateOptions): Promise; - simulateUtility(call: FunctionCall, authwits?: AuthWitness[]): Promise; + executeUtility(call: FunctionCall, authwits?: AuthWitness[]): Promise; profileTx(exec: ExecutionPayload, opts: ProfileOptions): Promise; sendTx(exec: ExecutionPayload, opts: SendOptions): Promise; createAuthWit(from: AztecAddress, messageHashOrIntent: Fr | IntentInnerHash | CallIntent): Promise; @@ -5210,15 +5210,15 @@ simulateTx( **Returns:** `Promise` -##### simulateUtility +##### executeUtility **Signature:** ```typescript -simulateUtility( +executeUtility( call: FunctionCall, authwits?: AuthWitness[] -): Promise +): Promise ``` **Parameters:** @@ -5228,7 +5228,7 @@ simulateUtility( **Returns:** -`Promise` +`Promise` ##### profileTx **Signature:** diff --git a/docs/docs-developers/docs/resources/migration_notes.md b/docs/docs-developers/docs/resources/migration_notes.md index f7ab60e20c53..01862c1ddfaa 100644 --- a/docs/docs-developers/docs/resources/migration_notes.md +++ b/docs/docs-developers/docs/resources/migration_notes.md @@ -9,6 +9,27 @@ Aztec is in active development. 
Each version may introduce breaking changes that ## TBD +### `simulateUtility` renamed to `executeUtility` + +The `simulateUtility` method and related types have been renamed to `executeUtility` across the entire stack to better reflect that utility functions are executed, not simulated. + +**TypeScript:** + +```diff +- import { SimulateUtilityOptions, UtilitySimulationResult } from '@aztec/aztec.js'; ++ import { ExecuteUtilityOptions, UtilityExecutionResult } from '@aztec/aztec.js'; + +- const result: UtilitySimulationResult = await wallet.simulateUtility(functionCall, opts); ++ const result: UtilityExecutionResult = await wallet.executeUtility(functionCall, opts); +``` + +**Noir (test environment):** + +```diff +- let result = env.simulate_utility(my_contract_address, selector); ++ let result = env.execute_utility(my_contract_address, selector); +``` + ### [Protocol] `include_by_timestamp` renamed to `expiration_timestamp` The `include_by_timestamp` field has been renamed to `expiration_timestamp` across the protocol to better convey its meaning. diff --git a/noir-projects/aztec-nr/aztec/src/contract_self.nr b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_private.nr similarity index 70% rename from noir-projects/aztec-nr/aztec/src/contract_self.nr rename to noir-projects/aztec-nr/aztec/src/contract_self/contract_self_private.nr index 69de9b4e2f6a..6065428f97c1 100644 --- a/noir-projects/aztec-nr/aztec/src/contract_self.nr +++ b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_private.nr @@ -1,32 +1,23 @@ -//! The `self` contract value. +//! The `self` contract value for private execution contexts. 
use crate::{ - context::{ - calls::{PrivateCall, PrivateStaticCall, PublicCall, PublicStaticCall}, - PrivateContext, - PublicContext, - UtilityContext, - }, - event::{ - event_emission::{emit_event_in_private, emit_event_in_public}, - event_interface::EventInterface, - EventMessage, - }, + context::{calls::{PrivateCall, PrivateStaticCall, PublicCall, PublicStaticCall}, PrivateContext}, + event::{event_emission::emit_event_in_private, event_interface::EventInterface, EventMessage}, }; use crate::protocol::{address::AztecAddress, traits::{Deserialize, Serialize}}; -/// Core interface for interacting with aztec-nr contract features. +/// Core interface for interacting with aztec-nr contract features in private execution contexts. /// /// This struct is automatically injected into every [`external`](crate::macros::functions::external) and -/// [`internal`](crate::macros::functions::internal) contract function by the Aztec macro system and is accessible -/// through the `self` variable. +/// [`internal`](crate::macros::functions::internal) contract function marked with `"private"` by the Aztec macro +/// system and is accessible through the `self` variable. 
/// /// ## Usage in Contract Functions /// /// Once injected, you can use `self` to: /// - Access storage: `self.storage.balances.at(owner).read()` /// - Call contracts: `self.call(Token::at(address).transfer(recipient, amount))` -/// - Emit events: `self.emit(event).deliver_to(recipient, delivery_mode)` (private) or `self.emit(event)` (public) +/// - Emit events: `self.emit(event).deliver_to(recipient, delivery_mode)` /// - Get the contract address: `self.address` /// - Get the caller: `self.msg_sender()` /// - Access low-level Aztec.nr APIs through the context: `self.context` @@ -49,14 +40,14 @@ use crate::protocol::{address::AztecAddress, traits::{Deserialize, Serialize}}; /// /// ## Type Parameters /// -/// - `Context`: The execution context type - either `&mut PrivateContext`, `PublicContext`, or `UtilityContext` /// - `Storage`: The contract's storage struct (defined with [`storage`](crate::macros::storage::storage), or `()` if /// the contract has no storage /// - `CallSelf`: Macro-generated type for calling contract's own non-view functions /// - `EnqueueSelf`: Macro-generated type for enqueuing calls to the contract's own non-view functions /// - `CallSelfStatic`: Macro-generated type for calling contract's own view functions /// - `EnqueueSelfStatic`: Macro-generated type for enqueuing calls to the contract's own view functions -pub struct ContractSelf { +/// - `CallInternal`: Macro-generated type for calling internal functions +pub struct ContractSelfPrivate { /// The address of this contract pub address: AztecAddress, @@ -64,10 +55,8 @@ pub struct ContractSelf ContractSelf<&mut PrivateContext, Storage, CallSelf, EnqueueSelf, CallSelfStatic, EnqueueSelfStatic, CallInternal> { - /// Creates a new `ContractSelf` instance for a private function. +impl ContractSelfPrivate { + /// Creates a new `ContractSelfPrivate` instance for a private function. /// /// This constructor is called automatically by the macro system and should not be called directly. 
- pub fn new_private( + pub fn new( context: &mut PrivateContext, storage: Storage, call_self: CallSelf, @@ -335,8 +304,8 @@ impl ContractSelf { - /// Creates a new `ContractSelf` instance for a public function. - /// - /// This constructor is called automatically by the macro system and should not be called directly. - pub fn new_public( - context: PublicContext, - storage: Storage, - call_self: CallSelf, - call_self_static: CallSelfStatic, - internal: CallInternal, - ) -> Self { - Self { - context, - storage, - address: context.this_address(), - call_self, - enqueue_self: (), - call_self_static, - enqueue_self_static: (), - internal, - } - } - - /// The address of the contract address that made this function call. - /// - /// This is similar to Solidity's `msg.sender` value. - /// - /// ## Incognito Calls - /// - /// Contracts can call public functions from private ones hiding their identity (see - /// [`enqueue_incognito`](ContractSelf::enqueue_incognito)). This function reverts when executed in such a context. - /// - /// If you need to handle these cases, use [`PublicContext::maybe_msg_sender`]. - pub fn msg_sender(self: Self) -> AztecAddress { - self.context.maybe_msg_sender().unwrap() - } - - /// Emits an event publicly. - /// - /// Public events are emitted as plaintext and are therefore visible to everyone. This is is the same as Solidity - /// events on EVM chains. - /// - /// Unlike private events, they don't require delivery of an event message. - /// - /// # Example - /// ```noir - /// #[event] - /// struct Update { value: Field } - /// - /// #[external("public")] - /// fn publish_update(value: Field) { - /// self.emit(Update { value }); - /// } - /// ``` - /// - /// # Cost - /// - /// Public event emission is achieved by emitting public transaction logs. A total of `N+1` fields are emitted, - /// where `N` is the serialization length of the event. 
- pub fn emit(&mut self, event: Event) - where - Event: EventInterface + Serialize, - { - emit_event_in_public(self.context, event); - } - - /// Makes a public contract call. - /// - /// Will revert if the called function reverts or runs out of gas. - /// - /// # Arguments - /// * `call` - The object representing the public function to invoke. - /// - /// # Returns - /// * `T` - Whatever data the called function has returned. - /// - /// # Example - /// ```noir - /// self.call(Token::at(address).transfer_in_public(recipient, amount)); - /// ``` - /// - pub unconstrained fn call(self, call: PublicCall) -> T - where - T: Deserialize, - { - call.call(self.context) - } - - /// Makes a public read-only contract call. - /// - /// This is similar to Solidity's `staticcall`. The called function cannot modify state or emit events. Any nested - /// calls are constrained to also be static calls. - /// - /// Will revert if the called function reverts or runs out of gas. - /// - /// # Arguments - /// * `call` - The object representing the read-only public function to invoke. - /// - /// # Returns - /// * `T` - Whatever data the called function has returned. - /// - /// # Example - /// ```noir - /// self.view(Token::at(address).balance_of_public(recipient)); - /// ``` - /// - pub unconstrained fn view(self, call: PublicStaticCall) -> T - where - T: Deserialize, - { - call.view(self.context) - } -} - -// Implementation for `ContractSelf` in utility execution contexts. -// -// This implementation is used when an external or internal contract function is marked with "utility". -impl ContractSelf { - /// Creates a new `ContractSelf` instance for a utility function. - /// - /// This constructor is called automatically by the macro system and should not be called directly. 
- pub fn new_utility(context: UtilityContext, storage: Storage) -> Self { - Self { - context, - storage, - address: context.this_address(), - call_self: (), - enqueue_self: (), - call_self_static: (), - enqueue_self_static: (), - internal: (), - } - } -} diff --git a/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_public.nr b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_public.nr new file mode 100644 index 000000000000..ab5026bffaa9 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_public.nr @@ -0,0 +1,174 @@ +//! The `self` contract value for public execution contexts. + +use crate::{ + context::{calls::{PublicCall, PublicStaticCall}, PublicContext}, + event::{event_emission::emit_event_in_public, event_interface::EventInterface}, +}; +use crate::protocol::{address::AztecAddress, traits::{Deserialize, Serialize}}; + +/// Core interface for interacting with aztec-nr contract features in public execution contexts. +/// +/// This struct is automatically injected into every [`external`](crate::macros::functions::external) and +/// [`internal`](crate::macros::functions::internal) contract function marked with `"public"` by the Aztec macro +/// system and is accessible through the `self` variable. +/// +/// ## Type Parameters +/// +/// - `Storage`: The contract's storage struct (defined with [`storage`](crate::macros::storage::storage), or `()` if +/// the contract has no storage +/// - `CallSelf`: Macro-generated type for calling contract's own non-view functions +/// - `CallSelfStatic`: Macro-generated type for calling contract's own view functions +/// - `CallInternal`: Macro-generated type for calling internal functions +pub struct ContractSelfPublic { + /// The address of this contract + pub address: AztecAddress, + + /// The contract's storage instance, representing the struct to which the + /// [`storage`](crate::macros::storage::storage) macro was applied in your contract. 
If the contract has no + /// storage, the type of this will be `()`. + /// + /// This storage instance is specialized for the current execution context (public) and + /// provides access to the contract's state variables. + /// + /// ## Developer Note + /// + /// If you've arrived here while trying to access your contract's storage while the `Storage` generic type is set + /// to unit type `()`, it means you haven't yet defined a Storage struct using the + /// [`storage`](crate::macros::storage::storage) macro in your contract. For guidance on setting this up, please + /// refer to our docs: https://docs.aztec.network/developers/docs/guides/smart_contracts/storage + pub storage: Storage, + + /// The public execution context. + pub context: PublicContext, + + /// Provides type-safe methods for calling this contract's own non-view functions. + /// + /// Example API: + /// ```noir + /// self.call_self.some_public_function(args) + /// ``` + pub call_self: CallSelf, + + /// Provides type-safe methods for calling this contract's own view functions. + /// + /// Example API: + /// ```noir + /// self.call_self_static.some_view_function(args) + /// ``` + pub call_self_static: CallSelfStatic, + + /// Provides type-safe methods for calling internal functions. + /// + /// Example API: + /// ```noir + /// self.internal.some_internal_function(args) + /// ``` + pub internal: CallInternal, +} + +impl ContractSelfPublic { + /// Creates a new `ContractSelfPublic` instance for a public function. + /// + /// This constructor is called automatically by the macro system and should not be called directly. + pub fn new( + context: PublicContext, + storage: Storage, + call_self: CallSelf, + call_self_static: CallSelfStatic, + internal: CallInternal, + ) -> Self { + Self { context, storage, address: context.this_address(), call_self, call_self_static, internal } + } + + /// The address of the contract that made this function call. 
+ /// + /// This is similar to Solidity's `msg.sender` value. + /// + /// ## Incognito Calls + /// + /// Contracts can call public functions from private ones hiding their identity (see + /// + /// [`ContractSelfPrivate::enqueue_incognito`](crate::contract_self::ContractSelfPrivate::enqueue_incognito)). + /// This function reverts when executed in such a context. + /// + /// If you need to handle these cases, use [`PublicContext::maybe_msg_sender`]. + pub fn msg_sender(self: Self) -> AztecAddress { + self.context.maybe_msg_sender().unwrap() + } + + /// Emits an event publicly. + /// + /// Public events are emitted as plaintext and are therefore visible to everyone. This is the same as Solidity + /// events on EVM chains. + /// + /// Unlike private events, they don't require delivery of an event message. + /// + /// # Example + /// ```noir + /// #[event] + /// struct Update { value: Field } + /// + /// #[external("public")] + /// fn publish_update(value: Field) { + /// self.emit(Update { value }); + /// } + /// ``` + /// + /// # Cost + /// + /// Public event emission is achieved by emitting public transaction logs. A total of `N+1` fields are emitted, + /// where `N` is the serialization length of the event. + pub fn emit(&mut self, event: Event) + where + Event: EventInterface + Serialize, + { + emit_event_in_public(self.context, event); + } + + /// Makes a public contract call. + /// + /// Will revert if the called function reverts or runs out of gas. + /// + /// # Arguments + /// * `call` - The object representing the public function to invoke. + /// + /// # Returns + /// * `T` - Whatever data the called function has returned. + /// + /// # Example + /// ```noir + /// self.call(Token::at(address).transfer_in_public(recipient, amount)); + /// ``` + /// + pub unconstrained fn call(self, call: PublicCall) -> T + where + T: Deserialize, + { + call.call(self.context) + } + + /// Makes a public read-only contract call. 
+ /// + /// This is similar to Solidity's `staticcall`. The called function cannot modify state or emit events. Any nested + /// calls are constrained to also be static calls. + /// + /// Will revert if the called function reverts or runs out of gas. + /// + /// # Arguments + /// * `call` - The object representing the read-only public function to invoke. + /// + /// # Returns + /// * `T` - Whatever data the called function has returned. + /// + /// # Example + /// ```noir + /// self.view(Token::at(address).balance_of_public(recipient)); + /// ``` + /// + pub unconstrained fn view(self, call: PublicStaticCall) -> T + where + T: Deserialize, + { + call.view(self.context) + } +} diff --git a/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_utility.nr b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_utility.nr new file mode 100644 index 000000000000..39cc1da0d3ac --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/contract_self/contract_self_utility.nr @@ -0,0 +1,45 @@ +//! The `self` contract value for utility execution contexts. + +use crate::context::UtilityContext; +use crate::protocol::address::AztecAddress; + +/// Core interface for interacting with aztec-nr contract features in utility execution contexts. +/// +/// This struct is automatically injected into every [`external`](crate::macros::functions::external) contract function +/// marked with `"utility"` by the Aztec macro system and is accessible through the `self` variable. +/// +/// ## Type Parameters +/// +/// - `Storage`: The contract's storage struct (defined with [`storage`](crate::macros::storage::storage), or `()` if +/// the contract has no storage +pub struct ContractSelfUtility { + /// The address of this contract + pub address: AztecAddress, + + /// The contract's storage instance, representing the struct to which the + /// [`storage`](crate::macros::storage::storage) macro was applied in your contract. 
If the contract has no + /// storage, the type of this will be `()`. + /// + /// This storage instance is specialized for the current execution context (utility) and + /// provides access to the contract's state variables. + /// + /// ## Developer Note + /// + /// If you've arrived here while trying to access your contract's storage while the `Storage` generic type is set + /// to unit type `()`, it means you haven't yet defined a Storage struct using the + /// [`storage`](crate::macros::storage::storage) macro in your contract. For guidance on setting this up, please + /// refer to our docs: https://docs.aztec.network/developers/docs/guides/smart_contracts/storage + pub storage: Storage, + + /// The utility execution context. + pub context: UtilityContext, +} + +impl ContractSelfUtility { + /// Creates a new `ContractSelfUtility` instance for a utility function. + /// + /// This constructor is called automatically by the macro system and should not be called directly. + pub fn new(context: UtilityContext, storage: Storage) -> Self { + Self { context, storage, address: context.this_address() } + } +} diff --git a/noir-projects/aztec-nr/aztec/src/contract_self/mod.nr b/noir-projects/aztec-nr/aztec/src/contract_self/mod.nr new file mode 100644 index 000000000000..e97c764a38e8 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/contract_self/mod.nr @@ -0,0 +1,7 @@ +pub mod contract_self_private; +pub mod contract_self_public; +pub mod contract_self_utility; + +pub use contract_self_private::ContractSelfPrivate; +pub use contract_self_public::ContractSelfPublic; +pub use contract_self_utility::ContractSelfUtility; diff --git a/noir-projects/aztec-nr/aztec/src/event/event_emission.nr b/noir-projects/aztec-nr/aztec/src/event/event_emission.nr index 89141b717278..845c2f366f6c 100644 --- a/noir-projects/aztec-nr/aztec/src/event/event_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/event/event_emission.nr @@ -11,7 +11,7 @@ pub struct NewEvent { pub(crate) randomness: 
Field, } -/// Equivalent to `self.emit(event)`: see [`crate::contract_self::ContractSelf::emit`]. +/// Equivalent to `self.emit(event)`: see [`crate::contract_self::ContractSelfPrivate::emit`]. pub fn emit_event_in_private(context: &mut PrivateContext, event: Event) -> EventMessage where Event: EventInterface + Serialize, @@ -34,7 +34,7 @@ where EventMessage::new(NewEvent { event, randomness }, context) } -/// Equivalent to `self.emit(event)`: see [`crate::contract_self::ContractSelf::emit`]. +/// Equivalent to `self.emit(event)`: see [`crate::contract_self::ContractSelfPublic::emit`]. pub fn emit_event_in_public(context: PublicContext, event: Event) where Event: EventInterface + Serialize, diff --git a/noir-projects/aztec-nr/aztec/src/lib.nr b/noir-projects/aztec-nr/aztec/src/lib.nr index d0118d08a9b7..d0cce41274d4 100644 --- a/noir-projects/aztec-nr/aztec/src/lib.nr +++ b/noir-projects/aztec-nr/aztec/src/lib.nr @@ -28,8 +28,7 @@ //! [`TestEnvironment`](crate::test::helpers::test_environment::TestEnvironment) and [mocks](crate::test::mocks). pub mod context; -mod contract_self; -pub use contract_self::ContractSelf; +pub mod contract_self; pub mod publish_contract_instance; pub mod hash; pub mod history; diff --git a/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr index dfcd4eec59d5..75ab13d794ee 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr @@ -220,7 +220,7 @@ pub comptime fn only_self(f: FunctionDefinition) { /// ## Guarantees /// /// [`view`] functions can *only* be called in a static execution context, which is typically achieved by calling the -/// [`crate::contract_self::ContractSelf::view`] method on `self`. +/// [`crate::contract_self::ContractSelfPublic::view`] method on `self`. /// /// No compile time checks are performed on whether a function can be made [`view`]. 
If a function marked as view /// attempts to modify state, that will result in *runtime* failures. diff --git a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/private.nr b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/private.nr index 035d0106284c..88c0f8590d4d 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/private.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/private.nr @@ -34,9 +34,9 @@ pub(crate) comptime fn generate_private_external(f: FunctionDefinition) -> Quote let storage = Storage::init(&mut context); } } else { - // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelf requires a - // storage struct in its constructor. Using an Option type would lead to worse developer experience and higher - // constraint counts so we use the unit type `()` instead. + // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelfPrivate + // requires a storage struct in its constructor. Using an Option type would lead to worse developer experience + // and higher constraint counts so we use the unit type `()` instead. 
quote { let storage = (); } @@ -55,7 +55,7 @@ pub(crate) comptime fn generate_private_external(f: FunctionDefinition) -> Quote let call_self_static: CallSelfStatic<&mut aztec::context::PrivateContext> = CallSelfStatic { address: self_address, context: &mut context }; let enqueue_self_static: EnqueueSelfStatic<&mut aztec::context::PrivateContext> = EnqueueSelfStatic { address: self_address, context: &mut context }; let internal: CallInternal<&mut aztec::context::PrivateContext> = CallInternal { context: &mut context }; - aztec::ContractSelf::new_private(&mut context, storage, call_self, enqueue_self, call_self_static, enqueue_self_static, internal) + aztec::contract_self::ContractSelfPrivate::new(&mut context, storage, call_self, enqueue_self, call_self_static, enqueue_self_static, internal) }; }; diff --git a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/public.nr b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/public.nr index 8a575ab661b8..d481fd0a9666 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/public.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/public.nr @@ -33,9 +33,9 @@ pub(crate) comptime fn generate_public_external(f: FunctionDefinition) -> Quoted let storage = Storage::init(context); } } else { - // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelf requires a - // storage struct in its constructor. Using an Option type would lead to worse developer experience and higher - // constraint counts so we use the unit type `()` instead. + // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelfPublic requires + // a storage struct in its constructor. Using an Option type would lead to worse developer experience and + // higher constraint counts so we use the unit type `()` instead. 
quote { let storage = (); } @@ -55,7 +55,7 @@ pub(crate) comptime fn generate_public_external(f: FunctionDefinition) -> Quoted let call_self: CallSelf = CallSelf { address: self_address, context }; let call_self_static: CallSelfStatic = CallSelfStatic { address: self_address, context }; let internal: CallInternal = CallInternal { context }; - aztec::ContractSelf::new_public(context, storage, call_self, call_self_static, internal) + aztec::contract_self::ContractSelfPublic::new(context, storage, call_self, call_self_static, internal) }; }; diff --git a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/utility.nr b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/utility.nr index f7f4da0a6904..f50bbc1249d7 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/utility.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/external/utility.nr @@ -7,7 +7,8 @@ pub(crate) comptime fn generate_utility_external(f: FunctionDefinition) -> Quote let storage = Storage::init(context); } } else { - // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelf requires a + // Contract does not have Storage defined, so we set storage to the unit type `()`. ContractSelfUtility + // requires a // storage struct in its constructor. Using an Option type would lead to worse developer experience and higher // constraint counts so we use the unit type `()` instead. 
quote { @@ -21,7 +22,7 @@ pub(crate) comptime fn generate_utility_external(f: FunctionDefinition) -> Quote let mut self = { let context = aztec::context::UtilityContext::new(); $storage_init - aztec::ContractSelf::new_utility(context, storage) + aztec::contract_self::ContractSelfUtility::new(context, storage) }; }; diff --git a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/internal.nr b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/internal.nr index 9aabae9e6a6c..443daa28f564 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/internal.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/internals_functions_generation/internal.nr @@ -42,7 +42,7 @@ pub(crate) comptime fn generate_private_internal(f: FunctionDefinition) -> Quote let call_self_static: CallSelfStatic<&mut aztec::context::PrivateContext> = CallSelfStatic { address: self_address, context }; let enqueue_self_static: EnqueueSelfStatic<&mut aztec::context::PrivateContext> = EnqueueSelfStatic { address: self_address, context }; let internal: CallInternal<&mut aztec::context::PrivateContext> = CallInternal { context }; - aztec::ContractSelf::new_private(context, storage, call_self, enqueue_self, call_self_static, enqueue_self_static, internal) + aztec::contract_self::ContractSelfPrivate::new(context, storage, call_self, enqueue_self, call_self_static, enqueue_self_static, internal) }; $body @@ -92,7 +92,7 @@ pub(crate) comptime fn generate_public_internal(f: FunctionDefinition) -> Quoted let call_self: CallSelf = CallSelf { address: self_address, context }; let call_self_static: CallSelfStatic = CallSelfStatic { address: self_address, context }; let internal: CallInternal = CallInternal { context }; - aztec::ContractSelf::new_public(context, storage, call_self, call_self_static, internal) + aztec::contract_self::ContractSelfPublic::new(context, storage, call_self, call_self_static, internal) }; $body diff --git 
a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr index 723d48135594..4211f1ce3e77 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr @@ -30,7 +30,7 @@ mod test; /// Unlike [`PublicMutable`](crate::state_vars::PublicMutable) it is **also** possible to read a `PublicImmutable` from /// a /// private contract function, though it is not possible to initialize one. A common pattern is to have these functions -/// [enqueue a public self calls](crate::contract_self::ContractSelf::enqueue_self) in which the initialization +/// [enqueue a public self calls](crate::contract_self::ContractSelfPrivate::enqueue) in which the initialization /// operation is performed. /// /// For a mutable (with restrictions) variant which also can be read from private functions see diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr index 5500059ef84b..c7f53d505d9d 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr @@ -15,7 +15,8 @@ use crate::state_vars::StateVariable; /// A value stored in a `PublicMutable` can be read and written from public contract functions. /// /// It is not possible to read or write a `PublicMutable` from private contract functions. A common pattern is to have -/// these functions [enqueue a public self calls](crate::contract_self::ContractSelf::enqueue_self) in which the +/// these functions [enqueue a public self +/// calls](crate::contract_self::ContractSelfPrivate::enqueue) in which the /// required operation is performed. 
/// /// For an immutable variant which can be read from private functions, see diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr index dd2cf8a241dc..7d27bfc02aa6 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr @@ -550,14 +550,14 @@ impl TestEnvironment { /// ```noir /// let caller = env.create_light_account(); /// let contract_addr = env.deploy("SampleContract").without_initializer(); - /// let return_value = env.simulate_utility(SampleContract::at(contract_addr).sample_utility_function()); + /// let return_value = env.execute_utility(SampleContract::at(contract_addr).sample_utility_function()); /// ``` - pub unconstrained fn simulate_utility(_self: Self, call: UtilityCall) -> T + pub unconstrained fn execute_utility(_self: Self, call: UtilityCall) -> T where T: Deserialize, { let serialized_return_values = - txe_oracles::simulate_utility_function(call.target_contract, call.selector, call.args); + txe_oracles::execute_utility_function(call.target_contract, call.selector, call.args); T::deserialize(serialized_return_values) } diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/txe_oracles.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/txe_oracles.nr index 74a5cedf307d..f9fe915c9c46 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/txe_oracles.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/txe_oracles.nr @@ -55,12 +55,12 @@ pub unconstrained fn public_call_new_flow( public_call_new_flow_oracle(from, contract_address, calldata, is_static_call) } -pub unconstrained fn simulate_utility_function( +pub unconstrained fn execute_utility_function( contract_address: AztecAddress, function_selector: FunctionSelector, args: [Field; M], ) -> [Field; N] { - simulate_utility_function_oracle(contract_address, function_selector, args) + 
execute_utility_function_oracle(contract_address, function_selector, args) } #[oracle(txeGetNextBlockNumber)] @@ -145,8 +145,8 @@ unconstrained fn public_call_new_flow_oracle( is_static_call: bool, ) -> [Field; N] {} -#[oracle(txeSimulateUtilityFunction)] -unconstrained fn simulate_utility_function_oracle( +#[oracle(txeExecuteUtilityFunction)] +unconstrained fn execute_utility_function_oracle( contract_address: AztecAddress, function_selector: FunctionSelector, args: [Field; M], diff --git a/noir-projects/noir-contracts/contracts/app/amm_contract/src/test/test.nr b/noir-projects/noir-contracts/contracts/app/amm_contract/src/test/test.nr index 57971f5a7105..e97677d73e3a 100644 --- a/noir-projects/noir-contracts/contracts/app/amm_contract/src/test/test.nr +++ b/noir-projects/noir-contracts/contracts/app/amm_contract/src/test/test.nr @@ -76,9 +76,9 @@ unconstrained fn add_liquidity_twice_and_remove_liquidity() { ); // Liquidity provider 2 should have 0 token0 and the refund amount of token1 - assert_eq(env.simulate_utility(token0.balance_of_private(liquidity_provider_2)), 0); + assert_eq(env.execute_utility(token0.balance_of_private(liquidity_provider_2)), 0); assert_eq( - env.simulate_utility(token1.balance_of_private(liquidity_provider_2)), + env.execute_utility(token1.balance_of_private(liquidity_provider_2)), expected_refund_amount1, ); @@ -86,7 +86,7 @@ unconstrained fn add_liquidity_twice_and_remove_liquidity() { let expected_liquidity_tokens = (expected_amount_0_in * initial_liquidity_token_supply) / initial_amount0; assert_eq( - env.simulate_utility(liquidity_token.balance_of_private(liquidity_provider_2)), + env.execute_utility(liquidity_token.balance_of_private(liquidity_provider_2)), expected_liquidity_tokens, ); @@ -111,17 +111,17 @@ unconstrained fn add_liquidity_twice_and_remove_liquidity() { let expected_token1_back = (liquidity_to_remove * initial_amount1) / initial_liquidity_token_supply; assert_eq( - 
env.simulate_utility(token0.balance_of_private(liquidity_provider_1)), + env.execute_utility(token0.balance_of_private(liquidity_provider_1)), expected_token0_back, ); assert_eq( - env.simulate_utility(token1.balance_of_private(liquidity_provider_1)), + env.execute_utility(token1.balance_of_private(liquidity_provider_1)), expected_token1_back, ); // Check remaining liquidity tokens assert_eq( - env.simulate_utility(liquidity_token.balance_of_private(liquidity_provider_1)), + env.execute_utility(liquidity_token.balance_of_private(liquidity_provider_1)), // The expected remaining liquidity is the other half of the initial liquidity. AMM::INITIAL_LIQUIDITY / 2, ); @@ -182,9 +182,9 @@ unconstrained fn swap_exact_tokens_for_tokens() { ); // Verify swap occurred - all of input tokens should be spent and hence the swapper should have 0 token0 balance. - assert_eq(env.simulate_utility(token0.balance_of_private(swapper)), 0); + assert_eq(env.execute_utility(token0.balance_of_private(swapper)), 0); // The exact amount out depends on the AMM formula, but should be > amount_out_min - assert(env.simulate_utility(token1.balance_of_private(swapper)) >= amount_out_min); + assert(env.execute_utility(token1.balance_of_private(swapper)) >= amount_out_min); } #[test] @@ -243,9 +243,9 @@ unconstrained fn swap_tokens_for_exact_tokens() { ); // Verify swap occurred - should get exact amount out - assert_eq(env.simulate_utility(token1.balance_of_private(swapper)), amount_out); + assert_eq(env.execute_utility(token1.balance_of_private(swapper)), amount_out); // Should have some token0 change returned - let swapper_token0_balance = env.simulate_utility(token0.balance_of_private(swapper)); + let swapper_token0_balance = env.execute_utility(token0.balance_of_private(swapper)); assert(swapper_token0_balance > 0); assert(swapper_token0_balance < amount_in_max); } diff --git a/noir-projects/noir-contracts/contracts/app/nft_contract/src/test/utils.nr 
b/noir-projects/noir-contracts/contracts/app/nft_contract/src/test/utils.nr index ab1244debcc3..0d2ef11d9077 100644 --- a/noir-projects/noir-contracts/contracts/app/nft_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/app/nft_contract/src/test/utils.nr @@ -72,7 +72,7 @@ pub unconstrained fn assert_owns_private_nft( token_id: Field, ) { let (private_nfts, _) = - env.simulate_utility(NFT::at(nft_contract_address).get_private_nfts(owner, 0)); + env.execute_utility(NFT::at(nft_contract_address).get_private_nfts(owner, 0)); let mut nft_found = false; for obtained_token_id in private_nfts { diff --git a/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/test/test.nr b/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/test/test.nr index b2334e056e66..b3786a5bd95b 100644 --- a/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/test/test.nr +++ b/noir-projects/noir-contracts/contracts/app/orderbook_contract/src/test/test.nr @@ -45,7 +45,7 @@ unconstrained fn full_flow() { ); // Get order and verify it's active - let (order, is_fulfilled) = env.simulate_utility(orderbook.get_order(order_id)); + let (order, is_fulfilled) = env.execute_utility(orderbook.get_order(order_id)); assert_eq(order.bid_amount, BID_AMOUNT); assert_eq(order.ask_amount, ASK_AMOUNT); @@ -54,7 +54,7 @@ unconstrained fn full_flow() { // Verify that all maker's tokens were transferred to orderbook's public balance assert_eq(env.view_public(token0.balance_of_public(orderbook_address)), BID_AMOUNT); - assert_eq(env.simulate_utility(token0.balance_of_private(maker)), 0); + assert_eq(env.execute_utility(token0.balance_of_private(maker)), 0); // ORDER FULFILLMENT @@ -72,13 +72,13 @@ unconstrained fn full_flow() { env.call_private(taker, orderbook.fulfill_order(order_id, FULFILL_ORDER_AUTHWIT_NONCE)); // Verify final balances - assert_eq(env.simulate_utility(token0.balance_of_private(maker)), 0); - 
assert_eq(env.simulate_utility(token1.balance_of_private(maker)), ASK_AMOUNT); - assert_eq(env.simulate_utility(token0.balance_of_private(taker)), BID_AMOUNT); - assert_eq(env.simulate_utility(token1.balance_of_private(taker)), 0); + assert_eq(env.execute_utility(token0.balance_of_private(maker)), 0); + assert_eq(env.execute_utility(token1.balance_of_private(maker)), ASK_AMOUNT); + assert_eq(env.execute_utility(token0.balance_of_private(taker)), BID_AMOUNT); + assert_eq(env.execute_utility(token1.balance_of_private(taker)), 0); // Get order and verify it's fulfilled - let (order, is_fulfilled) = env.simulate_utility(orderbook.get_order(order_id)); + let (order, is_fulfilled) = env.execute_utility(orderbook.get_order(order_id)); assert_eq(order.bid_amount, BID_AMOUNT); assert_eq(order.ask_amount, ASK_AMOUNT); diff --git a/noir-projects/noir-contracts/contracts/app/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/app/token_contract/src/test/utils.nr index b3e4fd7b0abc..67edae1007d3 100644 --- a/noir-projects/noir-contracts/contracts/app/token_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/app/token_contract/src/test/utils.nr @@ -81,7 +81,7 @@ pub unconstrained fn check_private_balance( address_amount: u128, ) { assert_eq( - env.simulate_utility(Token::at(token_contract_address).balance_of_private(address)), + env.execute_utility(Token::at(token_contract_address).balance_of_private(address)), address_amount, ); } diff --git a/noir-projects/noir-contracts/contracts/test/abi_types_contract/src/test.nr b/noir-projects/noir-contracts/contracts/test/abi_types_contract/src/test.nr index 8013103b1fa2..2e725716aae7 100644 --- a/noir-projects/noir-contracts/contracts/test/abi_types_contract/src/test.nr +++ b/noir-projects/noir-contracts/contracts/test/abi_types_contract/src/test.nr @@ -95,7 +95,7 @@ unconstrained fn pass_utility_parameters() { let abi_types = AbiTypes::at(env.deploy("AbiTypes").without_initializer()); - let 
min_range_return_values = env.simulate_utility(abi_types.return_utility_parameters( + let min_range_return_values = env.execute_utility(abi_types.return_utility_parameters( false, 0, 0, @@ -107,7 +107,7 @@ unconstrained fn pass_utility_parameters() { (false, 0, 0, I64_MIN, CustomStruct { w: 0, x: false, y: 0, z: I64_MIN }), ); - let max_range_return_values = env.simulate_utility(abi_types.return_utility_parameters( + let max_range_return_values = env.execute_utility(abi_types.return_utility_parameters( true, MAX_FIELD_VALUE, U64_MAX, diff --git a/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr index ef220deef8b1..6a735d3af490 100644 --- a/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr @@ -110,7 +110,7 @@ pub contract Counter { // Read the stored value in the note let initial_counter = - env.simulate_utility(Counter::at(contract_address).get_counter(owner)); + env.execute_utility(Counter::at(contract_address).get_counter(owner)); assert( initial_counter == initial_value, f"Expected {initial_value} but got {initial_counter}", @@ -120,7 +120,7 @@ pub contract Counter { env.call_private(owner, Counter::at(contract_address).increment(owner)); let incremented_counter = - env.simulate_utility(Counter::at(contract_address).get_counter(owner)); + env.execute_utility(Counter::at(contract_address).get_counter(owner)); let expected_current_value = initial_value + 1; assert( expected_current_value == incremented_counter, @@ -135,21 +135,21 @@ pub contract Counter { // Checking that the note was discovered from private logs let initial_note_value = - env.simulate_utility(Counter::at(contract_address).get_counter(owner)); + env.execute_utility(Counter::at(contract_address).get_counter(owner)); assert(initial_note_value == initial_value); env.call_private(owner, 
Counter::at(contract_address).increment_twice(owner)); - assert_eq(env.simulate_utility(Counter::at(contract_address).get_counter(owner)), 7); + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); let _ = env.call_private( owner, Counter::at(contract_address).increment_and_decrement(owner), ); - assert_eq(env.simulate_utility(Counter::at(contract_address).get_counter(owner)), 7); + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); env.call_private(owner, Counter::at(contract_address).decrement(owner)); - assert_eq(env.simulate_utility(Counter::at(contract_address).get_counter(owner)), 6); + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 6); } } } diff --git a/noir-projects/noir-contracts/contracts/test/test_contract/src/test/note_delivery.nr b/noir-projects/noir-contracts/contracts/test/test_contract/src/test/note_delivery.nr index 6f7e3265937c..3d57d7bc51c2 100644 --- a/noir-projects/noir-contracts/contracts/test/test_contract/src/test/note_delivery.nr +++ b/noir-projects/noir-contracts/contracts/test/test_contract/src/test/note_delivery.nr @@ -47,7 +47,7 @@ unconstrained fn create_note_private_only_tx_and_read_in_utility() { test_contract.call_create_note(VALUE, recipient, STORAGE_SLOT, make_tx_hybrid), ); - let retrieved = env.simulate_utility(test_contract.call_view_notes( + let retrieved = env.execute_utility(test_contract.call_view_notes( recipient, STORAGE_SLOT, ACTIVE_OR_NULLIFIED, @@ -85,7 +85,7 @@ unconstrained fn create_note_hybrid_tx_and_read_in_utility() { test_contract.call_create_note(VALUE, recipient, STORAGE_SLOT, make_tx_hybrid), ); - let retrieved = env.simulate_utility(test_contract.call_view_notes( + let retrieved = env.execute_utility(test_contract.call_view_notes( recipient, STORAGE_SLOT, ACTIVE_OR_NULLIFIED, @@ -121,7 +121,7 @@ unconstrained fn create_partial_note_in_one_tx_and_read_in_utility() { 
test_contract.call_create_and_complete_partial_note(recipient, STORAGE_SLOT, VALUE), ); - let retrieved = env.simulate_utility(test_contract.call_view_notes( + let retrieved = env.execute_utility(test_contract.call_view_notes( recipient, STORAGE_SLOT, ACTIVE_OR_NULLIFIED, @@ -157,7 +157,7 @@ unconstrained fn create_partial_note_in_two_txs_and_read_in_utility() { env.call_public(sender, test_contract.call_complete_partial_note(partial_note, VALUE)); - let retrieved = env.simulate_utility(test_contract.call_view_notes( + let retrieved = env.execute_utility(test_contract.call_view_notes( recipient, STORAGE_SLOT, ACTIVE_OR_NULLIFIED, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 429afe14aec6..51748193cdfb 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -1091,7 +1091,7 @@ pub global AVM_ADDRESSING_RELATIVE_L2_GAS: u32 = 3; // One range check // Base L2 GAS // Based on simulation time metrics -pub global L2_GAS_PER_NOTE_HASH: u32 = 2700; +pub global L2_GAS_PER_NOTE_HASH: u32 = 9200; // Bounded by long term storage requirements. pub global L2_GAS_PER_NULLIFIER: u32 = 16000; pub global L2_GAS_PER_L2_TO_L1_MSG: u32 = 5200; pub global L2_GAS_PER_PRIVATE_LOG: u32 = 2500; diff --git a/yarn-project/CLAUDE.md b/yarn-project/CLAUDE.md index 1d493ac162e1..8adc4f69a2d3 100644 --- a/yarn-project/CLAUDE.md +++ b/yarn-project/CLAUDE.md @@ -110,6 +110,10 @@ LOG_LEVEL='info; debug:sequencer,archiver' yarn workspace @aztec/ **IMPORTANT**: These commands are run from the root of `yarn-project`, NOT the git root. +### Style + +- **Line width**: 120 characters (`printWidth: 120` in `.prettierrc.json`). Wrap comments and code at 120, not 80. 
+ ### Format ```bash diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 16742b0034cb..99481ba2373e 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -76,7 +76,8 @@ import { type WorldStateSynchronizer, tryStop, } from '@aztec/stdlib/interfaces/server'; -import type { LogFilter, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; +import type { DebugLogStore, LogFilter, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; +import { InMemoryDebugLogStore, NullDebugLogStore } from '@aztec/stdlib/logs'; import { InboxLeaf, type L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { P2PClientType } from '@aztec/stdlib/p2p'; import type { Offense, SlashPayloadRound } from '@aztec/stdlib/slashing'; @@ -156,12 +157,20 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { private blobClient?: BlobClientInterface, private validatorClient?: ValidatorClient, private keyStoreManager?: KeystoreManager, + private debugLogStore: DebugLogStore = new NullDebugLogStore(), ) { this.metrics = new NodeMetrics(telemetry, 'AztecNodeService'); this.tracer = telemetry.getTracer('AztecNodeService'); this.log.info(`Aztec Node version: ${this.packageVersion}`); this.log.info(`Aztec Node started on chain 0x${l1ChainId.toString(16)}`, config.l1Contracts); + + // A defensive check that protects us against introducing a bug in the complex `createAndSync` function. We must + // never have debugLogStore enabled when not in test mode because then we would be accumulating debug logs in + // memory which could be a DoS vector on the sequencer (since no fees are paid for debug logs). 
+ if (debugLogStore.isEnabled && config.realProofs) { + throw new Error('debugLogStore should never be enabled when realProofs are set'); + } } public async getWorldStateSyncStatus(): Promise { @@ -301,9 +310,19 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { config.realProofs || config.debugForceTxProofVerification ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(config.proverTestVerificationDelayMs); + + let debugLogStore: DebugLogStore; if (!config.realProofs) { log.warn(`Aztec node is accepting fake proofs`); + + debugLogStore = new InMemoryDebugLogStore(); + log.info( + 'Aztec node started in test mode (realProofs set to false) hence debug logs from public functions will be collected and served', + ); + } else { + debugLogStore = new NullDebugLogStore(); } + const proofVerifier = new QueuedIVCVerifier(config, circuitVerifier); // create the tx pool and the p2p client, which will need the l2 block source @@ -462,6 +481,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { archiver, dateProvider, telemetry, + debugLogStore, ); sequencer = await SequencerClient.new(config, { @@ -543,6 +563,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { blobClient, validatorClient, keyStoreManager, + debugLogStore, ); return node; @@ -836,18 +857,22 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // Then get the actual tx from the archiver, which tracks every tx in a mined block. const settledTxReceipt = await this.blockSource.getSettledTxReceipt(txHash); + let receipt: TxReceipt; if (settledTxReceipt) { - // If the archiver has the receipt then return it. - return settledTxReceipt; + receipt = settledTxReceipt; } else if (isKnownToPool) { // If the tx is in the pool but not in the archiver, it's pending. 
// This handles race conditions between archiver and p2p, where the archiver // has pruned the block in which a tx was mined, but p2p has not caught up yet. - return new TxReceipt(txHash, TxStatus.PENDING, undefined, undefined); + receipt = new TxReceipt(txHash, TxStatus.PENDING, undefined, undefined); } else { // Otherwise, if we don't know the tx, we consider it dropped. - return new TxReceipt(txHash, TxStatus.DROPPED, undefined, 'Tx dropped by P2P node'); + receipt = new TxReceipt(txHash, TxStatus.DROPPED, undefined, 'Tx dropped by P2P node'); } + + this.debugLogStore.decorateReceiptWithLogs(txHash.toString(), receipt); + + return receipt; } public getTxEffect(txHash: TxHash): Promise { @@ -1241,7 +1266,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { const processor = publicProcessorFactory.create(merkleTreeFork, newGlobalVariables, config); // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx - const [processedTxs, failedTxs, _usedTxs, returns] = await processor.process([tx]); + const [processedTxs, failedTxs, _usedTxs, returns, _blobFields, debugLogs] = await processor.process([tx]); // REFACTOR: Consider returning the error rather than throwing if (failedTxs.length) { this.log.warn(`Simulated tx ${txHash} fails: ${failedTxs[0].error}`, { txHash }); @@ -1255,6 +1280,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { processedTx.txEffect, returns, processedTx.gasUsed, + debugLogs, ); } finally { await merkleTreeFork.close(); diff --git a/yarn-project/aztec.js/src/api/wallet.ts b/yarn-project/aztec.js/src/api/wallet.ts index 548d70675600..ae7d39b316f7 100644 --- a/yarn-project/aztec.js/src/api/wallet.ts +++ b/yarn-project/aztec.js/src/api/wallet.ts @@ -1,7 +1,7 @@ export { type Aliased, type SimulateOptions, - type SimulateUtilityOptions, + type ExecuteUtilityOptions, type ProfileOptions, type SendOptions, type BatchableMethods, diff --git 
a/yarn-project/aztec.js/src/contract/batch_call.test.ts b/yarn-project/aztec.js/src/contract/batch_call.test.ts index 52126be476e3..976884e68745 100644 --- a/yarn-project/aztec.js/src/contract/batch_call.test.ts +++ b/yarn-project/aztec.js/src/contract/batch_call.test.ts @@ -1,7 +1,7 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { FunctionCall, FunctionSelector, FunctionType } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { ExecutionPayload, TxSimulationResult, UtilitySimulationResult } from '@aztec/stdlib/tx'; +import { ExecutionPayload, TxSimulationResult, UtilityExecutionResult } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -107,8 +107,8 @@ describe('BatchCall', () => { batchCall = new BatchCall(wallet, [utilityPayload1, privatePayload, utilityPayload2, publicPayload]); // Mock utility simulation results - const utilityResult1 = UtilitySimulationResult.random(); - const utilityResult2 = UtilitySimulationResult.random(); + const utilityResult1 = UtilityExecutionResult.random(); + const utilityResult2 = UtilityExecutionResult.random(); // Mock tx simulation result const privateReturnValues = [Fr.random(), Fr.random()]; @@ -122,8 +122,8 @@ describe('BatchCall', () => { // Mock wallet.batch to return both utility results and simulateTx result wallet.batch.mockResolvedValue([ - { name: 'simulateUtility', result: utilityResult1 }, - { name: 'simulateUtility', result: utilityResult2 }, + { name: 'executeUtility', result: utilityResult1 }, + { name: 'executeUtility', result: utilityResult2 }, { name: 'simulateTx', result: txSimResult }, ] as any); @@ -133,14 +133,14 @@ describe('BatchCall', () => { expect(wallet.batch).toHaveBeenCalledTimes(1); expect(wallet.batch).toHaveBeenCalledWith([ { - name: 'simulateUtility', + name: 'executeUtility', args: [ expect.objectContaining({ name: 'getBalance', to: contractAddress1 }), expect.objectContaining({ scope: 
expect.any(AztecAddress) }), ], }, { - name: 'simulateUtility', + name: 'executeUtility', args: [ expect.objectContaining({ name: 'checkPermission', to: contractAddress3 }), expect.objectContaining({ scope: expect.any(AztecAddress) }), @@ -160,9 +160,9 @@ describe('BatchCall', () => { }, ]); - // Verify wallet.simulateTx/simulateUtility were NOT called directly + // Verify wallet.simulateTx/executeUtility were NOT called directly expect(wallet.simulateTx).not.toHaveBeenCalled(); - expect(wallet.simulateUtility).not.toHaveBeenCalled(); + expect(wallet.executeUtility).not.toHaveBeenCalled(); expect(results).toHaveLength(4); // First utility - decoded from Fr[] to bigint (single field returns the value directly, not as array) @@ -184,13 +184,13 @@ describe('BatchCall', () => { batchCall = new BatchCall(wallet, [utilityPayload1, utilityPayload2]); - // Mock utility simulation results - const utilityResult1 = UtilitySimulationResult.random(); - const utilityResult2 = UtilitySimulationResult.random(); + // Mock utility execution results + const utilityResult1 = UtilityExecutionResult.random(); + const utilityResult2 = UtilityExecutionResult.random(); wallet.batch.mockResolvedValue([ - { name: 'simulateUtility', result: utilityResult1 }, - { name: 'simulateUtility', result: utilityResult2 }, + { name: 'executeUtility', result: utilityResult1 }, + { name: 'executeUtility', result: utilityResult2 }, ] as any); const results = await batchCall.simulate({ from: await AztecAddress.random() }); @@ -198,14 +198,14 @@ describe('BatchCall', () => { expect(wallet.batch).toHaveBeenCalledTimes(1); expect(wallet.batch).toHaveBeenCalledWith([ { - name: 'simulateUtility', + name: 'executeUtility', args: [ expect.objectContaining({ name: 'view1', to: contractAddress1 }), expect.objectContaining({ scope: expect.any(AztecAddress) }), ], }, { - name: 'simulateUtility', + name: 'executeUtility', args: [ expect.objectContaining({ name: 'view2', to: contractAddress2 }), 
expect.objectContaining({ scope: expect.any(AztecAddress) }), diff --git a/yarn-project/aztec.js/src/contract/batch_call.ts b/yarn-project/aztec.js/src/contract/batch_call.ts index 6f90e80856e1..cd112be6f59b 100644 --- a/yarn-project/aztec.js/src/contract/batch_call.ts +++ b/yarn-project/aztec.js/src/contract/batch_call.ts @@ -1,10 +1,5 @@ import { type FunctionCall, FunctionType, decodeFromAbi } from '@aztec/stdlib/abi'; -import { - ExecutionPayload, - TxSimulationResult, - UtilitySimulationResult, - mergeExecutionPayloads, -} from '@aztec/stdlib/tx'; +import { ExecutionPayload, TxSimulationResult, UtilityExecutionResult, mergeExecutionPayloads } from '@aztec/stdlib/tx'; import type { BatchedMethod, Wallet } from '../wallet/wallet.js'; import { BaseContractInteraction } from './base_contract_interaction.js'; @@ -42,9 +37,9 @@ export class BatchCall extends BaseContractInteraction { } /** - * Simulates the batch, supporting private, public and utility functions. Although this is a single + * Simulates/executes the batch, supporting private, public and utility functions. Although this is a single * interaction with the wallet, private and public functions will be grouped into a single ExecutionPayload - * that the wallet will simulate as a single transaction. Utility function calls will simply be executed + * that the wallet will simulate as a single transaction. Utility function calls will be executed * one by one. * @param options - An optional object containing additional configuration for the interaction. 
* @returns The results of all the interactions that make up the batch @@ -81,7 +76,7 @@ export class BatchCall extends BaseContractInteraction { // Add utility calls to batch for (const [call] of utility) { batchRequests.push({ - name: 'simulateUtility' as const, + name: 'executeUtility' as const, args: [call, { scope: options.from, authWitnesses: options.authWitnesses }], }); } @@ -111,8 +106,8 @@ export class BatchCall extends BaseContractInteraction { for (let i = 0; i < utility.length; i++) { const [call, resultIndex] = utility[i]; const wrappedResult = batchResults[i]; - if (wrappedResult.name === 'simulateUtility') { - const rawReturnValues = (wrappedResult.result as UtilitySimulationResult).result; + if (wrappedResult.name === 'executeUtility') { + const rawReturnValues = (wrappedResult.result as UtilityExecutionResult).result; results[resultIndex] = rawReturnValues ? decodeFromAbi(call.returnTypes, rawReturnValues) : []; } } diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index 3abac1754238..28005f6ecbfc 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -11,7 +11,7 @@ import type { TxHash, TxReceipt, TxSimulationResult, - UtilitySimulationResult, + UtilityExecutionResult, } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -31,7 +31,7 @@ describe('Contract Class', () => { const _mockTxHash = { type: 'TxHash' } as any as TxHash; const mockTxReceipt = { type: 'TxReceipt' } as any as TxReceipt; const mockTxSimulationResult = { type: 'TxSimulationResult', result: 1n } as any as TxSimulationResult; - const mockUtilityResultValue = { result: [new Fr(42)] } as any as UtilitySimulationResult; + const mockUtilityResultValue = { result: [new Fr(42)] } as any as UtilityExecutionResult; const defaultArtifact: ContractArtifact = { name: 'FooContract', @@ -137,7 +137,7 @@ describe('Contract 
Class', () => { account.createTxExecutionRequest.mockResolvedValue(mockTxRequest); wallet.registerContract.mockResolvedValue(contractInstance); wallet.sendTx.mockResolvedValue(mockTxReceipt); - wallet.simulateUtility.mockResolvedValue(mockUtilityResultValue); + wallet.executeUtility.mockResolvedValue(mockUtilityResultValue); }); it('should create and send a contract method tx', async () => { @@ -153,8 +153,8 @@ describe('Contract Class', () => { it('should call view on a utility function', async () => { const fooContract = Contract.at(contractAddress, defaultArtifact, wallet); const result = await fooContract.methods.qux(123n).simulate({ from: account.getAddress() }); - expect(wallet.simulateUtility).toHaveBeenCalledTimes(1); - expect(wallet.simulateUtility).toHaveBeenCalledWith( + expect(wallet.executeUtility).toHaveBeenCalledTimes(1); + expect(wallet.executeUtility).toHaveBeenCalledWith( expect.objectContaining({ name: 'qux', to: contractAddress }), expect.objectContaining({ scope: account.getAddress() }), ); diff --git a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts index d7e56e071598..a087d85f0525 100644 --- a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts +++ b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts @@ -111,7 +111,7 @@ export class ContractFunctionInteraction extends BaseContractInteraction { // docs:end:simulate if (this.functionDao.functionType == FunctionType.UTILITY) { const call = await this.getFunctionCall(); - const utilityResult = await this.wallet.simulateUtility(call, { + const utilityResult = await this.wallet.executeUtility(call, { scope: options.from, authWitnesses: options.authWitnesses, }); diff --git a/yarn-project/aztec.js/src/wallet/capabilities.ts b/yarn-project/aztec.js/src/wallet/capabilities.ts index eb5516679477..54a6a3c2b42d 100644 --- a/yarn-project/aztec.js/src/wallet/capabilities.ts +++ 
b/yarn-project/aztec.js/src/wallet/capabilities.ts @@ -180,11 +180,11 @@ export interface ContractClassesCapability { export interface GrantedContractClassesCapability extends ContractClassesCapability {} /** - * Transaction simulation capability - for simulating transactions and utilities. + * Transaction simulation capability - for simulating transactions and executing utilities. * * Maps to wallet methods: * - simulateTx (when transactions scope specified) - * - simulateUtility (when utilities scope specified) + * - executeUtility (when utilities scope specified) * - profileTx (when transactions scope specified) * * @example @@ -200,7 +200,7 @@ export interface GrantedContractClassesCapability extends ContractClassesCapabil * \} * * @example - * // Simulate any transaction and utility call + * // Simulate any transaction and execute any utility call * \{ * type: 'simulation', * transactions: \{ scope: '*' \}, @@ -221,7 +221,7 @@ export interface SimulationCapability { scope: '*' | ContractFunctionPattern[]; }; - /** Utility simulation scope (unconstrained calls). Maps to: simulateUtility */ + /** Utility execution scope (unconstrained calls). 
Maps to: executeUtility */ utilities?: { /** * Which contracts/functions to allow: diff --git a/yarn-project/aztec.js/src/wallet/wallet.test.ts b/yarn-project/aztec.js/src/wallet/wallet.test.ts index 4d6d51e21fbe..bcee66440e18 100644 --- a/yarn-project/aztec.js/src/wallet/wallet.test.ts +++ b/yarn-project/aztec.js/src/wallet/wallet.test.ts @@ -15,7 +15,7 @@ import { TxProfileResult, TxReceipt, TxSimulationResult, - UtilitySimulationResult, + UtilityExecutionResult, } from '@aztec/stdlib/tx'; import { type InteractionWaitOptions, NO_WAIT, type SendReturn } from '../contract/interaction_options.js'; @@ -163,7 +163,7 @@ describe('WalletSchema', () => { expect(result).toBeInstanceOf(TxSimulationResult); }); - it('simulateUtility', async () => { + it('executeUtility', async () => { const call = FunctionCall.from({ name: 'testFunction', to: await AztecAddress.random(), @@ -174,11 +174,11 @@ describe('WalletSchema', () => { args: [Fr.random()], returnTypes: [], }); - const result = await context.client.simulateUtility(call, { + const result = await context.client.executeUtility(call, { scope: await AztecAddress.random(), authWitnesses: [AuthWitness.random()], }); - expect(result).toBeInstanceOf(UtilitySimulationResult); + expect(result).toBeInstanceOf(UtilityExecutionResult); }); it('profileTx', async () => { @@ -325,7 +325,7 @@ describe('WalletSchema', () => { { name: 'getAccounts', args: [] }, { name: 'registerContract', args: [mockInstance, mockArtifact, undefined] }, { name: 'simulateTx', args: [exec, simulateOpts] }, - { name: 'simulateUtility', args: [call, { scope: address3, authWitnesses: [AuthWitness.random()] }] }, + { name: 'executeUtility', args: [call, { scope: address3, authWitnesses: [AuthWitness.random()] }] }, { name: 'profileTx', args: [exec, profileOpts] }, { name: 'sendTx', args: [exec, opts] }, { name: 'createAuthWit', args: [address1, { consumer: await AztecAddress.random(), innerHash: Fr.random() }] }, @@ -351,7 +351,7 @@ describe('WalletSchema', () 
=> { result: expect.objectContaining({ address: expect.any(AztecAddress) }), }); expect(results[8]).toEqual({ name: 'simulateTx', result: expect.any(TxSimulationResult) }); - expect(results[9]).toEqual({ name: 'simulateUtility', result: expect.any(UtilitySimulationResult) }); + expect(results[9]).toEqual({ name: 'executeUtility', result: expect.any(UtilityExecutionResult) }); expect(results[10]).toEqual({ name: 'profileTx', result: expect.any(TxProfileResult) }); expect(results[11]).toEqual({ name: 'sendTx', result: expect.any(TxReceipt) }); expect(results[12]).toEqual({ name: 'createAuthWit', result: expect.any(AuthWitness) }); @@ -430,11 +430,11 @@ class MockWallet implements Wallet { return Promise.resolve(TxSimulationResult.random()); } - simulateUtility( + executeUtility( _call: any, _opts: { scope: AztecAddress; authWitnesses?: AuthWitness[] }, - ): Promise { - return Promise.resolve(UtilitySimulationResult.random()); + ): Promise { + return Promise.resolve(UtilityExecutionResult.random()); } profileTx(_exec: ExecutionPayload, _opts: ProfileOptions): Promise { diff --git a/yarn-project/aztec.js/src/wallet/wallet.ts b/yarn-project/aztec.js/src/wallet/wallet.ts index 84cf35592761..9918542d297e 100644 --- a/yarn-project/aztec.js/src/wallet/wallet.ts +++ b/yarn-project/aztec.js/src/wallet/wallet.ts @@ -22,7 +22,7 @@ import { TxProfileResult, TxReceipt, TxSimulationResult, - UtilitySimulationResult, + UtilityExecutionResult, inTxSchema, } from '@aztec/stdlib/tx'; @@ -226,10 +226,10 @@ export type ContractClassMetadata = { }; /** - * Options for simulating a utility function call. + * Options for executing a utility function call. */ -export type SimulateUtilityOptions = { - /** The scope for the utility simulation (determines which notes and keys are visible). */ +export type ExecuteUtilityOptions = { + /** The scope for the utility execution (determines which notes and keys are visible). */ scope: AztecAddress; /** Optional auth witnesses to use during execution. 
*/ authWitnesses?: AuthWitness[]; @@ -255,7 +255,7 @@ export type Wallet = { secretKey?: Fr, ): Promise; simulateTx(exec: ExecutionPayload, opts: SimulateOptions): Promise; - simulateUtility(call: FunctionCall, opts: SimulateUtilityOptions): Promise; + executeUtility(call: FunctionCall, opts: ExecuteUtilityOptions): Promise; profileTx(exec: ExecutionPayload, opts: ProfileOptions): Promise; sendTx( exec: ExecutionPayload, @@ -518,7 +518,7 @@ const WalletMethodSchemas = { .args(ContractInstanceWithAddressSchema, optional(ContractArtifactSchema), optional(schemas.Fr)) .returns(ContractInstanceWithAddressSchema), simulateTx: z.function().args(ExecutionPayloadSchema, SimulateOptionsSchema).returns(TxSimulationResult.schema), - simulateUtility: z + executeUtility: z .function() .args( FunctionCall.schema, @@ -527,7 +527,7 @@ const WalletMethodSchemas = { authWitnesses: optional(z.array(AuthWitness.schema)), }), ) - .returns(UtilitySimulationResult.schema), + .returns(UtilityExecutionResult.schema), profileTx: z.function().args(ExecutionPayloadSchema, ProfileOptionsSchema).returns(TxProfileResult.schema), sendTx: z .function() diff --git a/yarn-project/aztec/bootstrap.sh b/yarn-project/aztec/bootstrap.sh new file mode 100755 index 000000000000..c27fba277781 --- /dev/null +++ b/yarn-project/aztec/bootstrap.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source_bootstrap + +repo_root=$(git rev-parse --show-toplevel) +export NARGO=${NARGO:-$repo_root/noir/noir-repo/target/release/nargo} +export BB=${BB:-$repo_root/barretenberg/cpp/build/bin/bb} +export PROFILER_PATH=${PROFILER_PATH:-$repo_root/noir/noir-repo/target/release/noir-profiler} + +hash=$(../bootstrap.sh hash) + +function test_cmds { + # All CLI tests share test/mixed-workspace/target so they must run sequentially + # in a single jest invocation (--runInBand is set by run_test.sh). 
+ echo "$hash:ISOLATE=1:NAME=aztec/cli NARGO=$NARGO BB=$BB PROFILER_PATH=$PROFILER_PATH yarn-project/scripts/run_test.sh aztec/src/cli" +} + +case "$cmd" in + "") + ;; + *) + default_cmd_handler "$@" + ;; +esac diff --git a/yarn-project/aztec/scripts/aztec.sh b/yarn-project/aztec/scripts/aztec.sh index aff76c6ed46a..0a7e9003882e 100755 --- a/yarn-project/aztec/scripts/aztec.sh +++ b/yarn-project/aztec/scripts/aztec.sh @@ -21,7 +21,7 @@ function aztec { case $cmd in test) - export LOG_LEVEL="${LOG_LEVEL:-error}" + export LOG_LEVEL="${LOG_LEVEL:-"error;trace:contract_log"}" aztec start --txe --port 8081 & server_pid=$! trap 'kill $server_pid &>/dev/null || true' EXIT @@ -54,9 +54,13 @@ case $cmd in aztec start "$@" ;; - compile|new|init|flamegraph) + new|init) $script_dir/${cmd}.sh "$@" ;; + flamegraph) + echo "Warning: 'aztec flamegraph' is deprecated. Use 'aztec profile flamegraph' instead." >&2 + aztec profile flamegraph "$@" + ;; *) aztec $cmd "$@" ;; diff --git a/yarn-project/aztec/scripts/compile.sh b/yarn-project/aztec/scripts/compile.sh deleted file mode 100755 index 7bec1e29d17f..000000000000 --- a/yarn-project/aztec/scripts/compile.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -NARGO=${NARGO:-nargo} -BB=${BB:-bb} - -# If help is requested, show Aztec-specific info then run nargo compile help and then exit in order to not trigger -# transpilation -for arg in "$@"; do - if [ "$arg" == "--help" ] || [ "$arg" == "-h" ]; then - cat << 'EOF' -Aztec Compile - Compile Aztec Noir contracts - -This command compiles Aztec Noir contracts using nargo and then automatically -postprocesses them to generate Aztec specific artifacts including: -- Transpiled contract artifacts -- Verification keys - -The compiled contracts will be placed in the target/ directory by default. - ---- -Underlying nargo compile options: - -EOF - nargo compile --help - exit 0 - fi -done - -# Run nargo compile. -$NARGO compile "$@" - -echo "Postprocessing contract..." 
-$BB aztec_process - -# Strip internal prefixes from all compiled contract JSONs in target directory -# TODO: This should be part of bb aztec_process! -for json in target/*.json; do - temp_file="${json}.tmp" - jq '.functions |= map(.name |= sub("^__aztec_nr_internals__"; ""))' "$json" > "$temp_file" - mv "$temp_file" "$json" -done - -echo "Compilation complete!" diff --git a/yarn-project/aztec/scripts/extract_function.js b/yarn-project/aztec/scripts/extract_function.js deleted file mode 100644 index c73c8ba9aa58..000000000000 --- a/yarn-project/aztec/scripts/extract_function.js +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env node -import fs from 'fs/promises'; -import path from 'path'; - -// Simple script to extract a contract function as a separate Noir artifact. -// We need to use this since the transpiling that we do on public functions make the contract artifacts -// unreadable by noir tooling, since they are no longer following the noir artifact format. -async function main() { - let [contractArtifactPath, functionName] = process.argv.slice(2); - if (!contractArtifactPath || !functionName) { - console.log('Usage: node extractFunctionAsNoirArtifact.js '); - return; - } - - const contractArtifact = JSON.parse(await fs.readFile(contractArtifactPath, 'utf8')); - const func = contractArtifact.functions.find(f => f.name === functionName); - if (!func) { - console.error(`Function ${functionName} not found in ${contractArtifactPath}`); - return; - } - - const artifact = { - noir_version: contractArtifact.noir_version, - hash: 0, - abi: func.abi, - bytecode: func.bytecode, - debug_symbols: func.debug_symbols, - file_map: contractArtifact.file_map, - expression_width: { - Bounded: { - width: 4, - }, - }, - }; - - const outputDir = path.dirname(contractArtifactPath); - const outputName = path.basename(contractArtifactPath, '.json') + `-${functionName}.json`; - - const outPath = path.join(outputDir, outputName); - - await fs.writeFile(outPath, JSON.stringify(artifact, 
null, 2)); -} - -main().catch(err => { - console.error(err); - process.exit(1); -}); diff --git a/yarn-project/aztec/scripts/flamegraph.sh b/yarn-project/aztec/scripts/flamegraph.sh deleted file mode 100755 index 48763ef0d793..000000000000 --- a/yarn-project/aztec/scripts/flamegraph.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env bash -set -eu - -# If first arg is -h or --help, print usage. -if [ $# -lt 2 ] || [ "$1" == "-h" ] || [ "$1" == "--help" ]; then - cat << 'EOF' -Aztec Flamegraph - Generate a gate count flamegraph for an aztec contract function. - -Usage: aztec flamegraph - -Options: - -h, --help Print help - -Will output an svg at /--flamegraph.svg. -You can open it in your browser to view it. - -EOF - exit 0 -fi - -cleanup() { - set +e - if [ -f "$function_artifact" ]; then - rm -f "$function_artifact" - fi -} - -trap cleanup EXIT - -# Get the directory of the script -script_dir=$(realpath $(dirname $0)) - -PROFILER=${PROFILER_PATH:-noir-profiler} -BB=${BB:-bb} - -# first console arg is contract name in camel case or path to contract artifact -contract=$1 - -# second console arg is the contract function -function=$2 - -if [ ! -f "$contract" ]; then - echo "Error: Contract artifact not found at: $contract" - exit 1 -fi -artifact_path=$contract -function_artifact="${artifact_path%%.json}-${function}.json" -output_dir=$(dirname "$artifact_path") - -# Extract artifact for the specific function. 
-node $script_dir/extract_function.js "$artifact_path" $function - -# Generate the flamegraph -$PROFILER gates --artifact-path "$function_artifact" --backend-path "$BB" --backend-gates-command "gates" --output "$output_dir" --scheme chonk --include_gates_per_opcode - -# Save as $artifact_name-$function-flamegraph.svg -output_file="${function_artifact%%.json}-flamegraph.svg" -mv "$output_dir/__aztec_nr_internals__${function}_gates.svg" "$output_file" -echo "Flamegraph generated at: $output_file" diff --git a/yarn-project/aztec/src/bin/index.ts b/yarn-project/aztec/src/bin/index.ts index d06b298d9add..c1565d92576f 100644 --- a/yarn-project/aztec/src/bin/index.ts +++ b/yarn-project/aztec/src/bin/index.ts @@ -14,7 +14,9 @@ import { createConsoleLogger, createLogger } from '@aztec/foundation/log'; import { Command } from 'commander'; +import { injectCompileCommand } from '../cli/cmds/compile.js'; import { injectMigrateCommand } from '../cli/cmds/migrate_ha_db.js'; +import { injectProfileCommand } from '../cli/cmds/profile.js'; import { injectAztecCommands } from '../cli/index.js'; import { getCliVersion } from '../cli/release_version.js'; @@ -47,7 +49,7 @@ async function main() { const cliVersion = getCliVersion(); let program = new Command('aztec'); - program.description('Aztec command line interface').version(cliVersion); + program.description('Aztec command line interface').version(cliVersion).enablePositionalOptions(); program = injectAztecCommands(program, userLog, debugLogger); program = injectBuilderCommands(program); program = injectContractCommands(program, userLog, debugLogger); @@ -56,6 +58,8 @@ async function main() { program = injectAztecNodeCommands(program, userLog, debugLogger); program = injectMiscCommands(program, userLog); program = injectValidatorKeysCommands(program, userLog); + program = injectCompileCommand(program, userLog); + program = injectProfileCommand(program, userLog); program = injectMigrateCommand(program, userLog); await 
program.parseAsync(process.argv); diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index f086b852da31..1c79cf24f294 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -39,7 +39,6 @@ Additional commands: init [folder] [options] creates a new Aztec Noir project. new [options] creates a new Aztec Noir project in a new directory. - compile [options] compiles Aztec Noir contracts. test [options] starts a TXE and runs "nargo test" using it as the oracle resolver. `, ); diff --git a/yarn-project/aztec/src/cli/cmds/compile.test.ts b/yarn-project/aztec/src/cli/cmds/compile.test.ts new file mode 100644 index 000000000000..8a0af802b005 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/compile.test.ts @@ -0,0 +1,82 @@ +import { afterAll, beforeAll, describe, expect, it } from '@jest/globals'; +import { execFileSync } from 'child_process'; +import { existsSync, readFileSync, rmSync } from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const PACKAGE_ROOT = join(dirname(fileURLToPath(import.meta.url)), '../../..'); +const CLI = join(PACKAGE_ROOT, 'dest/bin/index.js'); +const WORKSPACE = join(PACKAGE_ROOT, 'test/mixed-workspace'); +const TARGET = join(WORKSPACE, 'target'); +const CODEGEN_OUT = join(WORKSPACE, 'codegen-output'); + +// Compiles a mixed workspace containing both a contract and a plain circuit, +// then runs codegen. 
Validates that: +// - Contract artifacts have a functions array and are transpiled +// - Program (circuit) artifacts do not have functions and are not transpiled +// - Codegen produces TypeScript only for contracts, not circuits +describe('aztec compile integration', () => { + beforeAll(() => { + cleanupArtifacts(); + runCompile(); + runCodegen(); + }, 120_000); + + afterAll(() => { + cleanupArtifacts(); + }); + + it('contract artifact has functions array', () => { + const artifact = JSON.parse(readFileSync(join(TARGET, 'simple_contract-SimpleContract.json'), 'utf-8')); + expect(Array.isArray(artifact.functions)).toBe(true); + expect(artifact.functions.length).toBeGreaterThan(0); + }); + + it('program artifact does not have functions', () => { + const artifact = JSON.parse(readFileSync(join(TARGET, 'simple_circuit.json'), 'utf-8')); + expect(artifact.functions).toBeUndefined(); + }); + + it('contract artifact was transpiled', () => { + const artifact = JSON.parse(readFileSync(join(TARGET, 'simple_contract-SimpleContract.json'), 'utf-8')); + expect(artifact.transpiled).toBe(true); + }); + + it('program artifact was not transpiled', () => { + const artifact = JSON.parse(readFileSync(join(TARGET, 'simple_circuit.json'), 'utf-8')); + expect(artifact.transpiled).toBeFalsy(); + }); + + it('codegen produced TypeScript for contract', () => { + expect(existsSync(join(CODEGEN_OUT, 'SimpleContract.ts'))).toBe(true); + }); + + it('codegen did not produce TypeScript for circuit', () => { + expect(existsSync(join(CODEGEN_OUT, 'SimpleCircuit.ts'))).toBe(false); + }); +}); + +function cleanupArtifacts() { + rmSync(TARGET, { recursive: true, force: true }); + rmSync(CODEGEN_OUT, { recursive: true, force: true }); + rmSync(join(WORKSPACE, 'codegenCache.json'), { force: true }); +} + +function runCompile() { + try { + execFileSync('node', [CLI, 'compile'], { cwd: WORKSPACE, stdio: 'pipe' }); + } catch (e: any) { + throw new Error(`compile failed:\n${e.stderr?.toString() ?? 
e.message}`); + } +} + +function runCodegen() { + try { + execFileSync('node', [CLI, 'codegen', 'target', '-o', 'codegen-output', '-f'], { + cwd: WORKSPACE, + stdio: 'pipe', + }); + } catch (e: any) { + throw new Error(`codegen failed:\n${e.stderr?.toString() ?? e.message}`); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/compile.ts b/yarn-project/aztec/src/cli/cmds/compile.ts new file mode 100644 index 000000000000..9737eeb9b312 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/compile.ts @@ -0,0 +1,80 @@ +import type { LogFn } from '@aztec/foundation/log'; + +import { execFileSync } from 'child_process'; +import type { Command } from 'commander'; +import { readFile, writeFile } from 'fs/promises'; + +import { readArtifactFiles } from './utils/artifacts.js'; +import { run } from './utils/spawn.js'; + +/** Returns paths to contract artifacts in the target directory. */ +async function collectContractArtifacts(): Promise { + let files; + try { + files = await readArtifactFiles('target'); + } catch (err: any) { + if (err?.message?.includes('does not exist')) { + return []; + } + throw err; + } + return files.filter(f => Array.isArray(f.content.functions)).map(f => f.filePath); +} + +/** Strips the `__aztec_nr_internals__` prefix from function names in contract artifacts. */ +async function stripInternalPrefixes(artifactPaths: string[]): Promise { + for (const path of artifactPaths) { + const artifact = JSON.parse(await readFile(path, 'utf-8')); + for (const fn of artifact.functions) { + if (typeof fn.name === 'string') { + fn.name = fn.name.replace(/^__aztec_nr_internals__/, ''); + } + } + await writeFile(path, JSON.stringify(artifact, null, 2) + '\n'); + } +} + +/** Compiles Aztec Noir contracts and postprocesses artifacts. */ +async function compileAztecContract(nargoArgs: string[], log: LogFn): Promise { + const nargo = process.env.NARGO ?? 'nargo'; + const bb = process.env.BB ?? 
'bb'; + + await run(nargo, ['compile', ...nargoArgs]); + + const artifacts = await collectContractArtifacts(); + + if (artifacts.length > 0) { + log('Postprocessing contracts...'); + const bbArgs = artifacts.flatMap(a => ['-i', a]); + await run(bb, ['aztec_process', ...bbArgs]); + + // TODO: This should be part of bb aztec_process! + await stripInternalPrefixes(artifacts); + } + + log('Compilation complete!'); +} + +export function injectCompileCommand(program: Command, log: LogFn): Command { + program + .command('compile') + .argument('[nargo-args...]') + .passThroughOptions() + .allowUnknownOption() + .description( + 'Compile Aztec Noir contracts using nargo and postprocess them to generate transpiled artifacts and verification keys. All options are forwarded to nargo compile.', + ) + .addHelpText('after', () => { + // Show nargo's own compile options so users see all available flags in one place. + const nargo = process.env.NARGO ?? 'nargo'; + try { + const output = execFileSync(nargo, ['compile', '--help'], { encoding: 'utf-8' }); + return `\nUnderlying nargo compile options:\n\n${output}`; + } catch { + return '\n(Run "nargo compile --help" to see available nargo options)'; + } + }) + .action((nargoArgs: string[]) => compileAztecContract(nargoArgs, log)); + + return program; +} diff --git a/yarn-project/aztec/src/cli/cmds/profile.ts b/yarn-project/aztec/src/cli/cmds/profile.ts new file mode 100644 index 000000000000..d7248025074a --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile.ts @@ -0,0 +1,25 @@ +import type { LogFn } from '@aztec/foundation/log'; + +import type { Command } from 'commander'; + +import { profileFlamegraph } from './profile_flamegraph.js'; +import { profileGates } from './profile_gates.js'; + +export function injectProfileCommand(program: Command, log: LogFn): Command { + const profile = program.command('profile').description('Profile compiled Aztec artifacts.'); + + profile + .command('gates') + .argument('[target-dir]', 'Path to 
the compiled artifacts directory', './target') + .description('Display gate counts for all compiled Aztec artifacts in a target directory.') + .action((targetDir: string) => profileGates(targetDir, log)); + + profile + .command('flamegraph') + .argument('', 'Path to the compiled contract artifact JSON') + .argument('', 'Name of the contract function to profile') + .description('Generate a gate count flamegraph SVG for a contract function.') + .action((artifactPath: string, functionName: string) => profileFlamegraph(artifactPath, functionName, log)); + + return program; +} diff --git a/yarn-project/aztec/src/cli/cmds/profile_flamegraph.test.ts b/yarn-project/aztec/src/cli/cmds/profile_flamegraph.test.ts new file mode 100644 index 000000000000..4a91bf417717 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile_flamegraph.test.ts @@ -0,0 +1,51 @@ +import { afterAll, beforeAll, describe, expect, it } from '@jest/globals'; +import { execFileSync } from 'child_process'; +import { existsSync, readFileSync, rmSync } from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const PACKAGE_ROOT = join(dirname(fileURLToPath(import.meta.url)), '../../..'); +const CLI = join(PACKAGE_ROOT, 'dest/bin/index.js'); +const WORKSPACE = join(PACKAGE_ROOT, 'test/mixed-workspace'); +const TARGET = join(WORKSPACE, 'target'); +const CONTRACT_ARTIFACT = join(TARGET, 'simple_contract-SimpleContract.json'); + +describe('aztec profile flamegraph', () => { + const svgPath = join(TARGET, 'simple_contract-SimpleContract-private_function-flamegraph.svg'); + + beforeAll(() => { + rmSync(TARGET, { recursive: true, force: true }); + runCompile(); + runFlamegraph(CONTRACT_ARTIFACT, 'private_function'); + }, 300_000); + + afterAll(() => { + rmSync(TARGET, { recursive: true, force: true }); + }); + + it('generates a valid flamegraph SVG', () => { + expect(existsSync(svgPath)).toBe(true); + const content = readFileSync(svgPath, 'utf-8'); + 
expect(content).toContain(''); + }); +}); + +function runCompile() { + try { + execFileSync('node', [CLI, 'compile'], { cwd: WORKSPACE, stdio: 'pipe' }); + } catch (e: any) { + throw new Error(`compile failed:\n${e.stderr?.toString() ?? e.message}`); + } +} + +function runFlamegraph(artifactPath: string, functionName: string) { + try { + execFileSync('node', [CLI, 'profile', 'flamegraph', artifactPath, functionName], { + encoding: 'utf-8', + stdio: 'pipe', + }); + } catch (e: any) { + throw new Error(`profile flamegraph failed:\n${e.stderr?.toString() ?? e.message}`); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/profile_flamegraph.ts b/yarn-project/aztec/src/cli/cmds/profile_flamegraph.ts new file mode 100644 index 000000000000..78b4743d715e --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile_flamegraph.ts @@ -0,0 +1,63 @@ +import type { LogFn } from '@aztec/foundation/log'; + +import { readFile, rename, rm, writeFile } from 'fs/promises'; +import { basename, dirname, join } from 'path'; + +import { makeFunctionArtifact } from './profile_utils.js'; +import type { CompiledArtifact } from './utils/artifacts.js'; +import { run } from './utils/spawn.js'; + +/** Generates a gate count flamegraph SVG for a single contract function. */ +export async function profileFlamegraph(artifactPath: string, functionName: string, log: LogFn): Promise { + const raw = await readFile(artifactPath, 'utf-8'); + const artifact: CompiledArtifact = JSON.parse(raw); + + if (!Array.isArray(artifact.functions)) { + throw new Error(`${artifactPath} does not appear to be a contract artifact (no functions array)`); + } + + const func = artifact.functions.find(f => f.name === functionName); + if (!func) { + const available = artifact.functions.map(f => f.name).join(', '); + throw new Error(`Function "${functionName}" not found in artifact. 
Available: ${available}`); + } + if (func.is_unconstrained) { + throw new Error(`Function "${functionName}" is unconstrained and cannot be profiled`); + } + + const outputDir = dirname(artifactPath); + const contractName = basename(artifactPath, '.json'); + const functionArtifact = join(outputDir, `${contractName}-${functionName}.json`); + + try { + await writeFile(functionArtifact, makeFunctionArtifact(artifact, func)); + + const profiler = process.env.PROFILER_PATH ?? 'noir-profiler'; + const bb = process.env.BB ?? 'bb'; + + await run(profiler, [ + 'gates', + '--artifact-path', + functionArtifact, + '--backend-path', + bb, + '--backend-gates-command', + 'gates', + '--output', + outputDir, + '--scheme', + 'chonk', + '--include_gates_per_opcode', + ]); + + // noir-profiler names the SVG using the internal function name which + // retains the __aztec_nr_internals__ prefix in the bytecode metadata. + const srcSvg = join(outputDir, `__aztec_nr_internals__${functionName}_gates.svg`); + const destSvg = join(outputDir, `${contractName}-${functionName}-flamegraph.svg`); + await rename(srcSvg, destSvg); + + log(`Flamegraph written to ${destSvg}`); + } finally { + await rm(functionArtifact, { force: true }); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/profile_gates.test.ts b/yarn-project/aztec/src/cli/cmds/profile_gates.test.ts new file mode 100644 index 000000000000..d933dc232f0f --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile_gates.test.ts @@ -0,0 +1,58 @@ +import { afterAll, beforeAll, describe, expect, it } from '@jest/globals'; +import { execFileSync } from 'child_process'; +import { rmSync } from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const PACKAGE_ROOT = join(dirname(fileURLToPath(import.meta.url)), '../../..'); +const CLI = join(PACKAGE_ROOT, 'dest/bin/index.js'); +const WORKSPACE = join(PACKAGE_ROOT, 'test/mixed-workspace'); +const TARGET = join(WORKSPACE, 'target'); + +describe('aztec profile 
gates', () => { + let gatesOutput: string; + + beforeAll(() => { + rmSync(TARGET, { recursive: true, force: true }); + runCompile(); + gatesOutput = runProfile('gates'); + }, 300_000); + + afterAll(() => { + rmSync(TARGET, { recursive: true, force: true }); + }); + + it('prints gate counts for both contract functions', () => { + expect(gatesOutput).toContain('simple_contract-SimpleContract::private_function'); + expect(gatesOutput).toContain('simple_contract-SimpleContract::another_private_function'); + }); + + it('prints gate counts for both plain circuits', () => { + expect(gatesOutput).toContain('simple_circuit'); + expect(gatesOutput).toContain('simple_circuit_2'); + }); + + it('gate counts are positive integers', () => { + const counts = [...gatesOutput.matchAll(/(\d[\d,]*)\s*$/gm)].map(m => parseInt(m[1].replace(/,/g, ''), 10)); + expect(counts.length).toBeGreaterThanOrEqual(4); + for (const count of counts) { + expect(count).toBeGreaterThan(0); + } + }); +}); + +function runCompile() { + try { + execFileSync('node', [CLI, 'compile'], { cwd: WORKSPACE, stdio: 'pipe' }); + } catch (e: any) { + throw new Error(`compile failed:\n${e.stderr?.toString() ?? e.message}`); + } +} + +function runProfile(subcommand: string) { + try { + return execFileSync('node', [CLI, 'profile', subcommand, TARGET], { encoding: 'utf-8', stdio: 'pipe' }); + } catch (e: any) { + throw new Error(`profile ${subcommand} failed:\n${e.stderr?.toString() ?? 
e.message}`); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/profile_gates.ts b/yarn-project/aztec/src/cli/cmds/profile_gates.ts new file mode 100644 index 000000000000..19770ba91f1e --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile_gates.ts @@ -0,0 +1,67 @@ +import { asyncPool } from '@aztec/foundation/async-pool'; +import type { LogFn } from '@aztec/foundation/log'; + +import { execFile as execFileCb } from 'child_process'; +import { rm } from 'fs/promises'; +import { promisify } from 'util'; + +import { MAX_CONCURRENT, discoverArtifacts } from './profile_utils.js'; + +const execFile = promisify(execFileCb); + +interface GateCountResult { + name: string; + gateCount: number; +} + +/** Parses circuit_size from bb gates JSON output: { "functions": [{ "circuit_size": N }] } */ +function parseGateCount(stdout: string): number { + const parsed = JSON.parse(stdout); + const size = parsed?.functions?.[0]?.circuit_size; + if (typeof size !== 'number') { + throw new Error('Failed to parse circuit_size from bb gates output'); + } + return size; +} + +/** Runs bb gates on a single artifact file and returns the gate count. */ +async function getGateCount(bb: string, artifactPath: string): Promise { + const { stdout } = await execFile(bb, ['gates', '--scheme', 'chonk', '-b', artifactPath]); + return parseGateCount(stdout); +} + +/** Profiles all compiled artifacts in a target directory and prints gate counts. */ +export async function profileGates(targetDir: string, log: LogFn): Promise { + const bb = process.env.BB ?? 
'bb'; + const { artifacts, tmpDir } = await discoverArtifacts(targetDir); + + if (artifacts.length === 0) { + log('No artifacts found in target directory.'); + return; + } + + try { + const results: GateCountResult[] = await asyncPool(MAX_CONCURRENT, artifacts, async artifact => ({ + name: artifact.name, + gateCount: await getGateCount(bb, artifact.filePath), + })); + results.sort((a, b) => a.name.localeCompare(b.name)); + + if (results.length === 0) { + log('No constrained circuits found.'); + return; + } + + const maxNameLen = Math.max(...results.map(r => r.name.length)); + log(''); + log('Gate counts:'); + log('-'.repeat(maxNameLen + 16)); + for (const { name, gateCount } of results) { + log(`${name.padEnd(maxNameLen)} ${gateCount.toLocaleString().padStart(12)}`); + } + log('-'.repeat(maxNameLen + 16)); + log(`Total: ${results.length} circuit(s)`); + } finally { + await rm(tmpDir, { recursive: true, force: true }); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/profile_utils.ts b/yarn-project/aztec/src/cli/cmds/profile_utils.ts new file mode 100644 index 000000000000..e604419bc631 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/profile_utils.ts @@ -0,0 +1,58 @@ +import { mkdtemp, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import type { CompiledArtifact, ContractFunction } from './utils/artifacts.js'; +import { readArtifactFiles } from './utils/artifacts.js'; + +export const MAX_CONCURRENT = 4; + +export interface DiscoveredArtifact { + name: string; + filePath: string; + type: 'contract-function' | 'program'; +} + +/** + * Reads a target directory and returns a list of discovered artifacts with temp files + * created for contract functions. Caller must clean up tmpDir when done. 
+ */ +export async function discoverArtifacts( + targetDir: string, +): Promise<{ artifacts: DiscoveredArtifact[]; tmpDir: string }> { + const files = await readArtifactFiles(targetDir); + const tmpDir = await mkdtemp(join(tmpdir(), 'aztec-profile-')); + const artifacts: DiscoveredArtifact[] = []; + + for (const file of files) { + if (Array.isArray(file.content.functions)) { + for (const func of file.content.functions) { + if (!func.bytecode || func.is_unconstrained) { + continue; + } + const name = `${file.name}::${func.name}`; + const tmpPath = join(tmpDir, `${file.name}-${func.name}.json`); + await writeFile(tmpPath, makeFunctionArtifact(file.content, func)); + artifacts.push({ name, filePath: tmpPath, type: 'contract-function' }); + } + } else if (file.content.bytecode) { + artifacts.push({ name: file.name, filePath: file.filePath, type: 'program' }); + } + } + + return { artifacts, tmpDir }; +} + +/** Extracts a contract function as a standalone program artifact JSON string. */ +export function makeFunctionArtifact(artifact: CompiledArtifact, func: ContractFunction) { + /* eslint-disable camelcase */ + return JSON.stringify({ + noir_version: artifact.noir_version, + hash: 0, + abi: func.abi, + bytecode: func.bytecode, + debug_symbols: func.debug_symbols, + file_map: artifact.file_map, + }); + /* eslint-enable camelcase */ +} diff --git a/yarn-project/aztec/src/cli/cmds/utils/artifacts.ts b/yarn-project/aztec/src/cli/cmds/utils/artifacts.ts new file mode 100644 index 000000000000..104bbed25145 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/artifacts.ts @@ -0,0 +1,44 @@ +import { readFile, readdir } from 'fs/promises'; +import { join } from 'path'; + +export interface CompiledArtifact { + noir_version: string; + file_map: unknown; + functions: ContractFunction[]; + bytecode?: string; +} + +export interface ContractFunction { + name: string; + abi: unknown; + bytecode: string; + debug_symbols: unknown; + is_unconstrained?: boolean; +} + +export 
interface ArtifactFile { + name: string; + filePath: string; + content: CompiledArtifact; +} + +/** Reads all JSON artifact files from a target directory and returns their parsed contents. */ +export async function readArtifactFiles(targetDir: string): Promise { + let entries: string[]; + try { + entries = (await readdir(targetDir)).filter(f => f.endsWith('.json')); + } catch (err: any) { + if (err?.code === 'ENOENT') { + throw new Error(`Target directory '${targetDir}' does not exist. Compile first with 'aztec compile'.`); + } + throw err; + } + + const artifacts: ArtifactFile[] = []; + for (const file of entries) { + const filePath = join(targetDir, file); + const content = JSON.parse(await readFile(filePath, 'utf-8')) as CompiledArtifact; + artifacts.push({ name: file.replace('.json', ''), filePath, content }); + } + return artifacts; +} diff --git a/yarn-project/aztec/src/cli/cmds/utils/spawn.ts b/yarn-project/aztec/src/cli/cmds/utils/spawn.ts new file mode 100644 index 000000000000..53514e06d931 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/spawn.ts @@ -0,0 +1,16 @@ +import { spawn } from 'child_process'; + +/** Spawns a command with inherited stdio and rejects on non-zero exit. 
*/ +export function run(cmd: string, args: string[]): Promise { + return new Promise((resolve, reject) => { + const child = spawn(cmd, args, { stdio: 'inherit' }); + child.on('error', reject); + child.on('close', code => { + if (code !== 0) { + reject(new Error(`${cmd} exited with code ${code}`)); + } else { + resolve(); + } + }); + }); +} diff --git a/yarn-project/aztec/src/mainnet_compatibility.test.ts b/yarn-project/aztec/src/mainnet_compatibility.test.ts index 5dde99fdd902..d05246bea08c 100644 --- a/yarn-project/aztec/src/mainnet_compatibility.test.ts +++ b/yarn-project/aztec/src/mainnet_compatibility.test.ts @@ -9,7 +9,7 @@ import { getGenesisValues } from '@aztec/world-state/testing'; */ describe('Mainnet compatibility', () => { it('has expected VK tree root', () => { - const expectedRoots = [Fr.fromHexString('0x1621e3d2e4f04a6f0318b2099cb1e0afd60261055402e2f3c9ceee28849fb014')]; + const expectedRoots = [Fr.fromHexString('0x2d0b15497929f5150c4c383993555456e60d27121f4ac2cb9ef880319f5f9a6f')]; expect(expectedRoots).toContainEqual(getVKTreeRoot()); }); it('has expected Protocol Contracts tree root', () => { diff --git a/yarn-project/aztec/src/testnet_compatibility.test.ts b/yarn-project/aztec/src/testnet_compatibility.test.ts index 6f339176e77c..5514289a570c 100644 --- a/yarn-project/aztec/src/testnet_compatibility.test.ts +++ b/yarn-project/aztec/src/testnet_compatibility.test.ts @@ -11,7 +11,7 @@ import { getGenesisValues } from '@aztec/world-state/testing'; */ describe('Testnet compatibility', () => { it('has expected VK tree root', () => { - const expectedRoots = [Fr.fromHexString('0x1621e3d2e4f04a6f0318b2099cb1e0afd60261055402e2f3c9ceee28849fb014')]; + const expectedRoots = [Fr.fromHexString('0x2d0b15497929f5150c4c383993555456e60d27121f4ac2cb9ef880319f5f9a6f')]; expect(expectedRoots).toContainEqual(getVKTreeRoot()); }); it('has expected Protocol Contracts hash', () => { diff --git a/yarn-project/aztec/test/mixed-workspace/.gitignore 
b/yarn-project/aztec/test/mixed-workspace/.gitignore new file mode 100644 index 000000000000..8515795b7b4d --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/.gitignore @@ -0,0 +1,3 @@ +target/ +codegen-output/ +codegenCache.json diff --git a/yarn-project/aztec/test/mixed-workspace/Nargo.toml b/yarn-project/aztec/test/mixed-workspace/Nargo.toml new file mode 100644 index 000000000000..774733593fd3 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["simple_contract", "simple_circuit", "simple_circuit_2"] diff --git a/yarn-project/aztec/test/mixed-workspace/README.md b/yarn-project/aztec/test/mixed-workspace/README.md new file mode 100644 index 000000000000..83f385759655 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/README.md @@ -0,0 +1,26 @@ +# Mixed Workspace Test + +Regression test for `aztec compile` and `aztec codegen` in Nargo workspaces that +contain both Aztec contracts and plain Noir circuits. + +## Problem + +Both `aztec compile` and `aztec codegen` assumed every `.json` in `target/` is a +contract artifact. When a workspace also contains `type = "bin"` packages, the +resulting program artifacts lack `functions`/`name` fields, causing: + +- `bb aztec_process` to fail trying to transpile a program artifact +- The jq postprocessing step to fail on missing `.functions` +- `codegen` to crash calling `loadContractArtifact()` on a program artifact + +## What the test checks + +`yarn-project/aztec/src/cli/cmds/compile.test.ts` runs compile and codegen on +this workspace and verifies: + +1. Compilation succeeds without errors +2. Both artifacts exist in `target/` +3. The contract artifact was postprocessed (has `transpiled` field) +4. The program artifact was not modified (no `transpiled` field) +5. Codegen generates a TypeScript wrapper only for the contract +6. 
No TypeScript wrapper is generated for the program artifact diff --git a/yarn-project/aztec/test/mixed-workspace/simple_circuit/Nargo.toml b/yarn-project/aztec/test/mixed-workspace/simple_circuit/Nargo.toml new file mode 100644 index 000000000000..a74e4a284148 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_circuit/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "simple_circuit" +authors = [""] +compiler_version = ">=0.25.0" +type = "bin" + +[dependencies] diff --git a/yarn-project/aztec/test/mixed-workspace/simple_circuit/src/main.nr b/yarn-project/aztec/test/mixed-workspace/simple_circuit/src/main.nr new file mode 100644 index 000000000000..e149eb109fca --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_circuit/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field) { + assert(x != 0); +} diff --git a/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/Nargo.toml b/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/Nargo.toml new file mode 100644 index 000000000000..94ba379302a1 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "simple_circuit_2" +authors = [""] +compiler_version = ">=0.25.0" +type = "bin" + +[dependencies] diff --git a/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/src/main.nr b/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/src/main.nr new file mode 100644 index 000000000000..a51a0a7f9cf0 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_circuit_2/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: Field) { + assert(x != y); +} diff --git a/yarn-project/aztec/test/mixed-workspace/simple_contract/Nargo.toml b/yarn-project/aztec/test/mixed-workspace/simple_contract/Nargo.toml new file mode 100644 index 000000000000..681c51353ea5 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_contract/Nargo.toml @@ -0,0 +1,8 @@ +[package] +name = "simple_contract" +authors = [""] +compiler_version = 
">=0.25.0" +type = "contract" + +[dependencies] +aztec = { path = "../../../../../noir-projects/aztec-nr/aztec" } diff --git a/yarn-project/aztec/test/mixed-workspace/simple_contract/src/main.nr b/yarn-project/aztec/test/mixed-workspace/simple_contract/src/main.nr new file mode 100644 index 000000000000..478d776dad92 --- /dev/null +++ b/yarn-project/aztec/test/mixed-workspace/simple_contract/src/main.nr @@ -0,0 +1,16 @@ +use aztec::macros::aztec; + +#[aztec] +pub contract SimpleContract { + use aztec::macros::functions::external; + + #[external("private")] + fn private_function() -> Field { + 0 + } + + #[external("private")] + fn another_private_function(x: Field) -> Field { + x + } +} diff --git a/yarn-project/bootstrap.sh b/yarn-project/bootstrap.sh index acacda5639f8..a7992576d9e5 100755 --- a/yarn-project/bootstrap.sh +++ b/yarn-project/bootstrap.sh @@ -221,6 +221,9 @@ function test_cmds { # Uses mocha for browser tests, so we have to treat it differently. echo "$hash:ISOLATE=1 cd yarn-project/kv-store && yarn test" + # Aztec CLI tests + aztec/bootstrap.sh test_cmds + if [[ "${TARGET_BRANCH:-}" =~ ^v[0-9]+$ ]]; then echo "$hash yarn-project/scripts/run_test.sh aztec/src/testnet_compatibility.test.ts" echo "$hash yarn-project/scripts/run_test.sh aztec/src/mainnet_compatibility.test.ts" diff --git a/yarn-project/builder/src/contract-interface-gen/codegen.ts b/yarn-project/builder/src/contract-interface-gen/codegen.ts index 5321c5070d22..1137b81a6a6c 100644 --- a/yarn-project/builder/src/contract-interface-gen/codegen.ts +++ b/yarn-project/builder/src/contract-interface-gen/codegen.ts @@ -50,6 +50,12 @@ async function generateFromNoirAbi(outputPath: string, noirAbiPath: string, opts const file = await readFile(noirAbiPath, 'utf8'); const contract = JSON.parse(file); + + if (!Array.isArray(contract.functions)) { + console.log(`${fileName} is not a contract artifact. 
Skipping.`); + return; + } + const aztecAbi = loadContractArtifact(contract); await mkdir(outputPath, { recursive: true }); diff --git a/yarn-project/constants/src/constants.gen.ts b/yarn-project/constants/src/constants.gen.ts index 99ea3d02afff..6d63cc576b67 100644 --- a/yarn-project/constants/src/constants.gen.ts +++ b/yarn-project/constants/src/constants.gen.ts @@ -410,7 +410,7 @@ export const AVM_MAX_REGISTERS = 6; export const AVM_ADDRESSING_BASE_RESOLUTION_L2_GAS = 3; export const AVM_ADDRESSING_INDIRECT_L2_GAS = 3; export const AVM_ADDRESSING_RELATIVE_L2_GAS = 3; -export const L2_GAS_PER_NOTE_HASH = 2700; +export const L2_GAS_PER_NOTE_HASH = 9200; export const L2_GAS_PER_NULLIFIER = 16000; export const L2_GAS_PER_L2_TO_L1_MSG = 5200; export const L2_GAS_PER_PRIVATE_LOG = 2500; diff --git a/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts index 5763e03a311e..3857b3d2fe95 100644 --- a/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts +++ b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts @@ -7,12 +7,12 @@ import type { BatchedMethod, ContractClassMetadata, ContractMetadata, + ExecuteUtilityOptions, PrivateEvent, PrivateEventFilter, ProfileOptions, SendOptions, SimulateOptions, - SimulateUtilityOptions, Wallet, WalletCapabilities, } from '@aztec/aztec.js/wallet'; @@ -29,7 +29,7 @@ import type { ContractArtifact, EventMetadataDefinition, FunctionCall } from '@a import type { AuthWitness } from '@aztec/stdlib/auth-witness'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; -import type { ExecutionPayload, TxProfileResult, TxSimulationResult, UtilitySimulationResult } from '@aztec/stdlib/tx'; +import type { ExecutionPayload, TxProfileResult, TxSimulationResult, UtilityExecutionResult } from '@aztec/stdlib/tx'; import { Tx } from '@aztec/stdlib/tx'; import { Worker } from 'worker_threads'; @@ 
-169,8 +169,8 @@ export class WorkerWallet implements Wallet { return this.call('simulateTx', exec, opts); } - simulateUtility(call: FunctionCall, opts: SimulateUtilityOptions): Promise { - return this.call('simulateUtility', call, opts); + executeUtility(call: FunctionCall, opts: ExecuteUtilityOptions): Promise { + return this.call('executeUtility', call, opts); } profileTx(exec: ExecutionPayload, opts: ProfileOptions): Promise { diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts index 828a351bf5fb..593dd1831a3d 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts @@ -147,7 +147,7 @@ describe('L1TxUtils', () => { address: l1Client.account.address, }); - // Next send fails at sendRawTransaction (e.g. network error) + // Next send fails at sendRawTransaction (e.g. network error / 429) const originalSendRawTransaction = l1Client.sendRawTransaction.bind(l1Client); using _sendSpy = jest .spyOn(l1Client, 'sendRawTransaction') @@ -163,6 +163,29 @@ describe('L1TxUtils', () => { expect((await l1Client.getTransaction({ hash: txHash })).nonce).toBe(expectedNonce); }, 30_000); + it('bumps nonce when getTransactionCount returns a stale value after a successful send', async () => { + // Send a successful tx first to advance the chain nonce + await gasUtils.sendAndMonitorTransaction(request); + + const expectedNonce = await l1Client.getTransactionCount({ + blockTag: 'pending', + address: l1Client.account.address, + }); + + // Simulate a stale fallback RPC node that returns the pre-send nonce + const originalGetTransactionCount = l1Client.getTransactionCount.bind(l1Client); + using _spy = jest + .spyOn(l1Client, 'getTransactionCount') + .mockImplementationOnce(() => Promise.resolve(expectedNonce - 1)) // stale: one behind + .mockImplementation(originalGetTransactionCount); + + // Despite the stale count, the send should 
use lastSentNonce+1 = expectedNonce + const { txHash, state } = await gasUtils.sendTransaction(request); + + expect(state.nonce).toBe(expectedNonce); + expect((await l1Client.getTransaction({ hash: txHash })).nonce).toBe(expectedNonce); + }, 30_000); + // Regression for TMNT-312 it('speed-up of blob tx sets non-zero maxFeePerBlobGas', async () => { await cheatCodes.setAutomine(false); diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts index 48b8dfc41aa5..f6292311fc7e 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts @@ -45,6 +45,8 @@ const MAX_L1_TX_STATES = 32; export class L1TxUtils extends ReadOnlyL1TxUtils { protected txs: L1TxState[] = []; + /** Last nonce successfully sent to the chain. Used as a lower bound when a fallback RPC node returns a stale count. */ + private lastSentNonce: number | undefined; /** Tx delayer for testing. Only set when enableDelayer config is true. */ public delayer?: Delayer; /** KZG instance for blob operations. */ @@ -105,6 +107,11 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { this.metrics?.recordMinedTx(l1TxState, new Date(l1Timestamp)); } else if (newState === TxUtilsState.NOT_MINED) { this.metrics?.recordDroppedTx(l1TxState); + // The tx was dropped: the chain nonce reverted to l1TxState.nonce, so our lower bound is + // no longer valid. Clear it so the next send fetches the real nonce from the chain. 
+ if (this.lastSentNonce === l1TxState.nonce) { + this.lastSentNonce = undefined; + } } // Update state in the store @@ -246,7 +253,11 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { ); } - const nonce = await this.client.getTransactionCount({ address: account, blockTag: 'pending' }); + const chainNonce = await this.client.getTransactionCount({ address: account, blockTag: 'pending' }); + // If a fallback RPC node returns a stale count (lower than what we last sent), use our + // local lower bound to avoid sending a duplicate of an already-pending transaction. + const nonce = + this.lastSentNonce !== undefined && chainNonce <= this.lastSentNonce ? this.lastSentNonce + 1 : chainNonce; const baseState = { request, gasLimit, blobInputs, gasPrice, nonce }; const txData = this.makeTxData(baseState, { isCancelTx: false }); @@ -254,6 +265,8 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { // Send the new tx const signedRequest = await this.prepareSignedTransaction(txData); const txHash = await this.client.sendRawTransaction({ serializedTransaction: signedRequest }); + // Update after tx is sent successfully + this.lastSentNonce = nonce; // Create the new state for monitoring const l1TxState: L1TxState = { diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts index 93a5920cb80d..c94818623302 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts @@ -134,7 +134,7 @@ describe('epoch-proving-job', () => { publicProcessor.process.mockImplementation(async txs => { const txsArray = await toArray(txs); const processedTxs = await Promise.all(txsArray.map(tx => mock({ hash: tx.getTxHash() }))); - return [processedTxs, [], txsArray, [], 0]; + return [processedTxs, [], txsArray, [], 0, []]; }); }); @@ -179,7 +179,7 @@ describe('epoch-proving-job', () => { publicProcessor.process.mockImplementation(async txs => { 
const txsArray = await toArray(txs); const errors = txsArray.map(tx => ({ error: new Error('Failed to process tx'), tx })); - return [[], errors, [], [], 0]; + return [[], errors, [], [], 0, []]; }); const job = createJob(); @@ -190,7 +190,7 @@ describe('epoch-proving-job', () => { }); it('fails if does not process all txs for a block', async () => { - publicProcessor.process.mockImplementation(_txs => Promise.resolve([[], [], [], [], 0])); + publicProcessor.process.mockImplementation(_txs => Promise.resolve([[], [], [], [], 0, []])); const job = createJob(); await job.run(); diff --git a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts index c1c9f56e172a..e4e783f26b02 100644 --- a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts +++ b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts @@ -361,7 +361,7 @@ export class ContractFunctionSimulator { ); }); - this.log.verbose(`Utility simulation for ${call.to}.${call.selector} completed`); + this.log.verbose(`Utility execution for ${call.to}.${call.selector} completed`); return witnessMapToFields(acirExecutionResult.returnWitness); } catch (err) { throw createSimulationError(err instanceof Error ? 
err : new Error('Unknown error during private execution')); diff --git a/yarn-project/pxe/src/contract_function_simulator/execution_tagging_index_cache.ts b/yarn-project/pxe/src/contract_function_simulator/execution_tagging_index_cache.ts index 85a45f7fb108..37ffc83016d9 100644 --- a/yarn-project/pxe/src/contract_function_simulator/execution_tagging_index_cache.ts +++ b/yarn-project/pxe/src/contract_function_simulator/execution_tagging_index_cache.ts @@ -1,18 +1,18 @@ -import { DirectionalAppTaggingSecret, type PreTag } from '@aztec/stdlib/logs'; +import { ExtendedDirectionalAppTaggingSecret, type PreTag } from '@aztec/stdlib/logs'; /** - * A map that stores the tagging index for a given directional app tagging secret. + * A map that stores the tagging index for a given extended directional app tagging secret. * Note: The directional app tagging secret is unique for a (sender, recipient, contract) tuple while the direction * of sender -> recipient matters. */ export class ExecutionTaggingIndexCache { private taggingIndexMap: Map = new Map(); - public getLastUsedIndex(secret: DirectionalAppTaggingSecret): number | undefined { + public getLastUsedIndex(secret: ExtendedDirectionalAppTaggingSecret): number | undefined { return this.taggingIndexMap.get(secret.toString()); } - public setLastUsedIndex(secret: DirectionalAppTaggingSecret, index: number) { + public setLastUsedIndex(secret: ExtendedDirectionalAppTaggingSecret, index: number) { const currentValue = this.taggingIndexMap.get(secret.toString()); if (currentValue !== undefined && currentValue !== index - 1) { throw new Error(`Invalid tagging index update. 
Current value: ${currentValue}, new value: ${index}`); @@ -25,7 +25,7 @@ export class ExecutionTaggingIndexCache { */ public getUsedPreTags(): PreTag[] { return Array.from(this.taggingIndexMap.entries()).map(([secret, index]) => ({ - secret: DirectionalAppTaggingSecret.fromString(secret), + extendedSecret: ExtendedDirectionalAppTaggingSecret.fromString(secret), index, })); } diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts index 7c72b6407c86..a1b2ada7881e 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts @@ -14,7 +14,7 @@ import { import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { siloNullifier } from '@aztec/stdlib/hash'; import { PrivateContextInputs } from '@aztec/stdlib/kernel'; -import { type ContractClassLog, DirectionalAppTaggingSecret, type PreTag } from '@aztec/stdlib/logs'; +import { type ContractClassLog, ExtendedDirectionalAppTaggingSecret, type PreTag } from '@aztec/stdlib/logs'; import { Tag } from '@aztec/stdlib/logs'; import { Note, type NoteStatus } from '@aztec/stdlib/note'; import { @@ -216,25 +216,29 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP * @returns An app tag to be used in a log. 
*/ public async privateGetNextAppTagAsSender(sender: AztecAddress, recipient: AztecAddress): Promise { - const secret = await this.#calculateDirectionalAppTaggingSecret(this.contractAddress, sender, recipient); + const extendedSecret = await this.#calculateExtendedDirectionalAppTaggingSecret( + this.contractAddress, + sender, + recipient, + ); - const index = await this.#getIndexToUseForSecret(secret); + const index = await this.#getIndexToUseForSecret(extendedSecret); this.log.debug( `Incrementing tagging index for sender: ${sender}, recipient: ${recipient}, contract: ${this.contractAddress} to ${index}`, ); - this.taggingIndexCache.setLastUsedIndex(secret, index); + this.taggingIndexCache.setLastUsedIndex(extendedSecret, index); - return Tag.compute({ secret, index }); + return Tag.compute({ extendedSecret, index }); } - async #calculateDirectionalAppTaggingSecret( + async #calculateExtendedDirectionalAppTaggingSecret( contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress, ) { const senderCompleteAddress = await this.getCompleteAddressOrFail(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); - return DirectionalAppTaggingSecret.compute( + return ExtendedDirectionalAppTaggingSecret.compute( senderCompleteAddress, senderIvsk, recipient, @@ -243,7 +247,7 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP ); } - async #getIndexToUseForSecret(secret: DirectionalAppTaggingSecret): Promise { + async #getIndexToUseForSecret(secret: ExtendedDirectionalAppTaggingSecret): Promise { // If we have the tagging index in the cache, we use it. If not we obtain it from the execution data provider. const lastUsedIndexInTx = this.taggingIndexCache.getLastUsedIndex(secret); @@ -255,7 +259,6 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP // that'd be wasteful as most tagging secrets are not used in each tx. 
await syncSenderTaggingIndexes( secret, - this.contractAddress, this.aztecNode, this.senderTaggingStore, await this.anchorBlockHeader.hash(), diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts index 00242b8bb925..f957d44326a8 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts @@ -3,7 +3,7 @@ import type { BlockNumber } from '@aztec/foundation/branded-types'; import { Aes128 } from '@aztec/foundation/crypto/aes128'; import { Fr } from '@aztec/foundation/curves/bn254'; import { Point } from '@aztec/foundation/curves/grumpkin'; -import { LogLevels, type Logger, applyStringFormatting, createLogger } from '@aztec/foundation/log'; +import { LogLevels, type Logger, createLogger } from '@aztec/foundation/log'; import type { MembershipWitness } from '@aztec/foundation/trees'; import type { KeyStore } from '@aztec/key-store'; import type { AuthWitness } from '@aztec/stdlib/auth-witness'; @@ -21,6 +21,7 @@ import { MerkleTreeId, type NullifierMembershipWitness, PublicDataWitness } from import type { BlockHeader, Capsule } from '@aztec/stdlib/tx'; import type { AccessScopes } from '../../access_scopes.js'; +import { createContractLogger, logContractMessage } from '../../contract_logging.js'; import { EventService } from '../../events/event_service.js'; import { LogService } from '../../logs/log_service.js'; import { NoteService } from '../../notes/note_service.js'; @@ -402,12 +403,13 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra */ async #getContractLogger(): Promise { if (!this.contractLogger) { - const addrAbbrev = this.contractAddress.toString().slice(0, 10); - const name = await this.contractStore.getDebugContractName(this.contractAddress); - const module = name ? 
`contract_log::${name}(${addrAbbrev})` : `contract_log::${addrAbbrev}`; // Purpose of instanceId is to distinguish logs from different instances of the same component. It makes sense // to re-use jobId as instanceId here as executions of different PXE jobs are isolated. - this.contractLogger = createLogger(module, { instanceId: this.jobId }); + this.contractLogger = await createContractLogger( + this.contractAddress, + addr => this.contractStore.getDebugContractName(addr), + { instanceId: this.jobId }, + ); } return this.contractLogger; } @@ -416,9 +418,8 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra if (!LogLevels[level]) { throw new Error(`Invalid log level: ${level}`); } - const levelName = LogLevels[level]; const logger = await this.#getContractLogger(); - logger[levelName](`${applyStringFormatting(message, fields)}`); + logContractMessage(logger, LogLevels[level], message, fields); } public async utilityFetchTaggedLogs(pendingTaggedLogArrayBaseSlot: Fr) { diff --git a/yarn-project/pxe/src/contract_logging.ts b/yarn-project/pxe/src/contract_logging.ts new file mode 100644 index 000000000000..cb32e2026fa1 --- /dev/null +++ b/yarn-project/pxe/src/contract_logging.ts @@ -0,0 +1,39 @@ +import type { Fr } from '@aztec/foundation/curves/bn254'; +import { type LogLevel, type Logger, applyStringFormatting, createLogger } from '@aztec/foundation/log'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { DebugLog } from '@aztec/stdlib/logs'; + +/** Resolves a contract address to a human-readable name, if available. */ +export type ContractNameResolver = (address: AztecAddress) => Promise; + +/** + * Creates a logger whose output is prefixed with `contract_log::()`. 
+ */ +export async function createContractLogger( + contractAddress: AztecAddress, + getContractName: ContractNameResolver, + options?: { instanceId?: string }, +): Promise { + const addrAbbrev = contractAddress.toString().slice(0, 10); + const name = await getContractName(contractAddress); + const module = name ? `contract_log::${name}(${addrAbbrev})` : `contract_log::Unknown(${addrAbbrev})`; + return createLogger(module, options); +} + +/** + * Formats and emits a single contract log message through the given logger. + */ +export function logContractMessage(logger: Logger, level: LogLevel, message: string, fields: Fr[]): void { + logger[level](applyStringFormatting(message, fields)); +} + +/** + * Displays debug logs collected during public function simulation, + * using the `contract_log::` prefixed logger format. + */ +export async function displayDebugLogs(debugLogs: DebugLog[], getContractName: ContractNameResolver): Promise { + for (const log of debugLogs) { + const logger = await createContractLogger(log.contractAddress, getContractName); + logContractMessage(logger, log.level, log.message, log.fields); + } +} diff --git a/yarn-project/pxe/src/debug/pxe_debug_utils.ts b/yarn-project/pxe/src/debug/pxe_debug_utils.ts index 2520433060c3..e5504328a611 100644 --- a/yarn-project/pxe/src/debug/pxe_debug_utils.ts +++ b/yarn-project/pxe/src/debug/pxe_debug_utils.ts @@ -18,7 +18,7 @@ import type { NoteStore } from '../storage/note_store/note_store.js'; export class PXEDebugUtils { #putJobInQueue!: (job: (jobId: string) => Promise) => Promise; #getSimulatorForTx!: (overrides?: { contracts?: ContractOverrides }) => ContractFunctionSimulator; - #simulateUtility!: ( + #executeUtility!: ( contractFunctionSimulator: ContractFunctionSimulator, call: FunctionCall, authWitnesses: AuthWitness[] | undefined, @@ -37,7 +37,7 @@ export class PXEDebugUtils { public setPXEHelpers( putJobInQueue: (job: (jobId: string) => Promise) => Promise, getSimulatorForTx: (overrides?: { 
contracts?: ContractOverrides }) => ContractFunctionSimulator, - simulateUtility: ( + executeUtility: ( contractFunctionSimulator: ContractFunctionSimulator, call: FunctionCall, authWitnesses: AuthWitness[] | undefined, @@ -47,7 +47,7 @@ export class PXEDebugUtils { ) { this.#putJobInQueue = putJobInQueue; this.#getSimulatorForTx = getSimulatorForTx; - this.#simulateUtility = simulateUtility; + this.#executeUtility = executeUtility; } /** @@ -73,7 +73,7 @@ export class PXEDebugUtils { filter.contractAddress, null, async (privateSyncCall, execScopes) => - await this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), + await this.#executeUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), anchorBlockHeader, jobId, filter.scopes, diff --git a/yarn-project/pxe/src/entrypoints/client/bundle/index.ts b/yarn-project/pxe/src/entrypoints/client/bundle/index.ts index e532ec2b7b8a..d854f0abf873 100644 --- a/yarn-project/pxe/src/entrypoints/client/bundle/index.ts +++ b/yarn-project/pxe/src/entrypoints/client/bundle/index.ts @@ -3,6 +3,7 @@ export * from '../../../notes_filter.js'; export * from '../../../pxe.js'; export * from '../../../config/index.js'; export * from '../../../error_enriching.js'; +export * from '../../../contract_logging.js'; export * from '../../../storage/index.js'; export * from './utils.js'; export type { PXECreationOptions } from '../../pxe_creation_options.js'; diff --git a/yarn-project/pxe/src/entrypoints/client/lazy/index.ts b/yarn-project/pxe/src/entrypoints/client/lazy/index.ts index 5efe9b4e4ec6..17b4025cbf74 100644 --- a/yarn-project/pxe/src/entrypoints/client/lazy/index.ts +++ b/yarn-project/pxe/src/entrypoints/client/lazy/index.ts @@ -4,5 +4,6 @@ export * from '../../../pxe.js'; export * from '../../../config/index.js'; export * from '../../../storage/index.js'; export * from '../../../error_enriching.js'; +export * from '../../../contract_logging.js'; export * from './utils.js'; export 
{ type PXECreationOptions } from '../../pxe_creation_options.js'; diff --git a/yarn-project/pxe/src/logs/log_service.ts b/yarn-project/pxe/src/logs/log_service.ts index 55a90c779a4b..7da90c092a62 100644 --- a/yarn-project/pxe/src/logs/log_service.ts +++ b/yarn-project/pxe/src/logs/log_service.ts @@ -3,7 +3,13 @@ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundatio import type { KeyStore } from '@aztec/key-store'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, PendingTaggedLog, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; +import { + ExtendedDirectionalAppTaggingSecret, + PendingTaggedLog, + SiloedTag, + Tag, + TxScopedL2Log, +} from '@aztec/stdlib/logs'; import type { BlockHeader } from '@aztec/stdlib/tx'; import type { AccessScopes } from '../access_scopes.js'; @@ -41,7 +47,7 @@ export class LogService { logRetrievalRequests.map(async request => { const [publicLog, privateLog] = await Promise.all([ this.#getPublicLogByTag(request.tag, request.contractAddress), - this.#getPrivateLogByTag(await SiloedTag.compute(request.tag, request.contractAddress)), + this.#getPrivateLogByTag(await SiloedTag.computeFromTagAndApp(request.tag, request.contractAddress)), ]); if (publicLog !== null && privateLog !== null) { @@ -130,7 +136,6 @@ export class LogService { secrets.map(secret => loadPrivateLogsForSenderRecipientPair( secret, - contractAddress, this.aztecNode, this.recipientTaggingStore, anchorBlockNumber, @@ -154,7 +159,7 @@ export class LogService { async #getSecretsForSenders( contractAddress: AztecAddress, recipient: AztecAddress, - ): Promise { + ): Promise { const recipientCompleteAddress = await this.addressStore.getCompleteAddress(recipient); if (!recipientCompleteAddress) { return []; @@ -172,7 +177,7 @@ export class LogService { return Promise.all( deduplicatedSenders.map(sender => { - return 
DirectionalAppTaggingSecret.compute( + return ExtendedDirectionalAppTaggingSecret.compute( recipientCompleteAddress, recipientIvsk, sender, diff --git a/yarn-project/pxe/src/pxe.test.ts b/yarn-project/pxe/src/pxe.test.ts index f53376b0e4b3..7325d8b80ed7 100644 --- a/yarn-project/pxe/src/pxe.test.ts +++ b/yarn-project/pxe/src/pxe.test.ts @@ -326,6 +326,6 @@ describe('PXE', () => { }); }); }); - // Note: Not testing a successful run of `proveTx`, `sendTx`, `getTxReceipt` and `simulateUtility` here as it + // Note: Not testing a successful run of `proveTx`, `sendTx`, `getTxReceipt` and `executeUtility` here as it // requires a larger setup and it's sufficiently tested in the e2e tests. }); diff --git a/yarn-project/pxe/src/pxe.ts b/yarn-project/pxe/src/pxe.ts index 0d70a53fc1e5..9b7e5cc3ed98 100644 --- a/yarn-project/pxe/src/pxe.ts +++ b/yarn-project/pxe/src/pxe.ts @@ -47,7 +47,7 @@ import { TxProfileResult, TxProvingResult, TxSimulationResult, - UtilitySimulationResult, + UtilityExecutionResult, } from '@aztec/stdlib/tx'; import { inspect } from 'util'; @@ -61,6 +61,7 @@ import { generateSimulatedProvingResult, } from './contract_function_simulator/contract_function_simulator.js'; import { ProxiedContractStoreFactory } from './contract_function_simulator/proxied_contract_data_source.js'; +import { displayDebugLogs } from './contract_logging.js'; import { ContractSyncService } from './contract_sync/contract_sync_service.js'; import { readCurrentClassId } from './contract_sync/helpers.js'; import { PXEDebugUtils } from './debug/pxe_debug_utils.js'; @@ -111,8 +112,8 @@ export type SimulateTxOpts = { scopes: AccessScopes; }; -/** Options for PXE.simulateUtility. */ -export type SimulateUtilityOpts = { +/** Options for PXE.executeUtility. */ +export type ExecuteUtilityOpts = { /** The authentication witnesses required for the function call. 
*/ authwits?: AuthWitness[]; /** The accounts whose notes we can access in this call */ @@ -264,7 +265,7 @@ export class PXE { debugUtils.setPXEHelpers( pxe.#putInJobQueue.bind(pxe), pxe.#getSimulatorForTx.bind(pxe), - pxe.#simulateUtility.bind(pxe), + pxe.#executeUtility.bind(pxe), ); pxe.jobQueue.start(); @@ -369,7 +370,7 @@ export class PXE { contractAddress, functionSelector, (privateSyncCall, execScopes) => - this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), + this.#executeUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), anchorBlockHeader, jobId, scopes, @@ -393,16 +394,16 @@ export class PXE { } /** - * Simulate a utility function call on the given contract. + * Execute a utility function call on the given contract. * @param contractFunctionSimulator - The simulator to use for the function call. * @param call - The function call to execute. * @param authWitnesses - Authentication witnesses required for the function call. * @param scopes - Optional array of account addresses whose notes can be accessed in this call. Defaults to all * accounts if not specified. * @param jobId - The job ID for staged writes. - * @returns The simulation result containing the outputs of the utility function. + * @returns The execution result containing the outputs of the utility function. 
*/ - async #simulateUtility( + async #executeUtility( contractFunctionSimulator: ContractFunctionSimulator, call: FunctionCall, authWitnesses: AuthWitness[] | undefined, @@ -946,6 +947,9 @@ export class PXE { const publicSimulationTimer = new Timer(); publicOutput = await this.#simulatePublicCalls(simulatedTx, skipFeeEnforcement); publicSimulationTime = publicSimulationTimer.ms(); + if (publicOutput?.debugLogs?.length) { + await displayDebugLogs(publicOutput.debugLogs, addr => this.contractStore.getDebugContractName(addr)); + } } let validationTime: number | undefined; @@ -1012,16 +1016,16 @@ export class PXE { } /** - * Simulates the execution of a contract utility function. + * Executes a contract utility function. * @param call - The function call containing the function details, arguments, and target contract address. */ - public simulateUtility( + public executeUtility( call: FunctionCall, - { authwits, scopes }: SimulateUtilityOpts = { scopes: 'ALL_SCOPES' }, - ): Promise { - // We disable concurrent simulations since those might execute oracles which read and write to the PXE stores (e.g. + { authwits, scopes }: ExecuteUtilityOpts = { scopes: 'ALL_SCOPES' }, + ): Promise { + // We disable concurrent executions since those might execute oracles which read and write to the PXE stores (e.g. // to the capsules), and we need to prevent concurrent runs from interfering with one another (e.g. attempting to - // delete the same read value, or reading values that another simulation is currently modifying). + // delete the same read value, or reading values that another execution is currently modifying). 
return this.#putInJobQueue(async jobId => { try { const totalTimer = new Timer(); @@ -1036,13 +1040,13 @@ export class PXE { call.to, call.selector, (privateSyncCall, execScopes) => - this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), + this.#executeUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), anchorBlockHeader, jobId, scopes, ); - const executionResult = await this.#simulateUtility( + const executionResult = await this.#executeUtility( contractFunctionSimulator, call, authwits ?? [], @@ -1069,7 +1073,7 @@ export class PXE { const stringifiedArgs = args.map(arg => arg.toString()).join(', '); throw this.#contextualizeError( err, - `simulateUtility ${to}:${name}(${stringifiedArgs})`, + `executeUtility ${to}:${name}(${stringifiedArgs})`, `scopes=${scopes === 'ALL_SCOPES' ? scopes : scopes.map(s => s.toString()).join(', ')}`, ); } @@ -1107,7 +1111,7 @@ export class PXE { filter.contractAddress, null, async (privateSyncCall, execScopes) => - await this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), + await this.#executeUtility(contractFunctionSimulator, privateSyncCall, [], execScopes, jobId), anchorBlockHeader, jobId, filter.scopes, diff --git a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.test.ts b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.test.ts index 49ff3ad27a89..33f2c60d2291 100644 --- a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.test.ts +++ b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.test.ts @@ -1,18 +1,18 @@ -import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { DirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import { randomExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/testing'; import { 
RecipientTaggingStore } from './recipient_tagging_store.js'; describe('RecipientTaggingStore', () => { let taggingStore: RecipientTaggingStore; - let secret1: DirectionalAppTaggingSecret; - let secret2: DirectionalAppTaggingSecret; + let secret1: ExtendedDirectionalAppTaggingSecret; + let secret2: ExtendedDirectionalAppTaggingSecret; beforeEach(async () => { taggingStore = new RecipientTaggingStore(await openTmpStore('test')); - secret1 = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - secret2 = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); + secret1 = await randomExtendedDirectionalAppTaggingSecret(); + secret2 = await randomExtendedDirectionalAppTaggingSecret(); }); describe('staged writes', () => { diff --git a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts index d492c0ff99b8..148d9b59dcfc 100644 --- a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts +++ b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts @@ -1,5 +1,5 @@ import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; -import type { DirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; @@ -106,11 +106,11 @@ export class RecipientTaggingStore implements StagedStore { return Promise.resolve(); } - getHighestAgedIndex(secret: DirectionalAppTaggingSecret, jobId: string): Promise { + getHighestAgedIndex(secret: ExtendedDirectionalAppTaggingSecret, jobId: string): Promise { return this.#store.transactionAsync(() => this.#readHighestAgedIndex(jobId, secret.toString())); } - updateHighestAgedIndex(secret: DirectionalAppTaggingSecret, index: number, jobId: string): Promise { + updateHighestAgedIndex(secret: ExtendedDirectionalAppTaggingSecret, index: number, jobId: 
string): Promise { return this.#store.transactionAsync(async () => { const currentIndex = await this.#readHighestAgedIndex(jobId, secret.toString()); if (currentIndex !== undefined && index <= currentIndex) { @@ -121,11 +121,15 @@ export class RecipientTaggingStore implements StagedStore { }); } - getHighestFinalizedIndex(secret: DirectionalAppTaggingSecret, jobId: string): Promise { + getHighestFinalizedIndex(secret: ExtendedDirectionalAppTaggingSecret, jobId: string): Promise { return this.#store.transactionAsync(() => this.#readHighestFinalizedIndex(jobId, secret.toString())); } - updateHighestFinalizedIndex(secret: DirectionalAppTaggingSecret, index: number, jobId: string): Promise { + updateHighestFinalizedIndex( + secret: ExtendedDirectionalAppTaggingSecret, + index: number, + jobId: string, + ): Promise { return this.#store.transactionAsync(async () => { const currentIndex = await this.#readHighestFinalizedIndex(jobId, secret.toString()); if (currentIndex !== undefined && index < currentIndex) { diff --git a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts index 7b78ce0141a3..986f1daef6fc 100644 --- a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts +++ b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts @@ -1,6 +1,6 @@ -import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { DirectionalAppTaggingSecret, type PreTag } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret, PreTag } from '@aztec/stdlib/logs'; +import { randomExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../../tagging/constants.js'; @@ -8,19 +8,19 @@ import { SenderTaggingStore } from './sender_tagging_store.js'; describe('SenderTaggingStore', () => { 
let taggingStore: SenderTaggingStore; - let secret1: DirectionalAppTaggingSecret; - let secret2: DirectionalAppTaggingSecret; + let secret1: ExtendedDirectionalAppTaggingSecret; + let secret2: ExtendedDirectionalAppTaggingSecret; beforeEach(async () => { taggingStore = new SenderTaggingStore(await openTmpStore('test')); - secret1 = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - secret2 = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); + secret1 = await randomExtendedDirectionalAppTaggingSecret(); + secret2 = await randomExtendedDirectionalAppTaggingSecret(); }); describe('storePendingIndexes', () => { it('stores a single pending index', async () => { const txHash = TxHash.random(); - const preTag: PreTag = { secret: secret1, index: 5 }; + const preTag: PreTag = { extendedSecret: secret1, index: 5 }; await taggingStore.storePendingIndexes([preTag], txHash, 'test'); @@ -32,8 +32,8 @@ describe('SenderTaggingStore', () => { it('stores multiple pending indexes for different secrets', async () => { const txHash = TxHash.random(); const preTags: PreTag[] = [ - { secret: secret1, index: 3 }, - { secret: secret2, index: 7 }, + { extendedSecret: secret1, index: 3 }, + { extendedSecret: secret2, index: 7 }, ]; await taggingStore.storePendingIndexes(preTags, txHash, 'test'); @@ -51,8 +51,8 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10, 'test'); expect(txHashes).toHaveLength(2); @@ -62,7 +62,7 @@ describe('SenderTaggingStore', 
() => { it('ignores duplicate preTag + txHash combination', async () => { const txHash = TxHash.random(); - const preTag: PreTag = { secret: secret1, index: 5 }; + const preTag: PreTag = { extendedSecret: secret1, index: 5 }; await taggingStore.storePendingIndexes([preTag], txHash, 'test'); await taggingStore.storePendingIndexes([preTag], txHash, 'test'); @@ -75,8 +75,8 @@ describe('SenderTaggingStore', () => { it('throws when storing duplicate secrets in the same call', async () => { const txHash = TxHash.random(); const preTags: PreTag[] = [ - { secret: secret1, index: 3 }, - { secret: secret1, index: 7 }, + { extendedSecret: secret1, index: 3 }, + { extendedSecret: secret1, index: 7 }, ]; await expect(taggingStore.storePendingIndexes(preTags, txHash, 'test')).rejects.toThrow( @@ -88,12 +88,12 @@ describe('SenderTaggingStore', () => { const txHash = TxHash.random(); // First store an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash, 'test'); // Try to store a different index for the same secret + txHash pair - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash, 'test')).rejects.toThrow( - /Cannot store index 7.*a different index 5 already exists/, - ); + await expect( + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash, 'test'), + ).rejects.toThrow(/Cannot store index 7.*a different index 5 already exists/); }); it('throws when storing a pending index lower than the last finalized index', async () => { @@ -101,13 +101,13 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 10 }], txHash1, 'test'); await 
taggingStore.finalizePendingIndexes([txHash1], 'test'); // Try to store a pending index lower than the finalized index - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, 'test')).rejects.toThrow( - /Cannot store pending index 5.*lower than or equal to the last finalized index 10/, - ); + await expect( + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash2, 'test'), + ).rejects.toThrow(/Cannot store pending index 5.*lower than or equal to the last finalized index 10/); }); it('throws when storing a pending index equal to the last finalized index', async () => { @@ -115,13 +115,13 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 10 }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Try to store a pending index equal to the finalized index - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2, 'test')).rejects.toThrow( - /Cannot store pending index 10.*lower than or equal to the last finalized index 10/, - ); + await expect( + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 10 }], txHash2, 'test'), + ).rejects.toThrow(/Cannot store pending index 10.*lower than or equal to the last finalized index 10/); }); it('allows storing a pending index higher than the last finalized index', async () => { @@ -129,12 +129,12 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 10 }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Store a 
pending index higher than the finalized index - should succeed await expect( - taggingStore.storePendingIndexes([{ secret: secret1, index: 15 }], txHash2, 'test'), + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 15 }], txHash2, 'test'), ).resolves.not.toThrow(); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 20, 'test'); @@ -150,12 +150,12 @@ describe('SenderTaggingStore', () => { const indexBeyondWindow = finalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN + 1; // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: finalizedIndex }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Try to store an index beyond the window await expect( - taggingStore.storePendingIndexes([{ secret: secret1, index: indexBeyondWindow }], txHash2, 'test'), + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: indexBeyondWindow }], txHash2, 'test'), ).rejects.toThrow( `Highest used index ${indexBeyondWindow} is further than window length from the highest finalized index ${finalizedIndex}`, ); @@ -168,12 +168,12 @@ describe('SenderTaggingStore', () => { const indexAtBoundary = finalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN; // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: finalizedIndex }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Store an index at the boundary, but check is >, so it should succeed await expect( - taggingStore.storePendingIndexes([{ secret: secret1, index: indexAtBoundary }], txHash2, 'test'), + taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: indexAtBoundary }], txHash2, 
'test'), ).resolves.not.toThrow(); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, indexAtBoundary + 5, 'test'); @@ -194,9 +194,9 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 8 }], txHash3, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 8 }], txHash3, 'test'); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 4, 9, 'test'); expect(txHashes).toHaveLength(2); @@ -209,8 +209,8 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 10 }], txHash2, 'test'); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 5, 10, 'test'); expect(txHashes).toHaveLength(1); @@ -223,13 +223,13 @@ describe('SenderTaggingStore', () => { const txHash3 = TxHash.random(); const txHash4 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 
'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash2, 'test'); // We store different secret with txHash1 to check we correctly don't return it in the result - await taggingStore.storePendingIndexes([{ secret: secret2, index: 7 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret2, index: 7 }], txHash1, 'test'); // Store "parallel" index for secret1 with a different tx (can happen when sending logs from multiple PXEs) - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash4, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash3, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash4, 'test'); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10, 'test'); // Should have 3 unique tx hashes for secret1 @@ -245,7 +245,7 @@ describe('SenderTaggingStore', () => { it('returns the last finalized index after finalizePendingIndexes', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash, 'test'); await taggingStore.finalizePendingIndexes([txHash], 'test'); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1, 'test'); @@ -261,7 +261,7 @@ describe('SenderTaggingStore', () => { it('returns the last finalized index when no pending indexes exist', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash, 'test'); await taggingStore.finalizePendingIndexes([txHash], 'test'); const lastUsed = await 
taggingStore.getLastUsedIndex(secret1, 'test'); @@ -273,11 +273,11 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First, finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Then add a higher pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); const lastUsed = await taggingStore.getLastUsedIndex(secret1, 'test'); expect(lastUsed).toBe(7); @@ -288,9 +288,9 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash3, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash3, 'test'); const lastUsed = await taggingStore.getLastUsedIndex(secret1, 'test'); expect(lastUsed).toBe(7); @@ -302,9 +302,9 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret2, index: 5 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ 
extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret2, index: 5 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); await taggingStore.dropPendingIndexes([txHash1], 'test'); @@ -322,7 +322,7 @@ describe('SenderTaggingStore', () => { describe('finalizePendingIndexes', () => { it('moves pending index to finalized for a given tx hash', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash, 'test'); await taggingStore.finalizePendingIndexes([txHash], 'test'); @@ -338,10 +338,10 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); await taggingStore.finalizePendingIndexes([txHash2], 'test'); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1, 'test'); @@ -353,8 +353,8 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // Store both pending indexes first - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash2, 'test'); 
// Finalize the higher index first await taggingStore.finalizePendingIndexes([txHash1], 'test'); @@ -371,9 +371,9 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash3, 'test'); // Finalize txHash2 (index 5) await taggingStore.finalizePendingIndexes([txHash2], 'test'); @@ -389,8 +389,8 @@ describe('SenderTaggingStore', () => { const txHash = TxHash.random(); await taggingStore.storePendingIndexes( [ - { secret: secret1, index: 3 }, - { secret: secret2, index: 7 }, + { extendedSecret: secret1, index: 3 }, + { extendedSecret: secret2, index: 7 }, ], txHash, 'test', @@ -407,7 +407,7 @@ describe('SenderTaggingStore', () => { it('does nothing when tx hash does not exist', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash, 'test'); await taggingStore.finalizePendingIndexes([TxHash.random()], 'test'); @@ -427,7 +427,7 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // Step 1: Add pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); expect(await taggingStore.getLastUsedIndex(secret1, 'test')).toBe(3); expect(await 
taggingStore.getLastFinalizedIndex(secret1, 'test')).toBeUndefined(); @@ -437,7 +437,7 @@ describe('SenderTaggingStore', () => { expect(await taggingStore.getLastFinalizedIndex(secret1, 'test')).toBe(3); // Step 3: Add a new higher pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, 'test'); expect(await taggingStore.getLastUsedIndex(secret1, 'test')).toBe(7); expect(await taggingStore.getLastFinalizedIndex(secret1, 'test')).toBe(3); @@ -451,8 +451,8 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], txHash2, 'test'); expect(await taggingStore.getLastUsedIndex(secret1, 'test')).toBe(5); @@ -468,14 +468,14 @@ describe('SenderTaggingStore', () => { const txHash3 = TxHash.random(); // Secret1: pending -> finalized - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, 'test'); await taggingStore.finalizePendingIndexes([txHash1], 'test'); // Secret2: pending (not finalized) - await taggingStore.storePendingIndexes([{ secret: secret2, index: 5 }], txHash2, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret2, index: 5 }], txHash2, 'test'); // Secret1: new pending - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, 'test'); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash3, 'test'); expect(await 
taggingStore.getLastFinalizedIndex(secret1, 'test')).toBe(3); expect(await taggingStore.getLastUsedIndex(secret1, 'test')).toBe(7); @@ -489,13 +489,13 @@ describe('SenderTaggingStore', () => { const committedTxHash = TxHash.random(); { const commitJobId: string = 'commit-job'; - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], committedTxHash, commitJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], committedTxHash, commitJobId); await taggingStore.commit(commitJobId); } const stagedTxHash = TxHash.random(); const stagingJobId: string = 'staging-job'; - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], stagedTxHash, stagingJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 5 }], stagedTxHash, stagingJobId); // For a job without any staged data we should only get committed data const txHashesWithoutJobId = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10, 'no-data-job'); @@ -513,7 +513,7 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); { const commitJobId: string = 'commit-job'; - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, commitJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash1, commitJobId); await taggingStore.finalizePendingIndexes([txHash1], commitJobId); await taggingStore.commit(commitJobId); } @@ -522,7 +522,7 @@ describe('SenderTaggingStore', () => { const stagingJobId: string = 'staging-job'; // Stage a higher finalized index (not committed) - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, stagingJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash2, stagingJobId); await taggingStore.finalizePendingIndexes([txHash2], stagingJobId); // With a different jobId, should get the committed finalized index @@ -537,8 +537,8 @@ 
describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); const commitJobId: string = 'commit-job'; - await taggingStore.storePendingIndexes([{ secret: secret1, index: 2 }], txHash1, commitJobId); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash2, commitJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 2 }], txHash1, commitJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 3 }], txHash2, commitJobId); await taggingStore.finalizePendingIndexes([txHash1], commitJobId); await taggingStore.commit(commitJobId); } @@ -546,7 +546,7 @@ describe('SenderTaggingStore', () => { const stagingJobId: string = 'staging-job'; { const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, stagingJobId); + await taggingStore.storePendingIndexes([{ extendedSecret: secret1, index: 7 }], txHash3, stagingJobId); await taggingStore.finalizePendingIndexes([txHash3], stagingJobId); await taggingStore.discardStaged(stagingJobId); } diff --git a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts index 8da2e88ab91f..1b15bbbb207a 100644 --- a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts +++ b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts @@ -1,5 +1,5 @@ import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; -import type { DirectionalAppTaggingSecret, PreTag } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret, PreTag } from '@aztec/stdlib/logs'; import { TxHash } from '@aztec/stdlib/tx'; import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; @@ -154,7 +154,7 @@ export class SenderTaggingStore implements StagedStore { // The secrets in pre-tags should be unique because we always store just the highest index 
per given secret-txHash // pair. Below we check that this is the case. - const secretsSet = new Set(preTags.map(preTag => preTag.secret.toString())); + const secretsSet = new Set(preTags.map(preTag => preTag.extendedSecret.toString())); if (secretsSet.size !== preTags.length) { return Promise.reject(new Error(`Duplicate secrets found when storing pending indexes`)); } @@ -163,10 +163,10 @@ export class SenderTaggingStore implements StagedStore { return this.#store.transactionAsync(async () => { // Prefetch all data, start reads during iteration to keep IndexedDB transaction alive - const preTagReadPromises = preTags.map(({ secret, index }) => { - const secretStr = secret.toString(); + const preTagReadPromises = preTags.map(({ extendedSecret, index }) => { + const secretStr = extendedSecret.toString(); return { - secret, + extendedSecret, secretStr, index, pending: this.#readPendingIndexes(jobId, secretStr), @@ -233,7 +233,7 @@ export class SenderTaggingStore implements StagedStore { * [startIndex, endIndex). Returns an empty array if no pending indexes exist in the range. */ getTxHashesOfPendingIndexes( - secret: DirectionalAppTaggingSecret, + secret: ExtendedDirectionalAppTaggingSecret, startIndex: number, endIndex: number, jobId: string, @@ -252,7 +252,7 @@ export class SenderTaggingStore implements StagedStore { * @param secret - The secret to get the last finalized index for. * @returns The last (highest) finalized index for the given secret. */ - getLastFinalizedIndex(secret: DirectionalAppTaggingSecret, jobId: string): Promise { + getLastFinalizedIndex(secret: ExtendedDirectionalAppTaggingSecret, jobId: string): Promise { return this.#store.transactionAsync(() => this.#readLastFinalizedIndex(jobId, secret.toString())); } @@ -262,7 +262,7 @@ export class SenderTaggingStore implements StagedStore { * @param secret - The directional app tagging secret to query the last used index for. * @returns The last used index. 
*/ - getLastUsedIndex(secret: DirectionalAppTaggingSecret, jobId: string): Promise { + getLastUsedIndex(secret: ExtendedDirectionalAppTaggingSecret, jobId: string): Promise { const secretStr = secret.toString(); return this.#store.transactionAsync(async () => { diff --git a/yarn-project/pxe/src/tagging/get_all_logs_by_tags.test.ts b/yarn-project/pxe/src/tagging/get_all_logs_by_tags.test.ts index 2515b695ec1e..f769a1f54ff7 100644 --- a/yarn-project/pxe/src/tagging/get_all_logs_by_tags.test.ts +++ b/yarn-project/pxe/src/tagging/get_all_logs_by_tags.test.ts @@ -21,7 +21,7 @@ describe('getAllPrivateLogsByTags', () => { beforeAll(async () => { tags = await Promise.all( - [1, 2, 3].map(async () => SiloedTag.compute(new Tag(Fr.random()), await AztecAddress.random())), + [1, 2, 3].map(async () => SiloedTag.computeFromTagAndApp(new Tag(Fr.random()), await AztecAddress.random())), ); }); diff --git a/yarn-project/pxe/src/tagging/index.ts b/yarn-project/pxe/src/tagging/index.ts index beeb055a2e17..ea8c6f80f613 100644 --- a/yarn-project/pxe/src/tagging/index.ts +++ b/yarn-project/pxe/src/tagging/index.ts @@ -15,5 +15,5 @@ export { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from './constants.js'; export { getAllPrivateLogsByTags, getAllPublicLogsByTagsFromContract } from './get_all_logs_by_tags.js'; // Re-export tagging-related types from stdlib -export { DirectionalAppTaggingSecret, Tag, SiloedTag } from '@aztec/stdlib/logs'; +export { ExtendedDirectionalAppTaggingSecret, Tag, SiloedTag } from '@aztec/stdlib/logs'; export { type PreTag } from '@aztec/stdlib/logs'; diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts index 7a19c393ae72..25d7104a3d48 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts +++ 
b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts @@ -2,11 +2,15 @@ import { MAX_TX_LIFETIME } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; -import { makeBlockHeader, makeL2Tips, randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; +import { type ExtendedDirectionalAppTaggingSecret, SiloedTag } from '@aztec/stdlib/logs'; +import { + makeBlockHeader, + makeL2Tips, + randomExtendedDirectionalAppTaggingSecret, + randomTxScopedPrivateL2Log, +} from '@aztec/stdlib/testing'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -20,17 +24,15 @@ const FAR_FUTURE_BLOCK_NUMBER = BlockNumber(100); const MOCK_ANCHOR_BLOCK_HASH = BlockHash.random(); describe('loadPrivateLogsForSenderRecipientPair', () => { - let secret: DirectionalAppTaggingSecret; - let app: AztecAddress; + let secret: ExtendedDirectionalAppTaggingSecret; let aztecNode: MockProxy; let taggingStore: RecipientTaggingStore; const currentTimestamp = BigInt(Math.floor(Date.now() / 1000)); - async function computeSiloedTagForIndex(index: number) { - const tag = await Tag.compute({ secret, index }); - return SiloedTag.compute(tag, app); + function computeSiloedTagForIndex(index: number) { + return SiloedTag.compute({ extendedSecret: secret, index }); } function makeLog(blockNumber: number, blockTimestamp: bigint, tag: Fr) { @@ -38,8 +40,7 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { } beforeAll(async () => { - secret = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - app = await AztecAddress.random(); + secret = await 
randomExtendedDirectionalAppTaggingSecret(); aztecNode = mock(); }); @@ -62,7 +63,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logs = await loadPrivateLogsForSenderRecipientPair( secret, - app, aztecNode, taggingStore, FAR_FUTURE_BLOCK_NUMBER, @@ -97,7 +97,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logs = await loadPrivateLogsForSenderRecipientPair( secret, - app, aztecNode, taggingStore, FAR_FUTURE_BLOCK_NUMBER, @@ -132,7 +131,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logs = await loadPrivateLogsForSenderRecipientPair( secret, - app, aztecNode, taggingStore, FAR_FUTURE_BLOCK_NUMBER, @@ -184,7 +182,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logs = await loadPrivateLogsForSenderRecipientPair( secret, - app, aztecNode, taggingStore, FAR_FUTURE_BLOCK_NUMBER, diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts index de527f8a7f61..8587860539bf 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts @@ -1,8 +1,7 @@ import type { BlockNumber } from '@aztec/foundation/branded-types'; -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; -import type { DirectionalAppTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; import type { RecipientTaggingStore } from '../../storage/tagging_store/recipient_tagging_store.js'; import { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../constants.js'; @@ -10,15 +9,14 @@ import { findHighestIndexes } from 
'./utils/find_highest_indexes.js'; import { loadLogsForRange } from './utils/load_logs_for_range.js'; /** - * Loads private logs for `app` and sender-recipient pair defined by `secret` and updates the highest aged and + * Loads private logs for the app-sender-recipient triplet defined by `secret` and updates the highest aged and * finalized indexes in the db. At most load logs from blocks up to and including `anchorBlockNumber`. * * @dev This function can be safely executed "in parallel" for other sender-recipient pairs because the data in * in the tagging data provider is indexed by the secret and hence completely disjoint. */ export async function loadPrivateLogsForSenderRecipientPair( - secret: DirectionalAppTaggingSecret, - app: AztecAddress, + secret: ExtendedDirectionalAppTaggingSecret, aztecNode: AztecNode, taggingStore: RecipientTaggingStore, anchorBlockNumber: BlockNumber, @@ -96,7 +94,6 @@ export async function loadPrivateLogsForSenderRecipientPair( // Get private logs with their block timestamps and corresponding tagging indexes const privateLogsWithIndexes = await loadLogsForRange( secret, - app, aztecNode, start, end, diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts index 24c278dd2e4d..89134335968d 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts @@ -1,10 +1,8 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; -import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; +import { 
type ExtendedDirectionalAppTaggingSecret, SiloedTag } from '@aztec/stdlib/logs'; +import { randomExtendedDirectionalAppTaggingSecret, randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -18,14 +16,12 @@ const MOCK_ANCHOR_BLOCK_HASH = BlockHash.random(); describe('loadLogsForRange', () => { // App contract address and secret to be used on the input of the loadLogsForRange function. - let secret: DirectionalAppTaggingSecret; - let app: AztecAddress; + let secret: ExtendedDirectionalAppTaggingSecret; let aztecNode: MockProxy; - async function computeSiloedTagForIndex(index: number) { - const tag = await Tag.compute({ secret, index }); - return SiloedTag.compute(tag, app); + function computeSiloedTagForIndex(index: number) { + return SiloedTag.compute({ extendedSecret: secret, index }); } function makeLog(txHash: TxHash, blockNumber: number, blockTimestamp: bigint, tag: SiloedTag) { @@ -33,8 +29,7 @@ describe('loadLogsForRange', () => { } beforeAll(async () => { - secret = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - app = await AztecAddress.random(); + secret = await randomExtendedDirectionalAppTaggingSecret(); aztecNode = mock(); }); @@ -49,7 +44,7 @@ describe('loadLogsForRange', () => { }); expect( - await loadLogsForRange(secret, app, aztecNode, 0, 10, FAR_FUTURE_BLOCK_NUMBER, MOCK_ANCHOR_BLOCK_HASH), + await loadLogsForRange(secret, aztecNode, 0, 10, FAR_FUTURE_BLOCK_NUMBER, MOCK_ANCHOR_BLOCK_HASH), ).toHaveLength(0); }); @@ -78,15 +73,7 @@ describe('loadLogsForRange', () => { ); }); - const result = await loadLogsForRange( - secret, - app, - aztecNode, - 0, - 10, - FAR_FUTURE_BLOCK_NUMBER, - MOCK_ANCHOR_BLOCK_HASH, - ); + const result = await loadLogsForRange(secret, aztecNode, 0, 10, FAR_FUTURE_BLOCK_NUMBER, MOCK_ANCHOR_BLOCK_HASH); expect(result).toHaveLength(2); const resultByIndex = result.sort((a, b) => a.taggingIndex - 
b.taggingIndex); @@ -118,15 +105,7 @@ describe('loadLogsForRange', () => { ); }); - const result = await loadLogsForRange( - secret, - app, - aztecNode, - 0, - 10, - FAR_FUTURE_BLOCK_NUMBER, - MOCK_ANCHOR_BLOCK_HASH, - ); + const result = await loadLogsForRange(secret, aztecNode, 0, 10, FAR_FUTURE_BLOCK_NUMBER, MOCK_ANCHOR_BLOCK_HASH); expect(result).toHaveLength(2); expect(result[0].taggingIndex).toBe(index); @@ -159,15 +138,7 @@ describe('loadLogsForRange', () => { ); }); - const result = await loadLogsForRange( - secret, - app, - aztecNode, - 0, - 10, - FAR_FUTURE_BLOCK_NUMBER, - MOCK_ANCHOR_BLOCK_HASH, - ); + const result = await loadLogsForRange(secret, aztecNode, 0, 10, FAR_FUTURE_BLOCK_NUMBER, MOCK_ANCHOR_BLOCK_HASH); expect(result).toHaveLength(2); @@ -203,7 +174,6 @@ describe('loadLogsForRange', () => { const result = await loadLogsForRange( secret, - app, aztecNode, start, end, @@ -240,7 +210,6 @@ describe('loadLogsForRange', () => { const result = await loadLogsForRange( secret, - app, aztecNode, 0, 10, diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts index d9e8ce3eb2ee..c8e3bfa575b7 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts @@ -1,32 +1,27 @@ import type { BlockNumber } from '@aztec/foundation/branded-types'; -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; -import type { DirectionalAppTaggingSecret, PreTag, TxScopedL2Log } from '@aztec/stdlib/logs'; -import { SiloedTag, Tag } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; +import { SiloedTag } from '@aztec/stdlib/logs'; import { getAllPrivateLogsByTags } from 
'../../get_all_logs_by_tags.js'; /** - * Gets private logs with their corresponding block timestamps and tagging indexes for the given index range, `app` and - * `secret`. At most load logs from blocks up to and including `anchorBlockNumber`. `start` is inclusive and `end` is - * exclusive. + * Gets private logs with their corresponding block timestamps and tagging indexes for the given index range and + * `extendedSecret`. At most load logs from blocks up to and including `anchorBlockNumber`. `start` is inclusive and + * `end` is exclusive. */ export async function loadLogsForRange( - secret: DirectionalAppTaggingSecret, - app: AztecAddress, + extendedSecret: ExtendedDirectionalAppTaggingSecret, aztecNode: AztecNode, start: number, end: number, anchorBlockNumber: BlockNumber, anchorBlockHash: BlockHash, ): Promise> { - // Derive tags for the window - const preTags: PreTag[] = Array(end - start) - .fill(0) - .map((_, i) => ({ secret, index: start + i })); - const siloedTags = await Promise.all(preTags.map(preTag => Tag.compute(preTag))).then(tags => - Promise.all(tags.map(tag => SiloedTag.compute(tag, app))), + // Derive siloed tags for the window + const siloedTags = await Promise.all( + Array.from({ length: end - start }, (_, i) => SiloedTag.compute({ extendedSecret, index: start + i })), ); // We use the utility function below to retrieve all logs for the tags across all pages, so we don't need to handle @@ -37,7 +32,7 @@ export async function loadLogsForRange( const logsWithIndexes: Array<{ log: TxScopedL2Log; taggingIndex: number }> = []; for (let i = 0; i < logs.length; i++) { const logsForTag = logs[i]; - const taggingIndex = preTags[i].index; + const taggingIndex = start + i; for (const log of logsForTag) { if (log.blockNumber <= anchorBlockNumber) { logsWithIndexes.push({ log, taggingIndex }); diff --git a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts 
b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts index ba4cb0466a0f..d214b6e50120 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts @@ -1,31 +1,32 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; -import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; +import { randomExtendedDirectionalAppTaggingSecret, randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxExecutionResult, TxHash, TxReceipt, TxStatus } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; import { SenderTaggingStore } from '../../storage/tagging_store/sender_tagging_store.js'; -import { DirectionalAppTaggingSecret, SiloedTag, Tag, UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../index.js'; +import { + type ExtendedDirectionalAppTaggingSecret, + SiloedTag, + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN, +} from '../index.js'; import { syncSenderTaggingIndexes } from './sync_sender_tagging_indexes.js'; const MOCK_ANCHOR_BLOCK_HASH = BlockHash.random(); describe('syncSenderTaggingIndexes', () => { - // Contract address and secret to be used on the input of the syncSenderTaggingIndexes function. - let secret: DirectionalAppTaggingSecret; - let contractAddress: AztecAddress; + // The secret to be used on the input of the syncSenderTaggingIndexes function. 
+ let secret: ExtendedDirectionalAppTaggingSecret; let aztecNode: MockProxy; let taggingStore: SenderTaggingStore; - async function computeSiloedTagForIndex(index: number) { - const tag = await Tag.compute({ secret, index }); - return SiloedTag.compute(tag, contractAddress); + function computeSiloedTagForIndex(index: number) { + return SiloedTag.compute({ extendedSecret: secret, index }); } function makeLog(txHash: TxHash, tag: Fr) { @@ -33,8 +34,7 @@ describe('syncSenderTaggingIndexes', () => { } async function setUp() { - secret = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - contractAddress = await AztecAddress.random(); + secret = await randomExtendedDirectionalAppTaggingSecret(); aztecNode = mock(); taggingStore = new SenderTaggingStore(await openTmpStore('test')); @@ -48,7 +48,7 @@ describe('syncSenderTaggingIndexes', () => { return Promise.resolve(tags.map((_tag: SiloedTag) => [])); }); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await syncSenderTaggingIndexes(secret, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Highest used and finalized indexes should stay undefined expect(await taggingStore.getLastUsedIndex(secret, 'test')).toBeUndefined(); @@ -91,7 +91,7 @@ describe('syncSenderTaggingIndexes', () => { ), ); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await syncSenderTaggingIndexes(secret, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify the highest finalized index is updated to 3 expect(await taggingStore.getLastFinalizedIndex(secret, 'test')).toBe(finalizedIndexStep1); @@ -123,7 +123,7 @@ describe('syncSenderTaggingIndexes', () => { ), ); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await syncSenderTaggingIndexes(secret, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // 
Verify the highest finalized index was not updated expect(await taggingStore.getLastFinalizedIndex(secret, 'test')).toBe(finalizedIndexStep1); @@ -206,7 +206,7 @@ describe('syncSenderTaggingIndexes', () => { } }); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await syncSenderTaggingIndexes(secret, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); expect(await taggingStore.getLastFinalizedIndex(secret, 'test')).toBe(newHighestFinalizedIndex); expect(await taggingStore.getLastUsedIndex(secret, 'test')).toBe(newHighestUsedIndex); @@ -269,7 +269,7 @@ describe('syncSenderTaggingIndexes', () => { }); // Sync tagged logs - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await syncSenderTaggingIndexes(secret, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that both highest finalized and highest used were set to the pending and finalized index expect(await taggingStore.getLastFinalizedIndex(secret, 'test')).toBe(pendingAndFinalizedIndex); diff --git a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts index 0270ead82a35..87d56d6a46e7 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts @@ -1,7 +1,6 @@ -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import type { DirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; import type { SenderTaggingStore } from '../../storage/tagging_store/sender_tagging_store.js'; import { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../constants.js'; @@ 
-11,11 +10,8 @@ import { loadAndStoreNewTaggingIndexes } from './utils/load_and_store_new_taggin /** * Syncs tagging indexes. This function needs to be called whenever a private log is being sent. * - * @param secret - The secret that's unique for (sender, recipient, contract) tuple while the direction of + * @param secret - The secret that's unique for (sender, recipient, app) tuple while the direction of * sender -> recipient matters. - * @param app - The address of the contract that the logs are tagged for. Needs to be provided because we perform - * second round of siloing in this function which is necessary because kernels do it as well (they silo first field - * of the private log which corresponds to the tag). * @remarks When syncing the indexes as sender we don't care about the log contents - we only care about the highest * pending and highest finalized indexes as that guides the next index choice when sending a log. The next index choice * is simply the highest pending index plus one (or finalized if pending is undefined). @@ -23,8 +19,7 @@ import { loadAndStoreNewTaggingIndexes } from './utils/load_and_store_new_taggin * updates its status accordingly. */ export async function syncSenderTaggingIndexes( - secret: DirectionalAppTaggingSecret, - app: AztecAddress, + secret: ExtendedDirectionalAppTaggingSecret, aztecNode: AztecNode, taggingStore: SenderTaggingStore, anchorBlockHash: BlockHash, @@ -59,7 +54,7 @@ export async function syncSenderTaggingIndexes( while (true) { // Load and store indexes for the current window. These indexes may already exist in the database if txs using // them were previously sent from this PXE. Any duplicates are handled by the tagging data provider. 
- await loadAndStoreNewTaggingIndexes(secret, app, start, end, aztecNode, taggingStore, anchorBlockHash, jobId); + await loadAndStoreNewTaggingIndexes(secret, start, end, aztecNode, taggingStore, anchorBlockHash, jobId); // Retrieve all indexes within the current window from storage and update their status accordingly. const pendingTxHashes = await taggingStore.getTxHashesOfPendingIndexes(secret, start, end, jobId); diff --git a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts index 6ee625261b16..789c67c79f8f 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts @@ -1,10 +1,9 @@ -import { Fr } from '@aztec/foundation/curves/bn254'; +import type { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; -import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; +import { type ExtendedDirectionalAppTaggingSecret, SiloedTag } from '@aztec/stdlib/logs'; +import { randomExtendedDirectionalAppTaggingSecret, randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -15,16 +14,14 @@ import { loadAndStoreNewTaggingIndexes } from './load_and_store_new_tagging_inde const MOCK_ANCHOR_BLOCK_HASH = BlockHash.random(); describe('loadAndStoreNewTaggingIndexes', () => { - // App contract address and secret to be used on the input of the loadAndStoreNewTaggingIndexes function. 
- let secret: DirectionalAppTaggingSecret; - let app: AztecAddress; + // Secret to be used on the input of the loadAndStoreNewTaggingIndexes function. + let secret: ExtendedDirectionalAppTaggingSecret; let aztecNode: MockProxy; let taggingStore: SenderTaggingStore; - async function computeSiloedTagForIndex(index: number) { - const tag = await Tag.compute({ secret, index }); - return SiloedTag.compute(tag, app); + function computeSiloedTagForIndex(index: number) { + return SiloedTag.compute({ extendedSecret: secret, index }); } function makeLog(txHash: TxHash, tag: Fr) { @@ -32,8 +29,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { } beforeAll(async () => { - secret = DirectionalAppTaggingSecret.fromString(Fr.random().toString()); - app = await AztecAddress.random(); + secret = await randomExtendedDirectionalAppTaggingSecret(); aztecNode = mock(); }); @@ -49,7 +45,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { return Promise.resolve(tags.map((_tag: SiloedTag) => [])); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that no pending indexes were stored expect(await taggingStore.getLastUsedIndex(secret, 'test')).toBeUndefined(); @@ -69,7 +65,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { return Promise.resolve(tags.map((t: SiloedTag) => (t.equals(tag) ? 
[makeLog(txHash, tag.value)] : []))); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that the pending index was stored for this txHash const txHashesInRange = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1, 'test'); @@ -100,7 +96,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that only the highest index (7) was stored for this txHash and secret const txHashesAtIndex2 = await taggingStore.getTxHashesOfPendingIndexes(secret, index2, index2 + 1, 'test'); @@ -136,7 +132,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that both txHashes have their respective indexes stored const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, index1, index1 + 1, 'test'); @@ -164,7 +160,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that both txHashes have the same index stored const txHashesAtIndex = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1, 'test'); @@ -210,7 +206,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 
0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); + await loadAndStoreNewTaggingIndexes(secret, 0, 10, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify txHash1 has highest index 8 (should not be at index 1) const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, 1, 2, 'test'); @@ -258,16 +254,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes( - secret, - app, - start, - end, - aztecNode, - taggingStore, - MOCK_ANCHOR_BLOCK_HASH, - 'test', - ); + await loadAndStoreNewTaggingIndexes(secret, start, end, aztecNode, taggingStore, MOCK_ANCHOR_BLOCK_HASH, 'test'); // Verify that the log at start (inclusive) was processed const txHashesAtStart = await taggingStore.getTxHashesOfPendingIndexes(secret, start, start + 1, 'test'); diff --git a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts index 4fa1939d4fe8..5558c1097cba 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts @@ -1,8 +1,7 @@ -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import type { DirectionalAppTaggingSecret, PreTag } from '@aztec/stdlib/logs'; -import { SiloedTag, Tag } from '@aztec/stdlib/logs'; +import type { ExtendedDirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import { SiloedTag } from '@aztec/stdlib/logs'; import { TxHash } from '@aztec/stdlib/tx'; import type { SenderTaggingStore } from '../../../storage/tagging_store/sender_tagging_store.js'; @@ -12,9 +11,7 @@ import { getAllPrivateLogsByTags } from '../../get_all_logs_by_tags.js'; * Loads tagging indexes from the Aztec node and stores them 
in the tagging data provider. * @remarks This function is one of two places by which a pending index can get to the tagging data provider. The other * place is when a tx is being sent from this PXE. - * @param secret - The directional app tagging secret that's unique for (sender, recipient, contract) tuple. - * @param app - The address of the contract that the logs are tagged for. Used for siloing tags to match - * kernel circuit behavior. + * @param extendedSecret - The extended directional app tagging secret that's unique for (sender, recipient, app) tuple. * @param start - The starting index (inclusive) of the window to process. * @param end - The ending index (exclusive) of the window to process. * @param aztecNode - The Aztec node instance to query for logs. @@ -23,8 +20,7 @@ import { getAllPrivateLogsByTags } from '../../get_all_logs_by_tags.js'; * preserving way. */ export async function loadAndStoreNewTaggingIndexes( - secret: DirectionalAppTaggingSecret, - app: AztecAddress, + extendedSecret: ExtendedDirectionalAppTaggingSecret, start: number, end: number, aztecNode: AztecNode, @@ -33,20 +29,17 @@ export async function loadAndStoreNewTaggingIndexes( jobId: string, ) { // We compute the tags for the current window of indexes - const preTagsForWindow: PreTag[] = Array(end - start) - .fill(0) - .map((_, i) => ({ secret, index: start + i })); const siloedTagsForWindow = await Promise.all( - preTagsForWindow.map(async preTag => SiloedTag.compute(await Tag.compute(preTag), app)), + Array.from({ length: end - start }, (_, i) => SiloedTag.compute({ extendedSecret, index: start + i })), ); const txsForTags = await getTxsContainingTags(siloedTagsForWindow, aztecNode, anchorBlockHash); - const highestIndexMap = getTxHighestIndexMap(txsForTags, preTagsForWindow); + const highestIndexMap = getTxHighestIndexMap(txsForTags, start, siloedTagsForWindow.length); // Now we iterate over the map, reconstruct the preTags and tx hash and store them in the db. 
for (const [txHashStr, highestIndex] of highestIndexMap.entries()) { const txHash = TxHash.fromString(txHashStr); - await taggingStore.storePendingIndexes([{ secret, index: highestIndex }], txHash, jobId); + await taggingStore.storePendingIndexes([{ extendedSecret, index: highestIndex }], txHash, jobId); } } @@ -64,16 +57,14 @@ async function getTxsContainingTags( } // Returns a map of txHash to the highest index for that txHash. -function getTxHighestIndexMap(txHashesForTags: TxHash[][], preTagsForWindow: PreTag[]): Map { - if (txHashesForTags.length !== preTagsForWindow.length) { - throw new Error( - `Number of tx hashes arrays does not match number of pre-tags. ${txHashesForTags.length} !== ${preTagsForWindow.length}`, - ); +function getTxHighestIndexMap(txHashesForTags: TxHash[][], start: number, count: number): Map { + if (txHashesForTags.length !== count) { + throw new Error(`Number of tx hashes arrays does not match number of tags. ${txHashesForTags.length} !== ${count}`); } const highestIndexMap = new Map(); for (let i = 0; i < txHashesForTags.length; i++) { - const taggingIndex = preTagsForWindow[i].index; + const taggingIndex = start + i; const txHashesForTag = txHashesForTags[i]; for (const txHash of txHashesForTag) { const key = txHash.toString(); diff --git a/yarn-project/simulator/src/public/public_processor/public_processor.ts b/yarn-project/simulator/src/public/public_processor/public_processor.ts index 92698d928a20..e3a776edac02 100644 --- a/yarn-project/simulator/src/public/public_processor/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor/public_processor.ts @@ -25,6 +25,7 @@ import type { PublicProcessorValidator, SequencerConfig, } from '@aztec/stdlib/interfaces/server'; +import { type DebugLog, type DebugLogStore, NullDebugLogStore } from '@aztec/stdlib/logs'; import { ProvingRequestType } from '@aztec/stdlib/proofs'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { @@ -130,7 +131,6 @@ class 
PublicProcessorTimeoutError extends Error { */ export class PublicProcessor implements Traceable { private metrics: PublicProcessorMetrics; - constructor( protected globalVariables: GlobalVariables, private guardedMerkleTree: GuardedMerkleTreeOperations, @@ -140,6 +140,7 @@ export class PublicProcessor implements Traceable { telemetryClient: TelemetryClient = getTelemetryClient(), private log: Logger, private opts: Pick = {}, + private debugLogStore: DebugLogStore = new NullDebugLogStore(), ) { this.metrics = new PublicProcessorMetrics(telemetryClient, 'PublicProcessor'); } @@ -159,12 +160,13 @@ export class PublicProcessor implements Traceable { txs: Iterable | AsyncIterable, limits: PublicProcessorLimits = {}, validator: PublicProcessorValidator = {}, - ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], number]> { + ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], number, DebugLog[]]> { const { maxTransactions, maxBlockSize, deadline, maxBlockGas, maxBlobFields } = limits; const { preprocessValidator, nullifierCache } = validator; const result: ProcessedTx[] = []; const usedTxs: Tx[] = []; const failed: FailedTx[] = []; + const debugLogs: DebugLog[] = []; const timer = new Timer(); let totalSizeInBytes = 0; @@ -241,7 +243,7 @@ export class PublicProcessor implements Traceable { this.contractsDB.createCheckpoint(); try { - const [processedTx, returnValues] = await this.processTx(tx, deadline); + const [processedTx, returnValues, txDebugLogs] = await this.processTx(tx, deadline); // Inject a fake processing failure after N txs if requested const fakeThrowAfter = this.opts.fakeThrowAfterProcessingTxCount; @@ -290,6 +292,9 @@ export class PublicProcessor implements Traceable { result.push(processedTx); usedTxs.push(tx); returns = returns.concat(returnValues); + debugLogs.push(...txDebugLogs); + + this.debugLogStore.storeLogs(processedTx.hash.toString(), txDebugLogs); totalPublicGas = 
totalPublicGas.add(processedTx.gasUsed.publicGas); totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas); @@ -363,7 +368,7 @@ export class PublicProcessor implements Traceable { totalSizeInBytes, }); - return [result, failed, usedTxs, returns, totalBlobFields]; + return [result, failed, usedTxs, returns, totalBlobFields, debugLogs]; } private async checkWorldStateUnchanged( @@ -383,8 +388,13 @@ export class PublicProcessor implements Traceable { } @trackSpan('PublicProcessor.processTx', tx => ({ [Attributes.TX_HASH]: tx.getTxHash().toString() })) - private async processTx(tx: Tx, deadline: Date | undefined): Promise<[ProcessedTx, NestedProcessReturnValues[]]> { - const [time, [processedTx, returnValues]] = await elapsed(() => this.processTxWithinDeadline(tx, deadline)); + private async processTx( + tx: Tx, + deadline: Date | undefined, + ): Promise<[ProcessedTx, NestedProcessReturnValues[], DebugLog[]]> { + const [time, [processedTx, returnValues, debugLogs]] = await elapsed(() => + this.processTxWithinDeadline(tx, deadline), + ); this.log.verbose( !tx.hasPublicCalls() @@ -407,7 +417,7 @@ export class PublicProcessor implements Traceable { }, ); - return [processedTx, returnValues ?? []]; + return [processedTx, returnValues ?? [], debugLogs]; } private async doTreeInsertionsForPrivateOnlyTx(processedTx: ProcessedTx): Promise { @@ -441,10 +451,9 @@ export class PublicProcessor implements Traceable { private async processTxWithinDeadline( tx: Tx, deadline: Date | undefined, - ): Promise<[ProcessedTx, NestedProcessReturnValues[] | undefined]> { - const innerProcessFn: () => Promise<[ProcessedTx, NestedProcessReturnValues[] | undefined]> = tx.hasPublicCalls() - ? () => this.processTxWithPublicCalls(tx) - : () => this.processPrivateOnlyTx(tx); + ): Promise<[ProcessedTx, NestedProcessReturnValues[] | undefined, DebugLog[]]> { + const innerProcessFn: () => Promise<[ProcessedTx, NestedProcessReturnValues[] | undefined, DebugLog[]]> = + tx.hasPublicCalls() ? 
() => this.processTxWithPublicCalls(tx) : () => this.processPrivateOnlyTx(tx); // Fake a delay per tx if instructed (used for tests) const fakeDelayPerTxMs = this.opts.fakeProcessingDelayPerTxMs; @@ -512,7 +521,7 @@ export class PublicProcessor implements Traceable { @trackSpan('PublicProcessor.processPrivateOnlyTx', (tx: Tx) => ({ [Attributes.TX_HASH]: tx.getTxHash().toString(), })) - private async processPrivateOnlyTx(tx: Tx): Promise<[ProcessedTx, undefined]> { + private async processPrivateOnlyTx(tx: Tx): Promise<[ProcessedTx, undefined, DebugLog[]]> { const gasFees = this.globalVariables.gasFees; const transactionFee = computeTransactionFee(gasFees, tx.data.constants.txContext.gasSettings, tx.data.gasUsed); @@ -537,13 +546,13 @@ export class PublicProcessor implements Traceable { await this.contractsDB.addNewContracts(tx); - return [processedTx, undefined]; + return [processedTx, undefined, []]; } @trackSpan('PublicProcessor.processTxWithPublicCalls', tx => ({ [Attributes.TX_HASH]: tx.getTxHash().toString(), })) - private async processTxWithPublicCalls(tx: Tx): Promise<[ProcessedTx, NestedProcessReturnValues[]]> { + private async processTxWithPublicCalls(tx: Tx): Promise<[ProcessedTx, NestedProcessReturnValues[], DebugLog[]]> { const timer = new Timer(); const result = await this.publicTxSimulator.simulate(tx); @@ -581,7 +590,7 @@ export class PublicProcessor implements Traceable { revertReason, ); - return [processedTx, appLogicReturnValues]; + return [processedTx, appLogicReturnValues, result.logs ?? 
[]]; } /** diff --git a/yarn-project/simulator/src/public/public_tx_simulator/factories.ts b/yarn-project/simulator/src/public/public_tx_simulator/factories.ts index 646317a3d94a..8d1c29746334 100644 --- a/yarn-project/simulator/src/public/public_tx_simulator/factories.ts +++ b/yarn-project/simulator/src/public/public_tx_simulator/factories.ts @@ -19,10 +19,11 @@ export function createPublicTxSimulatorForBlockBuilding( globalVariables: GlobalVariables, telemetryClient: TelemetryClient, bindings?: LoggerBindings, + collectDebugLogs = false, ) { const config = PublicSimulatorConfig.from({ skipFeeEnforcement: false, - collectDebugLogs: false, + collectDebugLogs, collectHints: false, collectPublicInputs: false, collectStatistics: false, diff --git a/yarn-project/stdlib/src/avm/revert_code.ts b/yarn-project/stdlib/src/avm/revert_code.ts index 4249a6b69f4a..be01d20b20e1 100644 --- a/yarn-project/stdlib/src/avm/revert_code.ts +++ b/yarn-project/stdlib/src/avm/revert_code.ts @@ -5,10 +5,25 @@ import { BufferReader, FieldReader } from '@aztec/foundation/serialize'; import { inspect } from 'util'; import { z } from 'zod'; +/** + * Tracks which revertible phases of a transaction's public execution reverted. + * + * A transaction executes in three sequential phases: + * 1. SETUP – non-revertible; if this fails the entire transaction is rejected. + * 2. APP_LOGIC – revertible; its state changes are rolled back on failure. + * 3. TEARDOWN – revertible; always runs (even after app-logic revert) so the fee-payment contract can clean up. + * + * Only APP_LOGIC and TEARDOWN can produce a revert code. SETUP failures throw instead and discard the transaction + * entirely. + */ export enum RevertCodeEnum { + /** All phases completed successfully; no state was rolled back. */ OK = 0, + /** APP_LOGIC reverted; its state changes were discarded. If present, TEARDOWN still ran and succeeded. */ APP_LOGIC_REVERTED = 1, + /** TEARDOWN reverted; its state changes were discarded. 
APP_LOGIC succeeded. */ TEARDOWN_REVERTED = 2, + /** Both APP_LOGIC and TEARDOWN reverted; only SETUP effects are kept. */ BOTH_REVERTED = 3, } diff --git a/yarn-project/stdlib/src/logs/debug_log_store.ts b/yarn-project/stdlib/src/logs/debug_log_store.ts new file mode 100644 index 000000000000..f671c4967bca --- /dev/null +++ b/yarn-project/stdlib/src/logs/debug_log_store.ts @@ -0,0 +1,54 @@ +import type { TxReceipt } from '../tx/tx_receipt.js'; +import type { DebugLog } from './debug_log.js'; + +/** + * Store for debug logs emitted by public functions during transaction execution. + * + * Uses the Null Object pattern: production code uses NullDebugLogStore (no-op), while test mode uses + * InMemoryDebugLogStore (stores and serves logs). + */ +export interface DebugLogStore { + /** Store debug logs for a processed transaction. */ + storeLogs(txHash: string, logs: DebugLog[]): void; + /** Decorate a TxReceipt with any stored debug logs for the given tx. */ + decorateReceiptWithLogs(txHash: string, receipt: TxReceipt): void; + /** Whether debug log collection is enabled. */ + readonly isEnabled: boolean; +} + +/** No-op implementation for production mode. */ +export class NullDebugLogStore implements DebugLogStore { + storeLogs(_txHash: string, _logs: DebugLog[]): void { + return; + } + decorateReceiptWithLogs(_txHash: string, _receipt: TxReceipt): void { + return; + } + get isEnabled(): boolean { + return false; + } +} + +/** In-memory implementation for test mode that stores and serves debug logs. 
*/ +export class InMemoryDebugLogStore implements DebugLogStore { + private map = new Map(); + + storeLogs(txHash: string, logs: DebugLog[]): void { + if (logs.length > 0) { + this.map.set(txHash, logs); + } + } + + decorateReceiptWithLogs(txHash: string, receipt: TxReceipt): void { + if (receipt.isMined()) { + const debugLogs = this.map.get(txHash); + if (debugLogs) { + receipt.debugLogs = debugLogs; + } + } + } + + get isEnabled(): boolean { + return true; + } +} diff --git a/yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.test.ts b/yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.test.ts new file mode 100644 index 000000000000..59b38d10eb77 --- /dev/null +++ b/yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.test.ts @@ -0,0 +1,13 @@ +import { randomExtendedDirectionalAppTaggingSecret } from '../tests/factories.js'; +import { ExtendedDirectionalAppTaggingSecret } from './extended_directional_app_tagging_secret.js'; + +describe('ExtendedDirectionalAppTaggingSecret', () => { + it('toString and fromString works', async () => { + const secret = await randomExtendedDirectionalAppTaggingSecret(); + const str = secret.toString(); + const parsed = ExtendedDirectionalAppTaggingSecret.fromString(str); + + expect(parsed.secret).toEqual(secret.secret); + expect(parsed.app).toEqual(secret.app); + }); +}); diff --git a/yarn-project/stdlib/src/logs/directional_app_tagging_secret.ts b/yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.ts similarity index 67% rename from yarn-project/stdlib/src/logs/directional_app_tagging_secret.ts rename to yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.ts index 410eaf6f2be1..0a083ed053bc 100644 --- a/yarn-project/stdlib/src/logs/directional_app_tagging_secret.ts +++ b/yarn-project/stdlib/src/logs/extended_directional_app_tagging_secret.ts @@ -5,22 +5,28 @@ import type { Point } from '@aztec/foundation/curves/grumpkin'; import { z } from 'zod'; 
-import type { AztecAddress } from '../aztec-address/index.js'; +import { AztecAddress } from '../aztec-address/index.js'; import type { CompleteAddress } from '../contract/complete_address.js'; import { computeAddressSecret, computePreaddress } from '../keys/derivation.js'; /** - * Directional application tagging secret used for log tagging. + * Extended directional application tagging secret used for log tagging. * - * "Directional" because the derived secret is bound to the recipient - * address: A→B differs from B→A even with the same participants and app. + * "Extended" because it bundles the directional app tagging secret with the app (contract) address. This bundling was + * done because where this type is used we commonly need access to both the secret and the address. * - * Note: It's a bit unfortunate that this type resides in `stdlib` as the rest of the tagging functionality resides - * in `pxe/src/tagging`. We need to use this type in `PreTag` that in turn is used by other types - * in stdlib hence there doesn't seem to be a good way around this. + * "Directional" because the derived secret is bound to the recipient address: A→B differs from B→A even with the same + * participants and app. + * + * Note: It's a bit unfortunate that this type resides in `stdlib` as the rest of the tagging functionality resides in + * `pxe/src/tagging`. We need to use this type in `PreTag` that in turn is used by other types in stdlib hence there + * doesn't seem to be a good way around this. 
*/ -export class DirectionalAppTaggingSecret { - private constructor(public readonly value: Fr) {} +export class ExtendedDirectionalAppTaggingSecret { + private constructor( + public readonly secret: Fr, + public readonly app: AztecAddress, + ) {} /** * Derives shared tagging secret and from that, the app address and recipient derives the directional app tagging @@ -39,20 +45,21 @@ export class DirectionalAppTaggingSecret { externalAddress: AztecAddress, app: AztecAddress, recipient: AztecAddress, - ): Promise { + ): Promise { const taggingSecretPoint = await computeSharedTaggingSecret(localAddress, localIvsk, externalAddress); const appTaggingSecret = await poseidon2Hash([taggingSecretPoint.x, taggingSecretPoint.y, app]); const directionalAppTaggingSecret = await poseidon2Hash([appTaggingSecret, recipient]); - return new DirectionalAppTaggingSecret(directionalAppTaggingSecret); + return new ExtendedDirectionalAppTaggingSecret(directionalAppTaggingSecret, app); } toString(): string { - return this.value.toString(); + return `${this.secret.toString()}:${this.app.toString()}`; } - static fromString(str: string): DirectionalAppTaggingSecret { - return new DirectionalAppTaggingSecret(Fr.fromString(str)); + static fromString(str: string): ExtendedDirectionalAppTaggingSecret { + const [secretStr, appStr] = str.split(':'); + return new ExtendedDirectionalAppTaggingSecret(Fr.fromString(secretStr), AztecAddress.fromString(appStr)); } } @@ -74,6 +81,7 @@ async function computeSharedTaggingSecret( return Grumpkin.mul(externalAddressPoint, await computeAddressSecret(knownPreaddress, localIvsk)); } -export const DirectionalAppTaggingSecretSchema = z.object({ - value: Fr.schema, +export const ExtendedDirectionalAppTaggingSecretSchema = z.object({ + secret: Fr.schema, + app: AztecAddress.schema, }); diff --git a/yarn-project/stdlib/src/logs/index.ts b/yarn-project/stdlib/src/logs/index.ts index dafe33e376db..2e25c40da7c3 100644 --- a/yarn-project/stdlib/src/logs/index.ts +++ 
b/yarn-project/stdlib/src/logs/index.ts @@ -1,4 +1,4 @@ -export * from './directional_app_tagging_secret.js'; +export * from './extended_directional_app_tagging_secret.js'; export * from './pre_tag.js'; export * from './contract_class_log.js'; export * from './public_log.js'; @@ -12,5 +12,6 @@ export * from './shared_secret_derivation.js'; export * from './tx_scoped_l2_log.js'; export * from './message_context.js'; export * from './debug_log.js'; +export * from './debug_log_store.js'; export * from './tag.js'; export * from './siloed_tag.js'; diff --git a/yarn-project/stdlib/src/logs/pre_tag.ts b/yarn-project/stdlib/src/logs/pre_tag.ts index 0110412cbd15..40f13b6f501b 100644 --- a/yarn-project/stdlib/src/logs/pre_tag.ts +++ b/yarn-project/stdlib/src/logs/pre_tag.ts @@ -3,9 +3,9 @@ import { schemas } from '@aztec/foundation/schemas'; import { z } from 'zod'; import { - type DirectionalAppTaggingSecret, - DirectionalAppTaggingSecretSchema, -} from './directional_app_tagging_secret.js'; + type ExtendedDirectionalAppTaggingSecret, + ExtendedDirectionalAppTaggingSecretSchema, +} from './extended_directional_app_tagging_secret.js'; /** * Represents a preimage of a private log tag (see `Tag` in `pxe/src/tagging`). @@ -15,11 +15,11 @@ import { * around this. 
*/ export type PreTag = { - secret: DirectionalAppTaggingSecret; + extendedSecret: ExtendedDirectionalAppTaggingSecret; index: number; }; export const PreTagSchema = z.object({ - secret: DirectionalAppTaggingSecretSchema, + extendedSecret: ExtendedDirectionalAppTaggingSecretSchema, index: schemas.Integer, }); diff --git a/yarn-project/stdlib/src/logs/siloed_tag.ts b/yarn-project/stdlib/src/logs/siloed_tag.ts index 5eec518b7f03..0710d3e91fe7 100644 --- a/yarn-project/stdlib/src/logs/siloed_tag.ts +++ b/yarn-project/stdlib/src/logs/siloed_tag.ts @@ -4,7 +4,8 @@ import type { ZodFor } from '@aztec/foundation/schemas'; import type { AztecAddress } from '../aztec-address/index.js'; import { computeSiloedPrivateLogFirstField } from '../hash/hash.js'; import { schemas } from '../schemas/schemas.js'; -import type { Tag } from './tag.js'; +import type { PreTag } from './pre_tag.js'; +import { Tag } from './tag.js'; /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */ @@ -21,7 +22,12 @@ export interface SiloedTag { export class SiloedTag { constructor(public readonly value: Fr) {} - static async compute(tag: Tag, app: AztecAddress): Promise { + static async compute(preTag: PreTag): Promise { + const tag = await Tag.compute(preTag); + return SiloedTag.computeFromTagAndApp(tag, preTag.extendedSecret.app); + } + + static async computeFromTagAndApp(tag: Tag, app: AztecAddress): Promise { const siloedTag = await computeSiloedPrivateLogFirstField(app, tag.value); return new SiloedTag(siloedTag); } diff --git a/yarn-project/stdlib/src/logs/tag.ts b/yarn-project/stdlib/src/logs/tag.ts index ff7e120bc5b2..c16771da9f8c 100644 --- a/yarn-project/stdlib/src/logs/tag.ts +++ b/yarn-project/stdlib/src/logs/tag.ts @@ -20,7 +20,7 @@ export class Tag { constructor(public readonly value: Fr) {} static async compute(preTag: PreTag): Promise { - const tag = await poseidon2Hash([preTag.secret.value, preTag.index]); + const tag = await poseidon2Hash([preTag.extendedSecret.secret, 
preTag.index]); return new Tag(tag); } diff --git a/yarn-project/stdlib/src/tests/factories.ts b/yarn-project/stdlib/src/tests/factories.ts index e2d22f969b2c..c33aaa591bd4 100644 --- a/yarn-project/stdlib/src/tests/factories.ts +++ b/yarn-project/stdlib/src/tests/factories.ts @@ -128,6 +128,7 @@ import { PublicCallRequestArrayLengths, } from '../kernel/public_call_request.js'; import { PublicKeys, computeAddress } from '../keys/index.js'; +import { ExtendedDirectionalAppTaggingSecret } from '../logs/extended_directional_app_tagging_secret.js'; import { ContractClassLog, ContractClassLogFields } from '../logs/index.js'; import { PrivateLog } from '../logs/private_log.js'; import { FlatPublicLogs, PublicLog } from '../logs/public_log.js'; @@ -1757,3 +1758,11 @@ export function makeL2Tips( }, }; } + +export async function randomExtendedDirectionalAppTaggingSecret(): Promise { + const resolvedApp = await AztecAddress.random(); + // Using the fromString method like this is messy as it leaks the underlying serialization format but I don't want to + // expose the type's constructor just for tests since in prod the secret is always constructed via compute. Also this + // method is tested in extended_directional_app_tagging_secret.test.ts hence all should be fine. 
+ return ExtendedDirectionalAppTaggingSecret.fromString(`${Fr.random().toString()}:${resolvedApp.toString()}`); +} diff --git a/yarn-project/stdlib/src/tx/profiling.ts b/yarn-project/stdlib/src/tx/profiling.ts index 093e7af471ed..c5441d77931d 100644 --- a/yarn-project/stdlib/src/tx/profiling.ts +++ b/yarn-project/stdlib/src/tx/profiling.ts @@ -157,23 +157,23 @@ export class TxProfileResult { } } -export class UtilitySimulationResult { +export class UtilityExecutionResult { constructor( public result: Fr[], public stats?: SimulationStats, ) {} - static get schema(): ZodFor { + static get schema(): ZodFor { return z .object({ result: z.array(schemas.Fr), stats: optional(SimulationStatsSchema), }) - .transform(({ result, stats }) => new UtilitySimulationResult(result, stats)); + .transform(({ result, stats }) => new UtilityExecutionResult(result, stats)); } - static random(): UtilitySimulationResult { - return new UtilitySimulationResult([Fr.random()], { + static random(): UtilityExecutionResult { + return new UtilityExecutionResult([Fr.random()], { nodeRPCCalls: { perMethod: { getBlockHeader: { times: [1] } }, roundTrips: { diff --git a/yarn-project/stdlib/src/tx/public_simulation_output.ts b/yarn-project/stdlib/src/tx/public_simulation_output.ts index 20e5d743e3d2..984a747fcf3d 100644 --- a/yarn-project/stdlib/src/tx/public_simulation_output.ts +++ b/yarn-project/stdlib/src/tx/public_simulation_output.ts @@ -7,6 +7,7 @@ import { z } from 'zod'; import { SimulationError } from '../errors/simulation_error.js'; import { Gas } from '../gas/gas.js'; import type { GasUsed } from '../gas/gas_used.js'; +import { DebugLog } from '../logs/debug_log.js'; import { NullishToUndefined } from '../schemas/schemas.js'; import { TxEffect } from '../tx/tx_effect.js'; import { GlobalVariables } from './global_variables.js'; @@ -71,6 +72,7 @@ export class PublicSimulationOutput { public txEffect: TxEffect, public publicReturnValues: NestedProcessReturnValues[], public gasUsed: GasUsed, 
+ public debugLogs: DebugLog[] = [], ) {} static get schema(): ZodFor { @@ -86,6 +88,7 @@ export class PublicSimulationOutput { publicGas: Gas.schema, billedGas: Gas.schema, }), + debugLogs: z.array(DebugLog.schema).default([]), }) .transform( fields => @@ -95,6 +98,7 @@ export class PublicSimulationOutput { fields.txEffect, fields.publicReturnValues, fields.gasUsed, + fields.debugLogs, ), ); } diff --git a/yarn-project/stdlib/src/tx/tx_receipt.ts b/yarn-project/stdlib/src/tx/tx_receipt.ts index ec54694712d5..3b67b0057ba5 100644 --- a/yarn-project/stdlib/src/tx/tx_receipt.ts +++ b/yarn-project/stdlib/src/tx/tx_receipt.ts @@ -4,6 +4,7 @@ import { z } from 'zod'; import { RevertCode } from '../avm/revert_code.js'; import { BlockHash } from '../block/block_hash.js'; +import { DebugLog } from '../logs/debug_log.js'; import { type ZodFor, schemas } from '../schemas/schemas.js'; import { TxHash } from './tx_hash.js'; @@ -57,6 +58,12 @@ export class TxReceipt { public blockHash?: BlockHash, /** The block number in which the transaction was included. */ public blockNumber?: BlockNumber, + /** + * Debug logs collected during public function execution. Served only when the node is in test mode and placed on + * the receipt only because it's a convenient place for it (the logs are printed out by the wallet when a mined + * tx receipt is obtained). + */ + public debugLogs?: DebugLog[], ) {} /** Returns true if the transaction was executed successfully. 
*/ @@ -103,6 +110,7 @@ export class TxReceipt { blockHash: BlockHash.schema.optional(), blockNumber: BlockNumberSchema.optional(), transactionFee: schemas.BigInt.optional(), + debugLogs: z.array(DebugLog.schema).optional(), }) .transform(fields => TxReceipt.from(fields)); } @@ -115,6 +123,7 @@ export class TxReceipt { transactionFee?: bigint; blockHash?: BlockHash; blockNumber?: BlockNumber; + debugLogs?: DebugLog[]; }) { return new TxReceipt( fields.txHash, @@ -124,6 +133,7 @@ export class TxReceipt { fields.transactionFee, fields.blockHash, fields.blockNumber, + fields.debugLogs, ); } diff --git a/yarn-project/txe/src/oracle/interfaces.ts b/yarn-project/txe/src/oracle/interfaces.ts index 35d5bea9555d..98dd9f4c0086 100644 --- a/yarn-project/txe/src/oracle/interfaces.ts +++ b/yarn-project/txe/src/oracle/interfaces.ts @@ -71,11 +71,13 @@ export interface ITxeExecutionOracle { args: Fr[], argsHash: Fr, isStaticCall: boolean, + jobId: string, ): Promise; - txeSimulateUtilityFunction( + txeExecuteUtilityFunction( targetContractAddress: AztecAddress, functionSelector: FunctionSelector, args: Fr[], + jobId: string, ): Promise; txePublicCallNewFlow( from: AztecAddress, @@ -83,4 +85,7 @@ export interface ITxeExecutionOracle { calldata: Fr[], isStaticCall: boolean, ): Promise; + // TODO(F-335): Drop this from here as it's not a real oracle handler - it's only called from + // RPCTranslator::txeGetPrivateEvents and never from Noir. 
+ syncContractNonOracleMethod(contractAddress: AztecAddress, scope: AztecAddress, jobId: string): Promise; } diff --git a/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts b/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts index 59947893fd7c..45be8bbcf95c 100644 --- a/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts +++ b/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts @@ -107,7 +107,6 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl private senderAddressBookStore: SenderAddressBookStore, private capsuleStore: CapsuleStore, private privateEventStore: PrivateEventStore, - private jobId: string, private nextBlockTimestamp: bigint, private version: Fr, private chainId: Fr, @@ -172,6 +171,25 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl return { txHash: txEffects.txHash, noteHashes: txEffects.noteHashes, nullifiers: txEffects.nullifiers }; } + async syncContractNonOracleMethod(contractAddress: AztecAddress, scope: AztecAddress, jobId: string) { + if (contractAddress.equals(DEFAULT_ADDRESS)) { + this.logger.debug(`Skipping sync in txeGetPrivateEvents because the events correspond to the default address.`); + return; + } + + const blockHeader = await this.stateMachine.anchorBlockStore.getBlockHeader(); + await this.stateMachine.contractSyncService.ensureContractSynced( + contractAddress, + null, + async (call, execScopes) => { + await this.executeUtilityCall(call, execScopes, jobId); + }, + blockHeader, + jobId, + [scope], + ); + } + async txeGetPrivateEvents(selector: EventSelector, contractAddress: AztecAddress, scope: AztecAddress) { return ( await this.privateEventStore.getPrivateEvents(selector, { @@ -285,6 +303,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl args: Fr[], argsHash: Fr = Fr.zero(), isStaticCall: boolean = false, + jobId: string, ) { this.logger.verbose( `Executing external function ${await 
this.contractStore.getDebugFunctionName(targetContractAddress, functionSelector)}@${targetContractAddress} isStaticCall=${isStaticCall}`, @@ -304,7 +323,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl // Sync notes before executing private function to discover notes from previous transactions const utilityExecutor = async (call: FunctionCall, execScopes: AccessScopes) => { - await this.executeUtilityCall(call, execScopes); + await this.executeUtilityCall(call, execScopes, jobId); }; const blockHeader = await this.stateMachine.anchorBlockStore.getBlockHeader(); @@ -313,7 +332,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl functionSelector, utilityExecutor, blockHeader, - this.jobId, + jobId, effectiveScopes, ); @@ -360,7 +379,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl capsuleStore: this.capsuleStore, privateEventStore: this.privateEventStore, contractSyncService: this.stateMachine.contractSyncService, - jobId: this.jobId, + jobId, totalPublicCalldataCount: 0, sideEffectCounter: minRevertibleSideEffectCounter, scopes: effectiveScopes, @@ -659,10 +678,11 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl return returnValues ?? 
[]; } - async txeSimulateUtilityFunction( + async txeExecuteUtilityFunction( targetContractAddress: AztecAddress, functionSelector: FunctionSelector, args: Fr[], + jobId: string, ) { const artifact = await this.contractStore.getFunctionArtifact(targetContractAddress, functionSelector); if (!artifact) { @@ -675,10 +695,10 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl targetContractAddress, functionSelector, async (call, execScopes) => { - await this.executeUtilityCall(call, execScopes); + await this.executeUtilityCall(call, execScopes, jobId); }, blockHeader, - this.jobId, + jobId, 'ALL_SCOPES', ); @@ -693,10 +713,10 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl returnTypes: [], }); - return this.executeUtilityCall(call, 'ALL_SCOPES'); + return this.executeUtilityCall(call, 'ALL_SCOPES', jobId); } - private async executeUtilityCall(call: FunctionCall, scopes: AccessScopes): Promise { + private async executeUtilityCall(call: FunctionCall, scopes: AccessScopes, jobId: string): Promise { const entryPointArtifact = await this.contractStore.getFunctionArtifactWithDebugMetadata(call.to, call.selector); if (entryPointArtifact.functionType !== FunctionType.UTILITY) { throw new Error(`Cannot run ${entryPointArtifact.functionType} function as utility`); @@ -723,7 +743,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl senderAddressBookStore: this.senderAddressBookStore, capsuleStore: this.capsuleStore, privateEventStore: this.privateEventStore, - jobId: this.jobId, + jobId, scopes, }); const acirExecutionResult = await new WASMSimulator() @@ -741,10 +761,10 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl ); }); - this.logger.verbose(`Utility simulation for ${call.to}.${call.selector} completed`); + this.logger.verbose(`Utility execution for ${call.to}.${call.selector} completed`); return 
witnessMapToFields(acirExecutionResult.returnWitness); } catch (err) { - throw createSimulationError(err instanceof Error ? err : new Error('Unknown error during utility simulation')); + throw createSimulationError(err instanceof Error ? err : new Error('Unknown error during utility execution')); } } diff --git a/yarn-project/txe/src/rpc_translator.ts b/yarn-project/txe/src/rpc_translator.ts index 78a3b9ce13ec..95995654f675 100644 --- a/yarn-project/txe/src/rpc_translator.ts +++ b/yarn-project/txe/src/rpc_translator.ts @@ -285,6 +285,13 @@ export class RPCTranslator { const contractAddress = addressFromSingle(foreignContractAddress); const scope = addressFromSingle(foreignScope); + // TODO(F-335): Avoid doing the following 2 calls here. + { + await this.handlerAsTxe().syncContractNonOracleMethod(contractAddress, scope, this.stateHandler.getCurrentJob()); + // We cycle job to commit the stores after the contract sync. + await this.stateHandler.cycleJob(); + } + const events = await this.handlerAsTxe().txeGetPrivateEvents(selector, contractAddress, scope); if (events.length > MAX_PRIVATE_EVENTS_PER_TXE_QUERY) { @@ -1038,12 +1045,15 @@ export class RPCTranslator { args, argsHash, isStaticCall, + this.stateHandler.getCurrentJob(), ); + // TODO(F-335): Avoid doing the following call here. 
+ await this.stateHandler.cycleJob(); return toForeignCallResult([toArray(returnValues)]); } - async txeSimulateUtilityFunction( + async txeExecuteUtilityFunction( foreignTargetContractAddress: ForeignCallSingle, foreignFunctionSelector: ForeignCallSingle, foreignArgs: ForeignCallArray, @@ -1052,12 +1062,15 @@ export class RPCTranslator { const functionSelector = FunctionSelector.fromField(fromSingle(foreignFunctionSelector)); const args = fromArray(foreignArgs); - const returnValues = await this.handlerAsTxe().txeSimulateUtilityFunction( + const returnValues = await this.handlerAsTxe().txeExecuteUtilityFunction( targetContractAddress, functionSelector, args, + this.stateHandler.getCurrentJob(), ); + // TODO(F-335): Avoid doing the following call here. + await this.stateHandler.cycleJob(); return toForeignCallResult([toArray(returnValues)]); } @@ -1074,6 +1087,8 @@ export class RPCTranslator { const returnValues = await this.handlerAsTxe().txePublicCallNewFlow(from, address, calldata, isStaticCall); + // TODO(F-335): Avoid doing the following call here. + await this.stateHandler.cycleJob(); return toForeignCallResult([toArray(returnValues)]); } diff --git a/yarn-project/txe/src/txe_session.ts b/yarn-project/txe/src/txe_session.ts index 157f10bb1983..5c7b87ea4feb 100644 --- a/yarn-project/txe/src/txe_session.ts +++ b/yarn-project/txe/src/txe_session.ts @@ -113,6 +113,10 @@ export interface TXESessionStateHandler { enterPublicState(contractAddress?: AztecAddress): Promise; enterPrivateState(contractAddress?: AztecAddress, anchorBlockNumber?: BlockNumber): Promise; enterUtilityState(contractAddress?: AztecAddress): Promise; + + // TODO(F-335): Exposing the job info is abstraction breakage - drop the following 2 functions. 
+ cycleJob(): Promise; + getCurrentJob(): string; } /** @@ -193,7 +197,6 @@ export class TXESession implements TXESessionStateHandler { senderAddressBookStore, capsuleStore, privateEventStore, - initialJobId, nextBlockTimestamp, version, chainId, @@ -254,6 +257,17 @@ export class TXESession implements TXESessionStateHandler { } } + getCurrentJob(): string { + return this.currentJobId; + } + + /** Commits the current job and begins a new one. Returns the new job ID. */ + async cycleJob(): Promise { + await this.jobCoordinator.commitJob(this.currentJobId); + this.currentJobId = this.jobCoordinator.beginJob(); + return this.currentJobId; + } + async enterTopLevelState() { switch (this.state.name) { case 'PRIVATE': { @@ -277,8 +291,7 @@ export class TXESession implements TXESessionStateHandler { } // Commit all staged stores from the job that was just completed, then begin a new job - await this.jobCoordinator.commitJob(this.currentJobId); - this.currentJobId = this.jobCoordinator.beginJob(); + await this.cycleJob(); this.oracleHandler = new TXEOracleTopLevelContext( this.stateMachine, @@ -292,7 +305,6 @@ export class TXESession implements TXESessionStateHandler { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, - this.currentJobId, this.nextBlockTimestamp, this.version, this.chainId, diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 76899d131bdd..38945d92aa4e 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -95,6 +95,7 @@ describe('CheckpointBuilder', () => { [], // usedTxs [], // returnValues 0, // usedTxBlobFields + [], // debugLogs ]); const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n); @@ -118,6 +119,7 @@ describe('CheckpointBuilder', () => { [], // usedTxs [], // returnValues 0, // usedTxBlobFields + [], // debugLogs ]); const result 
= await checkpointBuilder.buildBlock([], blockNumber, 1000n); @@ -137,6 +139,7 @@ describe('CheckpointBuilder', () => { [], // usedTxs [], // returnValues 0, // usedTxBlobFields + [], // debugLogs ]); await expect(checkpointBuilder.buildBlock([], blockNumber, 1000n)).rejects.toThrow(NoValidTxsError); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index b00e5ad5a782..9d26252c0a23 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -28,6 +28,7 @@ import { type PublicProcessorLimits, type WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; +import { type DebugLogStore, NullDebugLogStore } from '@aztec/stdlib/logs'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { type CheckpointGlobalVariables, GlobalVariables, StateReference, Tx } from '@aztec/stdlib/tx'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; @@ -50,6 +51,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { private dateProvider: DateProvider, private telemetryClient: TelemetryClient, bindings?: LoggerBindings, + private debugLogStore: DebugLogStore = new NullDebugLogStore(), ) { this.log = createLogger('checkpoint-builder', { ...bindings, @@ -150,6 +152,8 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { const contractsDB = new PublicContractsDB(this.contractDataSource, this.log.getBindings()); const guardedFork = new GuardedMerkleTreeOperations(fork); + const collectDebugLogs = this.debugLogStore.isEnabled; + const bindings = this.log.getBindings(); const publicTxSimulator = createPublicTxSimulatorForBlockBuilding( guardedFork, @@ -157,6 +161,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { globalVariables, this.telemetryClient, bindings, + collectDebugLogs, ); const processor = new PublicProcessor( @@ -168,6 +173,7 @@ 
export class CheckpointBuilder implements ICheckpointBlockBuilder { this.telemetryClient, createLogger('simulator:public-processor', bindings), this.config, + this.debugLogStore, ); const validator = createTxValidatorForBlockBuilding( @@ -195,6 +201,7 @@ export class FullNodeCheckpointsBuilder implements ICheckpointsBuilder { private contractDataSource: ContractDataSource, private dateProvider: DateProvider, private telemetryClient: TelemetryClient = getTelemetryClient(), + private debugLogStore: DebugLogStore = new NullDebugLogStore(), ) { this.log = createLogger('checkpoint-builder'); } @@ -249,6 +256,7 @@ export class FullNodeCheckpointsBuilder implements ICheckpointsBuilder { this.dateProvider, this.telemetryClient, bindings, + this.debugLogStore, ); } @@ -309,6 +317,7 @@ export class FullNodeCheckpointsBuilder implements ICheckpointsBuilder { this.dateProvider, this.telemetryClient, bindings, + this.debugLogStore, ); } diff --git a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.test.ts b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.test.ts index ac3983ce72ae..2b2c4cc5e300 100644 --- a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.test.ts +++ b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.test.ts @@ -88,6 +88,7 @@ describe('BaseWallet', () => { txEffect: TxEffect.empty(), publicReturnValues: [optimizedRv0, optimizedRv1], gasUsed: { totalGas: Gas.empty(), teardownGas: Gas.empty(), publicGas: Gas.empty(), billedGas: Gas.empty() }, + debugLogs: [], }; node.simulatePublicCalls.mockResolvedValue(optimizedPublicOutput); @@ -98,6 +99,7 @@ describe('BaseWallet', () => { txEffect: TxEffect.empty(), publicReturnValues: [normalRv0], gasUsed: { totalGas: Gas.empty(), teardownGas: Gas.empty(), publicGas: Gas.empty(), billedGas: Gas.empty() }, + debugLogs: [], }; const normalResult = new TxSimulationResult( mock(), diff --git a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts index 
72cfe026eb35..406d4942e75d 100644 --- a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts +++ b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts @@ -8,12 +8,12 @@ import type { AppCapabilities, BatchResults, BatchedMethod, + ExecuteUtilityOptions, PrivateEvent, PrivateEventFilter, ProfileOptions, SendOptions, SimulateOptions, - SimulateUtilityOptions, Wallet, WalletCapabilities, } from '@aztec/aztec.js/wallet'; @@ -28,7 +28,7 @@ import type { ChainInfo } from '@aztec/entrypoints/interfaces'; import { Fr } from '@aztec/foundation/curves/bn254'; import { createLogger } from '@aztec/foundation/log'; import type { FieldsOf } from '@aztec/foundation/types'; -import type { AccessScopes } from '@aztec/pxe/client/lazy'; +import { type AccessScopes, displayDebugLogs } from '@aztec/pxe/client/lazy'; import type { PXE, PackedPrivateEvent } from '@aztec/pxe/server'; import { type ContractArtifact, @@ -52,7 +52,7 @@ import { type TxExecutionRequest, type TxProfileResult, TxSimulationResult, - type UtilitySimulationResult, + type UtilityExecutionResult, } from '@aztec/stdlib/tx'; import { ExecutionPayload, mergeExecutionPayloads } from '@aztec/stdlib/tx'; @@ -348,6 +348,7 @@ export abstract class BaseWallet implements Wallet { feeOptions.gasSettings, blockHeader, opts.skipFeeEnforcement ?? true, + this.getContractName.bind(this), ) : Promise.resolve([]), remainingCalls.length > 0 @@ -400,7 +401,27 @@ export abstract class BaseWallet implements Wallet { // Otherwise, wait for the full receipt (default behavior on wait: undefined) const waitOpts = typeof opts.wait === 'object' ? 
opts.wait : undefined; - return (await waitForTx(this.aztecNode, txHash, waitOpts)) as SendReturn; + const receipt = await waitForTx(this.aztecNode, txHash, waitOpts); + + // Display debug logs from public execution if present (served in test mode only) + if (receipt.debugLogs?.length) { + await displayDebugLogs(receipt.debugLogs, this.getContractName.bind(this)); + } + + return receipt as SendReturn; + } + + /** + * Resolves a contract address to a human-readable name via PXE, if available. + * @param address - The contract address to resolve. + */ + protected async getContractName(address: AztecAddress): Promise { + const instance = await this.pxe.getContractInstance(address); + if (!instance) { + return undefined; + } + const artifact = await this.pxe.getContractArtifact(instance.currentContractClassId); + return artifact?.name; } protected contextualizeError(err: Error, ...context: string[]): Error { @@ -417,8 +438,8 @@ export abstract class BaseWallet implements Wallet { return err; } - simulateUtility(call: FunctionCall, opts: SimulateUtilityOptions): Promise { - return this.pxe.simulateUtility(call, { authwits: opts.authWitnesses, scopes: [opts.scope] }); + executeUtility(call: FunctionCall, opts: ExecuteUtilityOptions): Promise { + return this.pxe.executeUtility(call, { authwits: opts.authWitnesses, scopes: [opts.scope] }); } async getPrivateEvents( diff --git a/yarn-project/wallet-sdk/src/base-wallet/utils.ts b/yarn-project/wallet-sdk/src/base-wallet/utils.ts index 81737e7674dc..24153108d8ce 100644 --- a/yarn-project/wallet-sdk/src/base-wallet/utils.ts +++ b/yarn-project/wallet-sdk/src/base-wallet/utils.ts @@ -4,6 +4,8 @@ import type { ChainInfo } from '@aztec/entrypoints/interfaces'; import { makeTuple } from '@aztec/foundation/array'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { Tuple } from '@aztec/foundation/serialize'; +import type { ContractNameResolver } from '@aztec/pxe/client/lazy'; +import { displayDebugLogs } from 
'@aztec/pxe/client/lazy'; import { generateSimulatedProvingResult } from '@aztec/pxe/simulator'; import { type FunctionCall, FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; @@ -72,6 +74,7 @@ async function simulateBatchViaNode( gasSettings: GasSettings, blockHeader: BlockHeader, skipFeeEnforcement: boolean, + getContractName: ContractNameResolver, ): Promise { const txContext = new TxContext(chainInfo.chainId, chainInfo.version, gasSettings); @@ -145,6 +148,9 @@ async function simulateBatchViaNode( throw publicOutput.revertReason; } + // Display debug logs from the public simulation. + await displayDebugLogs(publicOutput.debugLogs, getContractName); + return new TxSimulationResult(privateResult, provingResult.publicInputs, publicOutput, undefined); } @@ -169,6 +175,7 @@ export async function simulateViaNode( gasSettings: GasSettings, blockHeader: BlockHeader, skipFeeEnforcement: boolean = true, + getContractName: ContractNameResolver, ): Promise { const batches: FunctionCall[][] = []; @@ -187,6 +194,7 @@ export async function simulateViaNode( gasSettings, blockHeader, skipFeeEnforcement, + getContractName, ); results.push(result); }