diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 4cd972257b7d..94806c18d661 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -111,7 +111,7 @@ export class BarretenbergSync extends BarretenbergApiSync { return barretenbergSyncSingletonPromise; } - static getSingleton() { + static async getSingleton() { if (!barretenbergSyncSingleton) { throw new Error('First call BarretenbergSync.initSingleton() on @aztec/bb.js module.'); } diff --git a/yarn-project/accounts/src/defaults/account_contract.ts b/yarn-project/accounts/src/defaults/account_contract.ts index 6854af1a10ba..e71238ef83e6 100644 --- a/yarn-project/accounts/src/defaults/account_contract.ts +++ b/yarn-project/accounts/src/defaults/account_contract.ts @@ -11,7 +11,7 @@ import { DefaultAccountInterface } from '../defaults/account_interface.js'; */ export abstract class DefaultAccountContract implements AccountContract { abstract getAuthWitnessProvider(address: CompleteAddress): AuthWitnessProvider; - abstract getDeploymentArgs(): any[] | undefined; + abstract getDeploymentArgs(): Promise; constructor(private artifact: ContractArtifact) {} diff --git a/yarn-project/accounts/src/ecdsa/ecdsa_k/account_contract.ts b/yarn-project/accounts/src/ecdsa/ecdsa_k/account_contract.ts index 3b5f1b54f113..de1cfaca3b8d 100644 --- a/yarn-project/accounts/src/ecdsa/ecdsa_k/account_contract.ts +++ b/yarn-project/accounts/src/ecdsa/ecdsa_k/account_contract.ts @@ -16,8 +16,8 @@ export class EcdsaKAccountContract extends DefaultAccountContract { super(EcdsaKAccountContractArtifact as ContractArtifact); } - getDeploymentArgs() { - const signingPublicKey = new Ecdsa().computePublicKey(this.signingPrivateKey); + async getDeploymentArgs() { + const signingPublicKey = await new Ecdsa().computePublicKey(this.signingPrivateKey); return [signingPublicKey.subarray(0, 32), signingPublicKey.subarray(32, 64)]; } @@ -30,9 +30,9 @@ 
export class EcdsaKAccountContract extends DefaultAccountContract { class EcdsaKAuthWitnessProvider implements AuthWitnessProvider { constructor(private signingPrivateKey: Buffer) {} - createAuthWit(messageHash: Fr): Promise { + async createAuthWit(messageHash: Fr): Promise { const ecdsa = new Ecdsa(); - const signature = ecdsa.constructSignature(messageHash.toBuffer(), this.signingPrivateKey); + const signature = await ecdsa.constructSignature(messageHash.toBuffer(), this.signingPrivateKey); return Promise.resolve(new AuthWitness(messageHash, [...signature.r, ...signature.s])); } } diff --git a/yarn-project/accounts/src/ecdsa/ssh_ecdsa_r/account_contract.ts b/yarn-project/accounts/src/ecdsa/ssh_ecdsa_r/account_contract.ts index 60812c836808..e76ed9d50475 100644 --- a/yarn-project/accounts/src/ecdsa/ssh_ecdsa_r/account_contract.ts +++ b/yarn-project/accounts/src/ecdsa/ssh_ecdsa_r/account_contract.ts @@ -21,7 +21,7 @@ export class EcdsaRSSHAccountContract extends DefaultAccountContract { super(EcdsaRAccountContractArtifact as ContractArtifact); } - getDeploymentArgs() { + async getDeploymentArgs() { return [this.signingPublicKey.subarray(0, 32), this.signingPublicKey.subarray(32, 64)]; } diff --git a/yarn-project/accounts/src/schnorr/account_contract.ts b/yarn-project/accounts/src/schnorr/account_contract.ts index 7bb4c6dda1f2..82091b0db5be 100644 --- a/yarn-project/accounts/src/schnorr/account_contract.ts +++ b/yarn-project/accounts/src/schnorr/account_contract.ts @@ -16,8 +16,8 @@ export class SchnorrAccountContract extends DefaultAccountContract { super(SchnorrAccountContractArtifact as ContractArtifact); } - getDeploymentArgs() { - const signingPublicKey = new Schnorr().computePublicKey(this.signingPrivateKey); + async getDeploymentArgs() { + const signingPublicKey = await new Schnorr().computePublicKey(this.signingPrivateKey); return [signingPublicKey.x, signingPublicKey.y]; } @@ -30,9 +30,9 @@ export class SchnorrAccountContract extends DefaultAccountContract 
{ class SchnorrAuthWitnessProvider implements AuthWitnessProvider { constructor(private signingPrivateKey: GrumpkinScalar) {} - createAuthWit(messageHash: Fr): Promise { + async createAuthWit(messageHash: Fr): Promise { const schnorr = new Schnorr(); - const signature = schnorr.constructSignature(messageHash.toBuffer(), this.signingPrivateKey).toBuffer(); + const signature = (await schnorr.constructSignature(messageHash.toBuffer(), this.signingPrivateKey)).toBuffer(); return Promise.resolve(new AuthWitness(messageHash, [...signature])); } } diff --git a/yarn-project/accounts/src/single_key/account_contract.ts b/yarn-project/accounts/src/single_key/account_contract.ts index ed2de53ebfec..3a14fe53bb3d 100644 --- a/yarn-project/accounts/src/single_key/account_contract.ts +++ b/yarn-project/accounts/src/single_key/account_contract.ts @@ -16,7 +16,7 @@ export class SingleKeyAccountContract extends DefaultAccountContract { super(SchnorrSingleKeyAccountContractArtifact as ContractArtifact); } - getDeploymentArgs(): undefined { + async getDeploymentArgs(): Promise { return undefined; } @@ -33,9 +33,9 @@ export class SingleKeyAccountContract extends DefaultAccountContract { class SingleKeyAuthWitnessProvider implements AuthWitnessProvider { constructor(private privateKey: GrumpkinScalar, private account: CompleteAddress) {} - createAuthWit(messageHash: Fr): Promise { + async createAuthWit(messageHash: Fr): Promise { const schnorr = new Schnorr(); - const signature = schnorr.constructSignature(messageHash.toBuffer(), this.privateKey); + const signature = await schnorr.constructSignature(messageHash.toBuffer(), this.privateKey); const witness = [...this.account.publicKeys.toFields(), ...signature.toBuffer(), this.account.partialAddress]; return Promise.resolve(new AuthWitness(messageHash, witness)); } diff --git a/yarn-project/accounts/src/testing/configuration.ts b/yarn-project/accounts/src/testing/configuration.ts index bfd74f9912f5..f24e3388c3d3 100644 --- 
a/yarn-project/accounts/src/testing/configuration.ts +++ b/yarn-project/accounts/src/testing/configuration.ts @@ -43,18 +43,24 @@ export function getInitialTestAccountsWallets(pxe: PXE): Promise { const registeredAccounts = await pxe.getRegisteredAccounts(); return Promise.all( - INITIAL_TEST_SECRET_KEYS.filter(initialSecretKey => { - const initialEncryptionKey = deriveMasterIncomingViewingSecretKey(initialSecretKey); - const publicKey = generatePublicKey(initialEncryptionKey); - return ( - registeredAccounts.find(registered => registered.publicKeys.masterIncomingViewingPublicKey.equals(publicKey)) != - undefined - ); - }).map(secretKey => { - const signingKey = deriveSigningKey(secretKey); - // TODO(#5726): use actual salt here instead of hardcoding Fr.ZERO - return getSchnorrAccount(pxe, secretKey, signingKey, Fr.ZERO).getWallet(); - }), + ( + await Promise.all( + INITIAL_TEST_SECRET_KEYS.map(async initialSecretKey => { + const initialEncryptionKey = deriveMasterIncomingViewingSecretKey(initialSecretKey); + const publicKey = await generatePublicKey(initialEncryptionKey); + const found = registeredAccounts.find(registered => + registered.publicKeys.masterIncomingViewingPublicKey.equals(publicKey), + ); + return found ? 
initialSecretKey : undefined; + }), + ) + ) + .filter((secretKey): secretKey is Fr => !!secretKey) + .map(secretKey => { + const signingKey = deriveSigningKey(secretKey); + // TODO(#5726): use actual salt here instead of hardcoding Fr.ZERO + return getSchnorrAccount(pxe, secretKey, signingKey, Fr.ZERO).getWallet(); + }), ); } diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 87173270dc59..941a9104ae41 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -61,7 +61,7 @@ describe('Archiver', () => { const GENESIS_ROOT = new Fr(GENESIS_ARCHIVE_ROOT).toString(); - beforeEach(() => { + beforeEach(async () => { now = +new Date(); publicClient = mock>({ // Return a block with a reasonable timestamp @@ -137,7 +137,7 @@ describe('Archiver', () => { (b, i) => (b.header.globalVariables.timestamp = new Fr(now + DefaultL1ContractsConfig.ethereumSlotDuration * (i + 1))), ); - const rollupTxs = blocks.map(makeRollupTx); + const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); publicClient.getBlockNumber.mockResolvedValueOnce(2500n).mockResolvedValueOnce(2600n).mockResolvedValueOnce(2700n); @@ -232,7 +232,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = blocks.map(makeRollupTx); + const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. 
publicClient.getBlockNumber.mockResolvedValue(102n); @@ -273,7 +273,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = blocks.map(makeRollupTx); + const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n); rollupRead.status @@ -308,7 +308,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = blocks.map(makeRollupTx); + const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n).mockResolvedValueOnce(150n); @@ -409,11 +409,11 @@ describe('Archiver', () => { * @param block - The L2Block. * @returns A fake tx with calldata that corresponds to calling process in the Rollup contract. */ -function makeRollupTx(l2Block: L2Block) { +async function makeRollupTx(l2Block: L2Block) { const header = toHex(l2Block.header.toBuffer()); const body = toHex(l2Block.body.toBuffer()); const archive = toHex(l2Block.archive.root.toBuffer()); - const blockHash = toHex(l2Block.header.hash().toBuffer()); + const blockHash = toHex((await l2Block.header.hash()).toBuffer()); const input = encodeFunctionData({ abi: RollupAbi, functionName: 'propose', diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 50730dbb56e7..fcc32c6f7caf 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -59,9 +59,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }, }); - beforeEach(() => { + beforeEach(async () => { store = getStore(); - blocks = times(10, i => makeL1Published(L2Block.random(i + 1), i + 10)); + blocks = await Promise.all(times(10, async i => makeL1Published(await L2Block.random(i + 1), i + 10))); }); describe('addBlocks', () => { @@ -89,7 +89,9 
@@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }); it('can unwind multiple empty blocks', async () => { - const emptyBlocks = times(10, i => makeL1Published(L2Block.random(i + 1, 0), i + 10)); + const emptyBlocks = await Promise.all( + times(10, async i => makeL1Published(await L2Block.random(i + 1, 0), i + 10)), + ); await store.addBlocks(emptyBlocks); expect(await store.getSynchedL2BlockNumber()).toBe(10); @@ -284,7 +286,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch const blockNum = 10; beforeEach(async () => { - contractInstance = { ...SerializableContractInstance.random(), address: AztecAddress.random() }; + contractInstance = { ...(await SerializableContractInstance.random()), address: AztecAddress.random() }; await store.addContractInstances([contractInstance], blockNum); }); @@ -716,8 +718,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch const numBlocks = 10; const nullifiersPerBlock = new Map(); - beforeEach(() => { - blocks = times(numBlocks, (index: number) => L2Block.random(index + 1, 1)); + beforeEach(async () => { + blocks = await Promise.all(times(numBlocks, (index: number) => L2Block.random(index + 1, 1))); blocks.forEach((block, blockIndex) => { nullifiersPerBlock.set( diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index 458ab2317789..fdfdc3957080 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -197,7 +197,7 @@ export class BlockStore { * @param txHash - The hash of a tx we try to get the receipt for. * @returns The requested tx receipt (or undefined if not found). 
*/ - getSettledTxReceipt(txHash: TxHash): TxReceipt | undefined { + async getSettledTxReceipt(txHash: TxHash): Promise { const [blockNumber, txIndex] = this.getTxLocation(txHash) ?? []; if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') { return undefined; diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 74480bc80070..03fe93734027 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -447,7 +447,7 @@ export class MemoryArchiverStore implements ArchiverDataStore { * @param txHash - The hash of a tx we try to get the receipt for. * @returns The requested tx receipt (or undefined if not found). */ - public getSettledTxReceipt(txHash: TxHash): Promise { + public async getSettledTxReceipt(txHash: TxHash): Promise { for (const block of this.l2Blocks) { for (const txEffect of block.data.body.txEffects) { if (txEffect.txHash.equals(txHash)) { diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index a2bd3c66ac7a..8fe42ebb5513 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -37,7 +37,7 @@ export async function createArchiver( async function registerProtocolContracts(store: KVArchiverDataStore) { const blockNumber = 0; for (const name of protocolContractNames) { - const contract = getCanonicalProtocolContract(name); + const contract = await getCanonicalProtocolContract(name); const contractClassPublic: ContractClassPublic = { ...contract.contractClass, privateFunctions: [], diff --git a/yarn-project/archiver/src/test/mock_archiver.ts b/yarn-project/archiver/src/test/mock_archiver.ts index a31e7bbd8726..2424859d7da0 100644 --- a/yarn-project/archiver/src/test/mock_archiver.ts +++ 
b/yarn-project/archiver/src/test/mock_archiver.ts @@ -42,7 +42,7 @@ export class MockPrefilledArchiver extends MockArchiver { messages.forEach((msgs, i) => this.setL1ToL2Messages(blocks[i].number, msgs)); } - public override createBlocks(numBlocks: number) { + public override async createBlocks(numBlocks: number) { if (this.l2Blocks.length + numBlocks > this.precomputed.length) { throw new Error( `Not enough precomputed blocks to create ${numBlocks} more blocks (already at ${this.l2Blocks.length})`, diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 6dd2c43a8b6f..4ea0fdea2339 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -23,10 +23,10 @@ export class MockL2BlockSource implements L2BlockSource { private log = createDebugLogger('aztec:archiver:mock_l2_block_source'); - public createBlocks(numBlocks: number) { + public async createBlocks(numBlocks: number) { for (let i = 0; i < numBlocks; i++) { const blockNum = this.l2Blocks.length + 1; - const block = L2Block.random(blockNum); + const block = await L2Block.random(blockNum); this.l2Blocks.push(block); } @@ -141,7 +141,7 @@ export class MockL2BlockSource implements L2BlockSource { * @param txHash - The hash of a tx we try to get the receipt for. * @returns The requested tx receipt (or undefined if not found). 
*/ - public getSettledTxReceipt(txHash: TxHash): Promise { + public async getSettledTxReceipt(txHash: TxHash): Promise { for (const block of this.l2Blocks) { for (const txEffect of block.body.txEffects) { if (txEffect.txHash.equals(txHash)) { diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 649c9bfe8fe4..6fa024cee931 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -87,7 +87,7 @@ describe('aztec node', () => { describe('tx validation', () => { it('tests that the node correctly validates double spends', async () => { - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000)]; + const txs = [await mockTxForRollup(0x10000), await mockTxForRollup(0x20000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); @@ -117,7 +117,7 @@ describe('aztec node', () => { }); it('tests that the node correctly validates chain id', async () => { - const tx = mockTxForRollup(0x10000); + const tx = await mockTxForRollup(0x10000); tx.data.constants.txContext.chainId = chainId; expect(await node.isValidTx(tx)).toBe(true); @@ -129,7 +129,7 @@ describe('aztec node', () => { }); it('tests that the node correctly validates max block numbers', async () => { - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; + const txs = [await mockTxForRollup(0x10000), await mockTxForRollup(0x20000), await mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 9a244b42e16e..e1a188e500de 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -598,7 +598,7 @@ export class AztecNodeService implements AztecNode { true, ); - let l2toL1SubtreeRoots = 
l2toL1Subtrees.map(t => Fr.fromBuffer(t.getRoot(true))); + let l2toL1SubtreeRoots = await Promise.all(l2toL1Subtrees.map(async t => Fr.fromBuffer(await t.getRoot(true)))); if (l2toL1SubtreeRoots.length < 2) { l2toL1SubtreeRoots = padArrayEnd(l2toL1SubtreeRoots, Fr.ZERO, 2); } @@ -750,7 +750,7 @@ export class AztecNodeService implements AztecNode { */ public async getPublicStorageAt(contract: AztecAddress, slot: Fr, blockNumber: L2BlockNumber): Promise { const committedDb = await this.#getWorldState(blockNumber); - const leafSlot = computePublicDataTreeLeafSlot(contract, slot); + const leafSlot = await computePublicDataTreeLeafSlot(contract, slot); const lowLeafResult = await committedDb.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot.toBigInt()); if (!lowLeafResult || !lowLeafResult.alreadyPresent) { diff --git a/yarn-project/aztec.js/src/account/contract.ts b/yarn-project/aztec.js/src/account/contract.ts index 8408c936563e..4be2ac57f1ca 100644 --- a/yarn-project/aztec.js/src/account/contract.ts +++ b/yarn-project/aztec.js/src/account/contract.ts @@ -18,7 +18,7 @@ export interface AccountContract { /** * Returns the deployment arguments for this instance, or undefined if this contract does not require deployment. */ - getDeploymentArgs(): any[] | undefined; + getDeploymentArgs(): Promise; /** * Returns the account interface for this account contract given a deployment at the provided address. 
diff --git a/yarn-project/aztec.js/src/account_manager/deploy_account_method.ts b/yarn-project/aztec.js/src/account_manager/deploy_account_method.ts index 57d10fa67a43..f77c47528977 100644 --- a/yarn-project/aztec.js/src/account_manager/deploy_account_method.ts +++ b/yarn-project/aztec.js/src/account_manager/deploy_account_method.ts @@ -19,7 +19,7 @@ import { EntrypointPayload, computeCombinedPayloadHash } from '../entrypoint/pay */ export class DeployAccountMethod extends DeployMethod { #authWitnessProvider: AuthWitnessProvider; - #feePaymentArtifact: FunctionArtifact | undefined; + #feePaymentArtifact: Promise; constructor( authWitnessProvider: AuthWitnessProvider, @@ -43,7 +43,7 @@ export class DeployAccountMethod extends DeployMethod { this.#feePaymentArtifact = typeof feePaymentNameOrArtifact === 'string' ? getFunctionArtifact(artifact, feePaymentNameOrArtifact) - : feePaymentNameOrArtifact; + : Promise.resolve(feePaymentNameOrArtifact); } protected override async getInitializeFunctionCalls( @@ -58,23 +58,20 @@ export class DeployAccountMethod extends DeployMethod { const feePayload = await EntrypointPayload.fromFeeOptions(address, fee); exec.calls.push({ - name: this.#feePaymentArtifact.name, + name: feePaymentArtifact.name, to: address, - args: encodeArguments(this.#feePaymentArtifact, [emptyAppPayload, feePayload, false]), - selector: FunctionSelector.fromNameAndParameters( - this.#feePaymentArtifact.name, - this.#feePaymentArtifact.parameters, - ), - type: this.#feePaymentArtifact.functionType, - isStatic: this.#feePaymentArtifact.isStatic, - returnTypes: this.#feePaymentArtifact.returnTypes, + args: encodeArguments(feePaymentArtifact, [emptyAppPayload, feePayload, false]), + selector: await FunctionSelector.fromNameAndParameters(feePaymentArtifact.name, feePaymentArtifact.parameters), + type: feePaymentArtifact.functionType, + isStatic: feePaymentArtifact.isStatic, + returnTypes: feePaymentArtifact.returnTypes, }); exec.authWitnesses ??= []; 
exec.packedArguments ??= []; exec.authWitnesses.push( - await this.#authWitnessProvider.createAuthWit(computeCombinedPayloadHash(emptyAppPayload, feePayload)), + await this.#authWitnessProvider.createAuthWit(await computeCombinedPayloadHash(emptyAppPayload, feePayload)), ); exec.packedArguments.push(...emptyAppPayload.packedArguments); diff --git a/yarn-project/aztec.js/src/account_manager/index.ts b/yarn-project/aztec.js/src/account_manager/index.ts index a9f5e4cc3280..1bd372e849a5 100644 --- a/yarn-project/aztec.js/src/account_manager/index.ts +++ b/yarn-project/aztec.js/src/account_manager/index.ts @@ -28,26 +28,29 @@ export class AccountManager { /** Deployment salt for the account contract. */ public readonly salt: Fr; - private instance: ContractInstanceWithAddress; + private instance: Promise; constructor(private pxe: PXE, private secretKey: Fr, private accountContract: AccountContract, salt?: Salt) { - this.salt = salt !== undefined ? new Fr(salt) : Fr.random(); - - const { publicKeys } = deriveKeys(secretKey); - - this.instance = getContractInstanceFromDeployParams(this.accountContract.getContractArtifact(), { - constructorArgs: this.accountContract.getDeploymentArgs(), - salt: this.salt, - publicKeys, - }); + salt = salt !== undefined ? 
new Fr(salt) : Fr.random(); + this.salt = salt; + + this.instance = (async () => { + const { publicKeys } = await deriveKeys(secretKey); + + return await getContractInstanceFromDeployParams(this.accountContract.getContractArtifact(), { + constructorArgs: await this.accountContract.getDeploymentArgs(), + salt, + publicKeys, + }); + })(); } - protected getPublicKeys() { - return this.instance.publicKeys; + protected async getPublicKeys() { + return (await this.instance).publicKeys; } - protected getPublicKeysHash() { - return this.getPublicKeys().hash(); + protected async getPublicKeysHash() { + return (await this.getPublicKeys()).hash(); } /** @@ -56,7 +59,7 @@ export class AccountManager { */ public async getAccount(): Promise { const nodeInfo = await this.pxe.getNodeInfo(); - const completeAddress = this.getCompleteAddress(); + const completeAddress = await this.getCompleteAddress(); return this.accountContract.getInterface(completeAddress, nodeInfo); } @@ -65,8 +68,8 @@ export class AccountManager { * Does not require the account to be deployed or registered. * @returns The address, partial address, and encryption public key. */ - public getCompleteAddress(): CompleteAddress { - return CompleteAddress.fromSecretKeyAndInstance(this.secretKey, this.instance); + public async getCompleteAddress(): Promise { + return await CompleteAddress.fromSecretKeyAndInstance(this.secretKey, await this.instance); } /** @@ -74,8 +77,8 @@ export class AccountManager { * Does not require the account to be deployed or registered. * @returns The address. */ - public getAddress() { - return this.getCompleteAddress().address; + public async getAddress() { + return (await this.getCompleteAddress()).address; } /** @@ -83,8 +86,8 @@ export class AccountManager { * Does not require the account to be deployed or registered. * @returns ContractInstance instance. 
*/ - public getInstance(): ContractInstanceWithAddress { - return this.instance; + public async getInstance(): Promise { + return await this.instance; } /** @@ -107,10 +110,10 @@ export class AccountManager { public async register(): Promise { await this.pxe.registerContract({ artifact: this.accountContract.getContractArtifact(), - instance: this.getInstance(), + instance: await this.getInstance(), }); - await this.pxe.registerAccount(this.secretKey, this.getCompleteAddress().partialAddress); + await this.pxe.registerAccount(this.secretKey, (await this.getCompleteAddress()).partialAddress); return this.getWallet(); } @@ -128,7 +131,7 @@ export class AccountManager { ); } - await this.pxe.registerAccount(this.secretKey, this.getCompleteAddress().partialAddress); + await this.pxe.registerAccount(this.secretKey, (await this.getCompleteAddress()).partialAddress); const { l1ChainId: chainId, protocolVersion } = await this.pxe.getNodeInfo(); const deployWallet = new SignerlessWallet(this.pxe, new DefaultMultiCallEntrypoint(chainId, protocolVersion)); @@ -136,10 +139,10 @@ export class AccountManager { // We use a signerless wallet with the multi call entrypoint in order to make multiple calls in one go // If we used getWallet, the deployment would get routed via the account contract entrypoint // and it can't be used unless the contract is initialized - const args = this.accountContract.getDeploymentArgs() ?? []; + const args = (await this.accountContract.getDeploymentArgs()) ?? 
[]; return new DeployAccountMethod( - this.accountContract.getAuthWitnessProvider(this.getCompleteAddress()), - this.getPublicKeys(), + this.accountContract.getAuthWitnessProvider(await this.getCompleteAddress()), + await this.getPublicKeys(), deployWallet, this.accountContract.getContractArtifact(), args, diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index 66a54e8cfb5b..37f1ae997d6f 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -1,10 +1,10 @@ import { - type Tx, - type TxExecutionRequest, - type TxHash, - type TxProvingResult, - type TxReceipt, - type TxSimulationResult, + type Tx, + type TxExecutionRequest, + type TxHash, + type TxProvingResult, + type TxReceipt, + type TxSimulationResult, } from '@aztec/circuit-types'; import { AztecAddress, @@ -140,9 +140,9 @@ describe('Contract Class', () => { notes: {}, }; - beforeEach(() => { + beforeEach(async () => { contractAddress = AztecAddress.random(); - account = CompleteAddress.random(); + account = await CompleteAddress.random(); contractInstance = { address: contractAddress } as ContractInstanceWithAddress; wallet = mock(); diff --git a/yarn-project/aztec.js/src/contract/contract_base.ts b/yarn-project/aztec.js/src/contract/contract_base.ts index 94efbb63a9e7..4f1fd6fa85ca 100644 --- a/yarn-project/aztec.js/src/contract/contract_base.ts +++ b/yarn-project/aztec.js/src/contract/contract_base.ts @@ -18,7 +18,7 @@ export type ContractMethod = ((...args: any[]) => ContractFunctionInteraction) & /** * The unique identifier for a contract function in bytecode. 
*/ - readonly selector: FunctionSelector; + readonly selector: Promise; }; /** diff --git a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts index 71117bdf280f..2b345c4aff8c 100644 --- a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts +++ b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts @@ -73,12 +73,12 @@ export class ContractFunctionInteraction extends BaseContractInteraction { * block for constructing batch requests. * @returns An execution request wrapped in promise. */ - public request(): FunctionCall { + public async request(): Promise { const args = encodeArguments(this.functionDao, this.args); return { name: this.functionDao.name, args, - selector: FunctionSelector.fromNameAndParameters(this.functionDao.name, this.functionDao.parameters), + selector: await FunctionSelector.fromNameAndParameters(this.functionDao.name, this.functionDao.parameters), type: this.functionDao.functionType, to: this.contractAddress, isStatic: this.functionDao.isStatic, diff --git a/yarn-project/aztec.js/src/contract/deploy_method.ts b/yarn-project/aztec.js/src/contract/deploy_method.ts index 869981308a77..b3e5bd45f75e 100644 --- a/yarn-project/aztec.js/src/contract/deploy_method.ts +++ b/yarn-project/aztec.js/src/contract/deploy_method.ts @@ -100,7 +100,7 @@ export class DeployMethod extends Bas // in case the initializer is public. This hints at the need of having "transient" contracts scoped to a // simulation, so we can run the simulation with a set of contracts, but only "commit" them to the wallet // once this tx has gone through. 
- await this.wallet.registerContract({ artifact: this.artifact, instance: this.getInstance(options) }); + await this.wallet.registerContract({ artifact: this.artifact, instance: await this.getInstance(options) }); const bootstrap = await this.getInitializeFunctionCalls(options); @@ -124,7 +124,7 @@ export class DeployMethod extends Bas * @param options - Deployment options. */ public async register(options: DeployOptions = {}): Promise { - const instance = this.getInstance(options); + const instance = await this.getInstance(options); await this.wallet.registerContract({ artifact: this.artifact, instance }); return this.postDeployCtor(instance.address, this.wallet); } @@ -140,11 +140,11 @@ export class DeployMethod extends Bas const calls: FunctionCall[] = []; // Set contract instance object so it's available for populating the DeploySendTx object - const instance = this.getInstance(options); + const instance = await this.getInstance(options); // Obtain contract class from artifact and check it matches the reported one by the instance. // TODO(@spalladino): We're unnecessarily calculating the contract class multiple times here. - const contractClass = getContractClassFromArtifact(this.artifact); + const contractClass = await getContractClassFromArtifact(this.artifact); if (!instance.contractClassId.equals(contractClass.id)) { throw new Error( `Contract class mismatch when deploying contract: got ${instance.contractClassId.toString()} from instance and ${contractClass.id.toString()} from artifact`, @@ -161,13 +161,13 @@ export class DeployMethod extends Bas this.log.info( `Creating request for registering contract class ${contractClass.id.toString()} as part of deployment for ${instance.address.toString()}`, ); - calls.push((await registerContractClass(this.wallet, this.artifact)).request()); + calls.push(await (await registerContractClass(this.wallet, this.artifact)).request()); } } // Deploy the contract via the instance deployer. 
if (!options.skipPublicDeployment) { - calls.push(deployInstance(this.wallet, instance).request()); + calls.push(await (await deployInstance(this.wallet, instance)).request()); } return { calls }; @@ -190,7 +190,7 @@ export class DeployMethod extends Bas this.constructorArtifact, this.args, ); - calls.push(constructorCall.request()); + calls.push(await constructorCall.request()); } return Promise.resolve({ calls }); } @@ -206,9 +206,7 @@ export class DeployMethod extends Bas public override send(options: DeployOptions = {}): DeploySentTx { const txHashPromise = super.send(options).getTxHash(); const instance = this.getInstance(options); - this.log.debug( - `Sent deployment tx of ${this.artifact.name} contract with deployment address ${instance.address.toString()}`, - ); + this.log.debug(`Sent deployment tx of ${this.artifact.name} contract`); return new DeploySentTx(this.wallet, txHashPromise, this.postDeployCtor, instance); } @@ -218,9 +216,10 @@ export class DeployMethod extends Bas * @param options - An object containing various deployment options. * @returns An instance object. 
*/ - public getInstance(options: DeployOptions = {}): ContractInstanceWithAddress { + public async getInstance(options: DeployOptions = {}): Promise { if (!this.instance) { - this.instance = getContractInstanceFromDeployParams(this.artifact, { + // TODO: fix race condition + this.instance = await getContractInstanceFromDeployParams(this.artifact, { constructorArgs: this.args, salt: options.contractAddressSalt, publicKeys: this.publicKeys, @@ -238,7 +237,7 @@ export class DeployMethod extends Bas */ public override async prove(options: DeployOptions): Promise> { const txProvingResult = await this.proveInternal(options); - const instance = this.getInstance(options); + const instance = await this.getInstance(options); return new DeployProvenTx(this.wallet, txProvingResult.toTx(), this.postDeployCtor, instance); } diff --git a/yarn-project/aztec.js/src/contract/deploy_proven_tx.ts b/yarn-project/aztec.js/src/contract/deploy_proven_tx.ts index e364c03b20d5..2cb12729dfcc 100644 --- a/yarn-project/aztec.js/src/contract/deploy_proven_tx.ts +++ b/yarn-project/aztec.js/src/contract/deploy_proven_tx.ts @@ -27,6 +27,6 @@ export class DeployProvenTx extends Prove return this.wallet.sendTx(this.getPlainDataTx()); })(); - return new DeploySentTx(this.wallet, promise, this.postDeployCtor, this.instance); + return new DeploySentTx(this.wallet, promise, this.postDeployCtor, Promise.resolve(this.instance)); } } diff --git a/yarn-project/aztec.js/src/contract/deploy_sent_tx.ts b/yarn-project/aztec.js/src/contract/deploy_sent_tx.ts index 6f59cfeb2617..02e203d474ed 100644 --- a/yarn-project/aztec.js/src/contract/deploy_sent_tx.ts +++ b/yarn-project/aztec.js/src/contract/deploy_sent_tx.ts @@ -31,7 +31,7 @@ export class DeploySentTx extends SentTx txHashPromise: Promise, private postDeployCtor: (address: AztecAddress, wallet: Wallet) => Promise, /** The deployed contract instance */ - public instance: ContractInstanceWithAddress, + public instance: Promise, ) { super(wallet, 
txHashPromise); } @@ -43,7 +43,8 @@ export class DeploySentTx extends SentTx */ public async deployed(opts?: DeployedWaitOpts): Promise { const receipt = await this.wait(opts); - this.log.info(`Contract ${this.instance.address.toString()} successfully deployed.`); + const instance = await this.instance; + this.log.info(`Contract ${instance.address.toString()} successfully deployed.`); return receipt.contract; } @@ -58,12 +59,13 @@ export class DeploySentTx extends SentTx return { ...receipt, contract }; } - protected getContractObject(wallet?: Wallet): Promise { + protected async getContractObject(wallet?: Wallet): Promise { const isWallet = (pxe: PXE | Wallet): pxe is Wallet => !!(pxe as Wallet).createTxExecutionRequest; const contractWallet = wallet ?? (isWallet(this.pxe) && this.pxe); if (!contractWallet) { throw new Error(`A wallet is required for creating a contract instance`); } - return this.postDeployCtor(this.instance.address, contractWallet) as Promise; + const instance = await this.instance; + return this.postDeployCtor(instance.address, contractWallet) as Promise; } } diff --git a/yarn-project/aztec.js/src/contract/get_gas_limits.test.ts b/yarn-project/aztec.js/src/contract/get_gas_limits.test.ts index f42e45a06551..5381210dfbdc 100644 --- a/yarn-project/aztec.js/src/contract/get_gas_limits.test.ts +++ b/yarn-project/aztec.js/src/contract/get_gas_limits.test.ts @@ -6,10 +6,10 @@ import { getGasLimits } from './get_gas_limits.js'; describe('getGasLimits', () => { let txSimulationResult: TxSimulationResult; - beforeEach(() => { - txSimulationResult = mockSimulatedTx(); + beforeEach(async () => { + txSimulationResult = await mockSimulatedTx(); - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.gasUsed = Gas.from({ daGas: 100, l2Gas: 200 }); txSimulationResult.publicInputs = tx.data; diff --git a/yarn-project/aztec.js/src/deployment/broadcast_function.ts b/yarn-project/aztec.js/src/deployment/broadcast_function.ts index 
599d2d4b63c7..4e1dd64ebcbd 100644 --- a/yarn-project/aztec.js/src/deployment/broadcast_function.ts +++ b/yarn-project/aztec.js/src/deployment/broadcast_function.ts @@ -6,7 +6,7 @@ import { createUnconstrainedFunctionMembershipProof, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { type ContractArtifact, type FunctionSelector, FunctionType, bufferAsFields } from '@aztec/foundation/abi'; +import { type ContractArtifact, FunctionSelector, FunctionType, bufferAsFields } from '@aztec/foundation/abi'; import { padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; @@ -28,8 +28,14 @@ export async function broadcastPrivateFunction( artifact: ContractArtifact, selector: FunctionSelector, ): Promise { - const contractClass = getContractClassFromArtifact(artifact); - const privateFunctionArtifact = artifact.functions.find(fn => selector.equals(fn)); + const contractClass = await getContractClassFromArtifact(artifact); + const privateFunctionArtifact = ( + await Promise.all( + artifact.functions.map(async fn => + selector.equals(await FunctionSelector.fromNameAndParameters(fn.name, fn.parameters)) ? 
fn : undefined, + ), + ) + ).find(fn => !!fn); if (!privateFunctionArtifact) { throw new Error(`Private function with selector ${selector.toString()} not found`); } @@ -42,7 +48,7 @@ export async function broadcastPrivateFunction( unconstrainedFunctionsArtifactTreeRoot, privateFunctionTreeSiblingPath, privateFunctionTreeLeafIndex, - } = createPrivateFunctionMembershipProof(selector, artifact); + } = await createPrivateFunctionMembershipProof(selector, artifact); const vkHash = computeVerificationKeyHash(privateFunctionArtifact); const bytecode = bufferAsFields( @@ -52,7 +58,7 @@ export async function broadcastPrivateFunction( await wallet.addCapsule(bytecode); - const registerer = getRegistererContract(wallet); + const registerer = await getRegistererContract(wallet); return Promise.resolve( registerer.methods.broadcast_private_function( contractClass.id, @@ -82,10 +88,17 @@ export async function broadcastUnconstrainedFunction( artifact: ContractArtifact, selector: FunctionSelector, ): Promise { - const contractClass = getContractClassFromArtifact(artifact); - const functionArtifactIndex = artifact.functions.findIndex( - fn => fn.functionType === FunctionType.UNCONSTRAINED && selector.equals(fn), - ); + const contractClass = await getContractClassFromArtifact(artifact); + const functionArtifactIndex = ( + await Promise.all( + artifact.functions.map(async fn => { + return ( + fn.functionType === FunctionType.UNCONSTRAINED && + selector.equals(await FunctionSelector.fromNameAndParameters(fn.name, fn.parameters)) + ); + }), + ) + ).findIndex(fn => !!fn); if (functionArtifactIndex < 0) { throw new Error(`Unconstrained function with selector ${selector.toString()} not found`); } @@ -97,13 +110,13 @@ export async function broadcastUnconstrainedFunction( artifactTreeSiblingPath, functionMetadataHash, privateFunctionsArtifactTreeRoot, - } = createUnconstrainedFunctionMembershipProof(selector, artifact); + } = await createUnconstrainedFunctionMembershipProof(selector, 
artifact); const bytecode = bufferAsFields(functionArtifact.bytecode, MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS); await wallet.addCapsule(bytecode); - const registerer = getRegistererContract(wallet); + const registerer = await getRegistererContract(wallet); return registerer.methods.broadcast_unconstrained_function( contractClass.id, artifactMetadataHash, diff --git a/yarn-project/aztec.js/src/deployment/deploy_instance.ts b/yarn-project/aztec.js/src/deployment/deploy_instance.ts index 24b5fa65d82b..bf0d501d28d5 100644 --- a/yarn-project/aztec.js/src/deployment/deploy_instance.ts +++ b/yarn-project/aztec.js/src/deployment/deploy_instance.ts @@ -9,8 +9,11 @@ import { getDeployerContract } from './protocol_contracts.js'; * @param wallet - The wallet to use for the deployment. * @param instance - The instance to deploy. */ -export function deployInstance(wallet: Wallet, instance: ContractInstanceWithAddress): ContractFunctionInteraction { - const deployerContract = getDeployerContract(wallet); +export async function deployInstance( + wallet: Wallet, + instance: ContractInstanceWithAddress, +): Promise { + const deployerContract = await getDeployerContract(wallet); const { salt, contractClassId, publicKeys, deployer } = instance; const isUniversalDeploy = deployer.isZero(); if (!isUniversalDeploy && !wallet.getAddress().equals(deployer)) { diff --git a/yarn-project/aztec.js/src/deployment/protocol_contracts.ts b/yarn-project/aztec.js/src/deployment/protocol_contracts.ts index 9fc681f8a556..5cbf059c1cd4 100644 --- a/yarn-project/aztec.js/src/deployment/protocol_contracts.ts +++ b/yarn-project/aztec.js/src/deployment/protocol_contracts.ts @@ -5,13 +5,13 @@ import { UnsafeContract } from '../contract/unsafe_contract.js'; import { type Wallet } from '../wallet/index.js'; /** Returns a Contract wrapper for the class registerer. 
*/ -export function getRegistererContract(wallet: Wallet) { - const { artifact, instance } = getCanonicalClassRegisterer(); +export async function getRegistererContract(wallet: Wallet) { + const { artifact, instance } = await getCanonicalClassRegisterer(); return new UnsafeContract(instance, artifact, wallet); } /** Returns a Contract wrapper for the instance deployer. */ -export function getDeployerContract(wallet: Wallet) { - const { artifact, instance } = getCanonicalInstanceDeployer(); +export async function getDeployerContract(wallet: Wallet) { + const { artifact, instance } = await getCanonicalInstanceDeployer(); return new UnsafeContract(instance, artifact, wallet); } diff --git a/yarn-project/aztec.js/src/deployment/register_class.ts b/yarn-project/aztec.js/src/deployment/register_class.ts index eaaba5e8b959..ed178b91cd8c 100644 --- a/yarn-project/aztec.js/src/deployment/register_class.ts +++ b/yarn-project/aztec.js/src/deployment/register_class.ts @@ -18,9 +18,9 @@ export async function registerContractClass( emitPublicBytecode = defaultEmitPublicBytecode, ): Promise { const { artifactHash, privateFunctionsRoot, publicBytecodeCommitment, packedBytecode } = - getContractClassFromArtifact(artifact); + await getContractClassFromArtifact(artifact); const encodedBytecode = bufferAsFields(packedBytecode, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS); - const registerer = getRegistererContract(wallet); + const registerer = await getRegistererContract(wallet); await wallet.addCapsule(encodedBytecode); return registerer.methods.register(artifactHash, privateFunctionsRoot, publicBytecodeCommitment, emitPublicBytecode); } diff --git a/yarn-project/aztec.js/src/entrypoint/payload.ts b/yarn-project/aztec.js/src/entrypoint/payload.ts index 8f76aa0e06f1..f43e68c1bd4f 100644 --- a/yarn-project/aztec.js/src/entrypoint/payload.ts +++ b/yarn-project/aztec.js/src/entrypoint/payload.ts @@ -54,16 +54,18 @@ type EncodedFunctionCall = { /** Assembles an entrypoint payload */ export 
abstract class EntrypointPayload { - #packedArguments: PackedValues[] = []; - #functionCalls: EncodedFunctionCall[] = []; + #packedArguments: PackedValues[]; + #functionCalls: EncodedFunctionCall[]; #nonce: Fr; #generatorIndex: number; - protected constructor(functionCalls: FunctionCall[], generatorIndex: number, nonce = Fr.random()) { - for (const call of functionCalls) { - this.#packedArguments.push(PackedValues.fromValues(call.args)); - } - + protected constructor( + functionCalls: FunctionCall[], + packedArguments: PackedValues[], + generatorIndex: number, + nonce = Fr.random(), + ) { + this.#packedArguments = packedArguments; /* eslint-disable camelcase */ this.#functionCalls = functionCalls.map((call, index) => ({ args_hash: this.#packedArguments[index].hash, @@ -78,6 +80,17 @@ export abstract class EntrypointPayload { this.#nonce = nonce; } + /** + * Use this to pack the function calls to later be passed into an entrypoint constructor + */ + static async packFunctionCalls(functionCalls: FunctionCall[]) { + const packedArguments = []; + for (const call of functionCalls) { + packedArguments.push(await PackedValues.fromValues(call.args)); + } + return packedArguments; + } + /* eslint-disable camelcase */ /** * The function calls to execute. This uses snake_case naming so that it is compatible with Noir encoding @@ -113,8 +126,8 @@ export abstract class EntrypointPayload { * Hashes the payload * @returns The hash of the payload */ - hash() { - return poseidon2HashWithSeparator(this.toFields(), this.#generatorIndex); + async hash() { + return await poseidon2HashWithSeparator(this.toFields(), this.#generatorIndex); } /** Serializes the function calls to an array of fields. 
*/ @@ -133,8 +146,8 @@ export abstract class EntrypointPayload { * @param functionCalls - The function calls to execute * @returns The execution payload */ - static fromFunctionCalls(functionCalls: FunctionCall[]) { - return new AppEntrypointPayload(functionCalls, 0); + static async fromFunctionCalls(functionCalls: FunctionCall[]) { + return new AppEntrypointPayload(functionCalls, await EntrypointPayload.packFunctionCalls(functionCalls), 0); } /** @@ -143,12 +156,17 @@ export abstract class EntrypointPayload { * @param nonce - The nonce for the payload, used to emit a nullifier identifying the call * @returns The execution payload */ - static fromAppExecution(functionCalls: FunctionCall[] | Tuple, nonce = Fr.random()) { + static async fromAppExecution(functionCalls: FunctionCall[] | Tuple, nonce = Fr.random()) { if (functionCalls.length > APP_MAX_CALLS) { throw new Error(`Expected at most ${APP_MAX_CALLS} function calls, got ${functionCalls.length}`); } const paddedCalls = padArrayEnd(functionCalls, FunctionCall.empty(), APP_MAX_CALLS); - return new AppEntrypointPayload(paddedCalls, GeneratorIndex.SIGNATURE_PAYLOAD, nonce); + return new AppEntrypointPayload( + paddedCalls, + await EntrypointPayload.packFunctionCalls(paddedCalls), + GeneratorIndex.SIGNATURE_PAYLOAD, + nonce, + ); } /** @@ -162,7 +180,12 @@ export abstract class EntrypointPayload { const feePayer = await feeOpts?.paymentMethod.getFeePayer(feeOpts?.gasSettings); const isFeePayer = !!feePayer && feePayer.equals(sender); const paddedCalls = padArrayEnd(calls, FunctionCall.empty(), FEE_MAX_CALLS); - return new FeeEntrypointPayload(paddedCalls, GeneratorIndex.FEE_PAYLOAD, isFeePayer); + return new FeeEntrypointPayload( + paddedCalls, + await EntrypointPayload.packFunctionCalls(paddedCalls), + GeneratorIndex.FEE_PAYLOAD, + isFeePayer, + ); } } @@ -177,11 +200,25 @@ class AppEntrypointPayload extends EntrypointPayload { class FeeEntrypointPayload extends EntrypointPayload { #isFeePayer: boolean; - 
constructor(functionCalls: FunctionCall[], generatorIndex: number, isFeePayer: boolean) { - super(functionCalls, generatorIndex); + constructor( + functionCalls: FunctionCall[], + packedArguments: PackedValues[], + generatorIndex: number, + isFeePayer: boolean, + ) { + super(functionCalls, packedArguments, generatorIndex); this.#isFeePayer = isFeePayer; } + static async new(functionCalls: FunctionCall[], generatorIndex: number, isFeePayer: boolean) { + return new FeeEntrypointPayload( + functionCalls, + await EntrypointPayload.packFunctionCalls(functionCalls), + generatorIndex, + isFeePayer, + ); + } + override toFields(): Fr[] { return [...this.functionCallsToFields(), this.nonce, new Fr(this.#isFeePayer)]; } @@ -200,6 +237,12 @@ class FeeEntrypointPayload extends EntrypointPayload { * @param feePayload - A fee payload. * @returns A hash of a combined payload. */ -export function computeCombinedPayloadHash(appPayload: AppEntrypointPayload, feePayload: FeeEntrypointPayload): Fr { - return poseidon2HashWithSeparator([appPayload.hash(), feePayload.hash()], GeneratorIndex.COMBINED_PAYLOAD); +export async function computeCombinedPayloadHash( + appPayload: AppEntrypointPayload, + feePayload: FeeEntrypointPayload, +): Promise { + return await poseidon2HashWithSeparator( + [await appPayload.hash(), await feePayload.hash()], + GeneratorIndex.COMBINED_PAYLOAD, + ); } diff --git a/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts index f69f515388eb..e98dcd936d9f 100644 --- a/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts +++ b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts @@ -21,12 +21,12 @@ export class FeeJuicePaymentMethodWithClaim extends FeeJuicePaymentMethod { * Creates a function call to pay the fee in Fee Juice. 
* @returns A function call */ - override getFunctionCalls(): Promise { - const selector = FunctionSelector.fromNameAndParameters( + override async getFunctionCalls(): Promise { + const selector = await FunctionSelector.fromNameAndParameters( ProtocolContractArtifact.FeeJuice.functions.find(f => f.name === 'claim')!, ); - return Promise.resolve([ + return [ { to: ProtocolContractAddress.FeeJuice, name: 'claim', @@ -41,6 +41,6 @@ export class FeeJuicePaymentMethodWithClaim extends FeeJuicePaymentMethod { returnTypes: [], type: FunctionType.PRIVATE, }, - ]); + ]; } } diff --git a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts index e505901da360..b17504ac541b 100644 --- a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts @@ -66,7 +66,7 @@ export class PrivateFeePaymentMethod implements FeePaymentMethod { action: { name: 'setup_refund', args: [this.feeRecipient.toField(), this.wallet.getAddress().toField(), maxFee, nonce], - selector: FunctionSelector.fromSignature('setup_refund((Field),(Field),Field,Field)'), + selector: await FunctionSelector.fromSignature('setup_refund((Field),(Field),Field,Field)'), type: FunctionType.PRIVATE, isStatic: false, to: this.asset, @@ -78,7 +78,7 @@ export class PrivateFeePaymentMethod implements FeePaymentMethod { { name: 'fee_entrypoint_private', to: this.paymentContract, - selector: FunctionSelector.fromSignature('fee_entrypoint_private(Field,(Field),Field)'), + selector: await FunctionSelector.fromSignature('fee_entrypoint_private(Field,(Field),Field)'), type: FunctionType.PRIVATE, isStatic: false, args: [maxFee, this.asset.toField(), nonce], diff --git a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts index 144e307d7ea4..a964015aeccf 100644 --- a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts +++ 
b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts @@ -43,19 +43,19 @@ export class PublicFeePaymentMethod implements FeePaymentMethod { * @param gasSettings - The gas settings. * @returns The function call to pay the fee. */ - getFunctionCalls(gasSettings: GasSettings): Promise { + async getFunctionCalls(gasSettings: GasSettings): Promise { const nonce = Fr.random(); const maxFee = gasSettings.getFeeLimit(); - return Promise.resolve([ - this.wallet - .setPublicAuthWit( + return [ + await ( + await this.wallet.setPublicAuthWit( { caller: this.paymentContract, action: { name: 'transfer_in_public', args: [this.wallet.getAddress().toField(), this.paymentContract.toField(), maxFee, nonce], - selector: FunctionSelector.fromSignature('transfer_in_public((Field),(Field),Field,Field)'), + selector: await FunctionSelector.fromSignature('transfer_in_public((Field),(Field),Field,Field)'), type: FunctionType.PUBLIC, isStatic: false, to: this.asset, @@ -64,16 +64,16 @@ export class PublicFeePaymentMethod implements FeePaymentMethod { }, true, ) - .request(), + ).request(), { name: 'fee_entrypoint_public', to: this.paymentContract, - selector: FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), + selector: await FunctionSelector.fromSignature('fee_entrypoint_public(Field,(Field),Field)'), type: FunctionType.PRIVATE, isStatic: false, args: [maxFee, this.asset.toField(), nonce], returnTypes: [], }, - ]); + ]; } } diff --git a/yarn-project/aztec.js/src/utils/authwit.ts b/yarn-project/aztec.js/src/utils/authwit.ts index 51c62370891b..b652b361b427 100644 --- a/yarn-project/aztec.js/src/utils/authwit.ts +++ b/yarn-project/aztec.js/src/utils/authwit.ts @@ -50,17 +50,17 @@ export type IntentAction = { * @param metadata - The metadata for the intent (chainId, version) * @returns The message hash for the action */ -export const computeAuthWitMessageHash = (intent: IntentInnerHash | IntentAction, metadata: IntentMetadata) => { +export const 
computeAuthWitMessageHash = async (intent: IntentInnerHash | IntentAction, metadata: IntentMetadata) => { const chainId = metadata.chainId; const version = metadata.version; if ('caller' in intent) { - const action = intent.action instanceof ContractFunctionInteraction ? intent.action.request() : intent.action; + const action = intent.action instanceof ContractFunctionInteraction ? await intent.action.request() : intent.action; return computeOuterAuthWitHash( action.to, chainId, version, - computeInnerAuthWitHashFromAction(intent.caller, action), + await computeInnerAuthWitHashFromAction(intent.caller, action), ); } else { const inner = Buffer.isBuffer(intent.innerHash) ? Fr.fromBuffer(intent.innerHash) : intent.innerHash; @@ -69,8 +69,12 @@ export const computeAuthWitMessageHash = (intent: IntentInnerHash | IntentAction }; // docs:end:authwit_computeAuthWitMessageHash -export const computeInnerAuthWitHashFromAction = (caller: AztecAddress, action: FunctionCall) => - computeInnerAuthWitHash([caller.toField(), action.selector.toField(), PackedValues.fromValues(action.args).hash]); +export const computeInnerAuthWitHashFromAction = async (caller: AztecAddress, action: FunctionCall) => + computeInnerAuthWitHash([ + caller.toField(), + action.selector.toField(), + (await PackedValues.fromValues(action.args)).hash, + ]); /** * Compute the inner hash for an authentication witness. 
@@ -80,8 +84,8 @@ export const computeInnerAuthWitHashFromAction = (caller: AztecAddress, action: * @param args - The arguments to hash * @returns The inner hash for the witness */ -export const computeInnerAuthWitHash = (args: Fr[]) => { - return poseidon2HashWithSeparator(args, GeneratorIndex.AUTHWIT_INNER); +export const computeInnerAuthWitHash = async (args: Fr[]) => { + return await poseidon2HashWithSeparator(args, GeneratorIndex.AUTHWIT_INNER); }; /** @@ -98,6 +102,9 @@ export const computeInnerAuthWitHash = (args: Fr[]) => { * @param innerHash - The inner hash for the witness * @returns The outer hash for the witness */ -const computeOuterAuthWitHash = (consumer: AztecAddress, chainId: Fr, version: Fr, innerHash: Fr) => { - return poseidon2HashWithSeparator([consumer.toField(), chainId, version, innerHash], GeneratorIndex.AUTHWIT_OUTER); +const computeOuterAuthWitHash = async (consumer: AztecAddress, chainId: Fr, version: Fr, innerHash: Fr) => { + return await poseidon2HashWithSeparator( + [consumer.toField(), chainId, version, innerHash], + GeneratorIndex.AUTHWIT_OUTER, + ); }; diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index 10b837a9b04a..73d8171500ed 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -206,9 +206,9 @@ export class AztecCheatCodes { * @param key - The key to lookup in the map * @returns The storage slot of the value in the map */ - public computeSlotInMap(mapSlot: Fr | bigint, key: Fr | bigint | AztecAddress): Fr { + public async computeSlotInMap(mapSlot: Fr | bigint, key: Fr | bigint | AztecAddress): Promise { const keyFr = typeof key === 'bigint' ? 
new Fr(key) : key.toField(); - return deriveStorageSlotInMap(mapSlot, keyFr); + return await deriveStorageSlotInMap(mapSlot, keyFr); } /** diff --git a/yarn-project/aztec.js/src/utils/portal_manager.ts b/yarn-project/aztec.js/src/utils/portal_manager.ts index 7d3c3d2bae22..2c8b7e0ce835 100644 --- a/yarn-project/aztec.js/src/utils/portal_manager.ts +++ b/yarn-project/aztec.js/src/utils/portal_manager.ts @@ -49,9 +49,9 @@ function stringifyEthAddress(address: EthAddress | Hex, name?: string) { } /** Generates a pair secret and secret hash */ -export function generateClaimSecret(logger?: DebugLogger): [Fr, Fr] { +export async function generateClaimSecret(logger?: DebugLogger): Promise<[Fr, Fr]> { const secret = Fr.random(); - const secretHash = computeSecretHash(secret); + const secretHash = await computeSecretHash(secret); logger?.verbose(`Generated claim secret=${secret.toString()} hash=${secretHash.toString()}`); return [secret, secretHash]; } @@ -144,7 +144,7 @@ export class L1FeeJuicePortalManager { * @param mint - Whether to mint the tokens before sending (only during testing). */ public async bridgeTokensPublic(to: AztecAddress, amount: bigint, mint = false): Promise { - const [claimSecret, claimSecretHash] = generateClaimSecret(); + const [claimSecret, claimSecretHash] = await generateClaimSecret(); if (mint) { await this.tokenManager.mint(amount, this.walletClient.account.address); } diff --git a/yarn-project/aztec.js/src/utils/pub_key.ts b/yarn-project/aztec.js/src/utils/pub_key.ts index ab7388a5c168..7e579c18547f 100644 --- a/yarn-project/aztec.js/src/utils/pub_key.ts +++ b/yarn-project/aztec.js/src/utils/pub_key.ts @@ -6,7 +6,7 @@ import { Grumpkin } from '@aztec/circuits.js/barretenberg'; * @param privateKey - The private key. * @returns The generated public key. 
*/ -export function generatePublicKey(privateKey: GrumpkinScalar): PublicKey { +export async function generatePublicKey(privateKey: GrumpkinScalar): Promise { const grumpkin = new Grumpkin(); - return grumpkin.mul(grumpkin.generator(), privateKey); + return await grumpkin.mul(grumpkin.generator(), privateKey); } diff --git a/yarn-project/aztec.js/src/wallet/account_wallet.ts b/yarn-project/aztec.js/src/wallet/account_wallet.ts index 781b8ab454e0..4f33045c64c2 100644 --- a/yarn-project/aztec.js/src/wallet/account_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/account_wallet.ts @@ -55,7 +55,7 @@ export class AccountWallet extends BaseWallet { } else if (messageHashOrIntent instanceof Fr) { messageHash = messageHashOrIntent; } else { - messageHash = this.getMessageHash(messageHashOrIntent); + messageHash = await this.getMessageHash(messageHashOrIntent); } const witness = await this.account.createAuthWit(messageHash); @@ -72,17 +72,17 @@ export class AccountWallet extends BaseWallet { * @param authorized - True to authorize, false to revoke authorization. * @returns - A function interaction. 
*/ - public setPublicAuthWit( + public async setPublicAuthWit( messageHashOrIntent: Fr | Buffer | IntentInnerHash | IntentAction, authorized: boolean, - ): ContractFunctionInteraction { + ): Promise { let messageHash: Fr; if (Buffer.isBuffer(messageHashOrIntent)) { messageHash = Fr.fromBuffer(messageHashOrIntent); } else if (messageHashOrIntent instanceof Fr) { messageHash = messageHashOrIntent; } else { - messageHash = this.getMessageHash(messageHashOrIntent); + messageHash = await this.getMessageHash(messageHashOrIntent); } return new ContractFunctionInteraction(this, ProtocolContractAddress.AuthRegistry, this.getSetAuthorizedAbi(), [ @@ -91,16 +91,17 @@ export class AccountWallet extends BaseWallet { ]); } - private getInnerHashAndConsumer(intent: IntentInnerHash | IntentAction): { + private async getInnerHashAndConsumer(intent: IntentInnerHash | IntentAction): Promise<{ /** The inner hash */ innerHash: Fr; /** The consumer of the authwit */ consumer: AztecAddress; - } { + }> { if ('caller' in intent && 'action' in intent) { - const action = intent.action instanceof ContractFunctionInteraction ? intent.action.request() : intent.action; + const action = + intent.action instanceof ContractFunctionInteraction ? 
await intent.action.request() : intent.action; return { - innerHash: computeInnerAuthWitHashFromAction(intent.caller, action), + innerHash: await computeInnerAuthWitHashFromAction(intent.caller, action), consumer: action.to, }; } else if (Buffer.isBuffer(intent.innerHash)) { @@ -115,10 +116,10 @@ export class AccountWallet extends BaseWallet { * @param intent - A tuple of (consumer and inner hash) or (caller and action) * @returns The message hash */ - private getMessageHash(intent: IntentInnerHash | IntentAction): Fr { + private async getMessageHash(intent: IntentInnerHash | IntentAction): Promise { const chainId = this.getChainId(); const version = this.getVersion(); - return computeAuthWitMessageHash(intent, { chainId, version }); + return await computeAuthWitMessageHash(intent, { chainId, version }); } /** @@ -140,9 +141,9 @@ export class AccountWallet extends BaseWallet { /** boolean flag indicating if the authwit is valid in public context */ isValidInPublic: boolean; }> { - const { innerHash, consumer } = this.getInnerHashAndConsumer(intent); + const { innerHash, consumer } = await this.getInnerHashAndConsumer(intent); - const messageHash = this.getMessageHash(intent); + const messageHash = await this.getMessageHash(intent); const results = { isValidInPrivate: false, isValidInPublic: false }; // Check private diff --git a/yarn-project/aztec.js/src/wallet/account_wallet_with_private_key.ts b/yarn-project/aztec.js/src/wallet/account_wallet_with_private_key.ts index 135d06ce339e..1d72dfac5a3d 100644 --- a/yarn-project/aztec.js/src/wallet/account_wallet_with_private_key.ts +++ b/yarn-project/aztec.js/src/wallet/account_wallet_with_private_key.ts @@ -30,9 +30,9 @@ export class AccountWalletWithSecretKey extends AccountWallet { * note - this ensures that the address secret always corresponds to an address point with y being positive * dev - this is also referred to as the address secret, which decrypts payloads encrypted to an address point */ - public 
getEncryptionSecret() { + public async getEncryptionSecret() { return computeAddressSecret( - this.getCompleteAddress().getPreaddress(), + await this.getCompleteAddress().getPreaddress(), deriveMasterIncomingViewingSecretKey(this.getSecretKey()), ); } diff --git a/yarn-project/aztec/src/cli/cmds/start_pxe.ts b/yarn-project/aztec/src/cli/cmds/start_pxe.ts index f1d8681982c5..f92ce1f448a9 100644 --- a/yarn-project/aztec/src/cli/cmds/start_pxe.ts +++ b/yarn-project/aztec/src/cli/cmds/start_pxe.ts @@ -106,7 +106,7 @@ export async function addPXE( initializationHash: initHash, address, deployer: AztecAddress.ZERO, - contractClassId: getContractClassFromArtifact(artifact!).id, + contractClassId: (await getContractClassFromArtifact(artifact!)).id, publicKeys: PublicKeys.default(), }; userLog(`Registering ${name} at ${address.toString()}`); diff --git a/yarn-project/aztec/src/cli/util.ts b/yarn-project/aztec/src/cli/util.ts index cb5ab65fcda2..f525478f0b95 100644 --- a/yarn-project/aztec/src/cli/util.ts +++ b/yarn-project/aztec/src/cli/util.ts @@ -44,7 +44,7 @@ export async function createAccountLogs( const registeredAccounts = await pxe.getRegisteredAccounts(); const accountLogStrings = [`Initial Accounts:\n\n`]; for (const account of accounts) { - const completeAddress = account.account.getCompleteAddress(); + const completeAddress = await account.account.getCompleteAddress(); if (registeredAccounts.find(a => a.equals(completeAddress))) { accountLogStrings.push(` Address: ${completeAddress.address.toString()}\n`); accountLogStrings.push(` Partial Address: ${completeAddress.partialAddress.toString()}\n`); diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index cd2a799df92f..76b353578fbe 100644 --- a/yarn-project/aztec/src/sandbox.ts +++ b/yarn-project/aztec/src/sandbox.ts @@ -79,10 +79,10 @@ export async function deployContractsToL1( ? 
createEthereumChain(aztecNodeConfig.l1RpcUrl, aztecNodeConfig.l1ChainId) : { chainInfo: localAnvil }; - const l1Contracts = await waitThenDeploy(aztecNodeConfig, () => + const l1Contracts = await waitThenDeploy(aztecNodeConfig, async () => deployL1Contracts(aztecNodeConfig.l1RpcUrl, hdAccount, chain.chainInfo, contractDeployLogger, { l2FeeJuiceAddress: ProtocolContractAddress.FeeJuice, - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, assumeProvenThrough: opts.assumeProvenThroughBlockNumber, salt: opts.salt, diff --git a/yarn-project/bb-prover/src/test/test_avm.ts b/yarn-project/bb-prover/src/test/test_avm.ts index 7dd0954dfe6f..937a9589ea0c 100644 --- a/yarn-project/bb-prover/src/test/test_avm.ts +++ b/yarn-project/bb-prover/src/test/test_avm.ts @@ -31,17 +31,17 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { type PublicFunctionCallResult } from '@aztec/simulator'; // TODO: pub somewhere more usable - copied from abstract phase manager -export function getPublicInputs(result: PublicFunctionCallResult): PublicCircuitPublicInputs { +export async function getPublicInputs(result: PublicFunctionCallResult): Promise { return PublicCircuitPublicInputs.from({ callContext: result.executionRequest.callContext, proverAddress: AztecAddress.ZERO, - argsHash: computeVarArgsHash(result.executionRequest.args), + argsHash: await computeVarArgsHash(result.executionRequest.args), noteHashes: padArrayEnd(result.noteHashes, NoteHash.empty(), MAX_NOTE_HASHES_PER_CALL), nullifiers: padArrayEnd(result.nullifiers, Nullifier.empty(), MAX_NULLIFIERS_PER_CALL), l2ToL1Msgs: padArrayEnd(result.l2ToL1Messages, L2ToL1Message.empty(), MAX_L2_TO_L1_MSGS_PER_CALL), startSideEffectCounter: result.startSideEffectCounter, endSideEffectCounter: result.endSideEffectCounter, - returnsHash: computeVarArgsHash(result.returnValues), + returnsHash: await computeVarArgsHash(result.returnValues), noteHashReadRequests: padArrayEnd( 
result.noteHashReadRequests, TreeLeafReadRequest.empty(), diff --git a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts index 14f5eb28c59d..fb209cd90794 100644 --- a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts +++ b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts @@ -25,7 +25,7 @@ export async function extractVkData(vkDirectoryPath: string): Promise token.methods.transfer(recipient, TRANSFER_AMOUNT).request())); calls.push( - ...times(publicTransfersPerTx, () => - token.methods.transfer_in_public(sender, recipient, TRANSFER_AMOUNT, 0).request(), - ), + ...(await Promise.all( + times(privateTransfersPerTx, () => token.methods.transfer(recipient, TRANSFER_AMOUNT).request()), + )), + ); + calls.push( + ...(await Promise.all( + times(publicTransfersPerTx, () => + token.methods.transfer_in_public(sender, recipient, TRANSFER_AMOUNT, 0).request(), + ), + )), ); } else { calls.push( - ...times(privateTransfersPerTx, () => - token.methods.transfer(TRANSFER_AMOUNT, sender, recipient, sender).request(), - ), + ...(await Promise.all( + times(privateTransfersPerTx, () => + token.methods.transfer(TRANSFER_AMOUNT, sender, recipient, sender).request(), + ), + )), ); } diff --git a/yarn-project/bot/src/factory.ts b/yarn-project/bot/src/factory.ts index d41ddf174ff1..a4453cc831a7 100644 --- a/yarn-project/bot/src/factory.ts +++ b/yarn-project/bot/src/factory.ts @@ -63,7 +63,7 @@ export class BotFactory { const salt = Fr.ONE; const signingKey = deriveSigningKey(this.config.senderPrivateKey); const account = getSchnorrAccount(this.pxe, this.config.senderPrivateKey, signingKey, salt); - const isInit = await this.pxe.isContractInitialized(account.getAddress()); + const isInit = await this.pxe.isContractInitialized(await account.getAddress()); if (isInit) { this.log.info(`Account at ${account.getAddress().toString()} already initialized`); const wallet 
= await account.register(); @@ -121,7 +121,7 @@ export class BotFactory { throw new Error(`Unsupported token contract type: ${this.config.contract}`); } - const address = deploy.getInstance(deployOpts).address; + const address = (await deploy.getInstance(deployOpts)).address; if (await this.pxe.isContractPubliclyDeployed(address)) { this.log.info(`Token at ${address.toString()} already deployed`); return deploy.register(); @@ -162,13 +162,13 @@ export class BotFactory { const from = sender; // we are setting from to sender here because of TODO(#9887) calls.push( isStandardToken - ? token.methods.mint_to_private(from, sender, MINT_BALANCE).request() - : token.methods.mint(MINT_BALANCE, sender, sender).request(), + ? await token.methods.mint_to_private(from, sender, MINT_BALANCE).request() + : await token.methods.mint(MINT_BALANCE, sender, sender).request(), ); } if (isStandardToken && publicBalance < MIN_BALANCE) { this.log.info(`Minting public tokens for ${sender.toString()}`); - calls.push(token.methods.mint_to_public(sender, MINT_BALANCE).request()); + calls.push(await token.methods.mint_to_public(sender, MINT_BALANCE).request()); } if (calls.length === 0) { this.log.info(`Skipping minting as ${sender.toString()} has enough tokens`); diff --git a/yarn-project/builder/src/contract-interface-gen/codegen.ts b/yarn-project/builder/src/contract-interface-gen/codegen.ts index dea5cb8417a0..dee4a1e5add4 100644 --- a/yarn-project/builder/src/contract-interface-gen/codegen.ts +++ b/yarn-project/builder/src/contract-interface-gen/codegen.ts @@ -60,7 +60,7 @@ async function generateFromNoirAbi(outputPath: string, noirAbiPath: string, opts relativeArtifactPath = `./${relativeArtifactPath}`; } - const tsWrapper = generateTypescriptContractInterface(aztecAbi, relativeArtifactPath); + const tsWrapper = await generateTypescriptContractInterface(aztecAbi, relativeArtifactPath); const outputFilePath = `${outputPath}/${aztecAbi.name}.ts`; await writeFile(outputFilePath, tsWrapper); 
diff --git a/yarn-project/builder/src/contract-interface-gen/typescript.ts b/yarn-project/builder/src/contract-interface-gen/typescript.ts index 455378a3d13a..b0f2c443f1b4 100644 --- a/yarn-project/builder/src/contract-interface-gen/typescript.ts +++ b/yarn-project/builder/src/contract-interface-gen/typescript.ts @@ -2,6 +2,7 @@ import { type ABIParameter, type ABIVariable, type ContractArtifact, + EventSelector, type FunctionArtifact, decodeFunctionSignature, getDefaultInitializer, @@ -241,38 +242,44 @@ function generateNotesGetter(input: ContractArtifact) { } // events is of type AbiType -function generateEvents(events: any[] | undefined) { +async function generateEvents(events: any[] | undefined) { if (events === undefined) { return { events: '', eventDefs: '' }; } - const eventsMetadata = events.map(event => { - const eventName = event.path.split('::').at(-1); + const eventsMetadata = await Promise.all( + events.map(async event => { + const eventName = event.path.split('::').at(-1); - const eventDefProps = event.fields.map((field: ABIVariable) => `${field.name}: ${abiTypeToTypescript(field.type)}`); - const eventDef = ` + const eventDefProps = event.fields.map( + (field: ABIVariable) => `${field.name}: ${abiTypeToTypescript(field.type)}`, + ); + const eventDef = ` export type ${eventName} = { ${eventDefProps.join('\n')} } `; - const fieldNames = event.fields.map((field: any) => `"${field.name}"`); - const eventType = `${eventName}: {abiType: AbiType, eventSelector: EventSelector, fieldNames: string[] }`; - // Reusing the decodeFunctionSignature - const eventSignature = decodeFunctionSignature(eventName, event.fields); - const eventSelector = `EventSelector.fromSignature('${eventSignature}')`; - const eventImpl = `${eventName}: { + const fieldNames = event.fields.map((field: any) => `"${field.name}"`); + const eventType = `${eventName}: {abiType: AbiType, eventSelector: EventSelector, fieldNames: string[] }`; + // Reusing the decodeFunctionSignature + const 
eventSignature = ( + await EventSelector.fromSignature(decodeFunctionSignature(eventName, event.fields)) + ).toString(); + const eventSelector = `EventSelector.fromString('${eventSignature}')`; + const eventImpl = `${eventName}: { abiType: ${JSON.stringify(event, null, 4)}, eventSelector: ${eventSelector}, fieldNames: [${fieldNames}], }`; - return { - eventDef, - eventType, - eventImpl, - }; - }); + return { + eventDef, + eventType, + eventImpl, + }; + }), + ); return { eventDefs: eventsMetadata.map(({ eventDef }) => eventDef).join('\n'), @@ -292,7 +299,7 @@ function generateEvents(events: any[] | undefined) { * @param artifactImportPath - Optional path to import the artifact (if not set, will be required in the constructor). * @returns The corresponding ts code. */ -export function generateTypescriptContractInterface(input: ContractArtifact, artifactImportPath?: string) { +export async function generateTypescriptContractInterface(input: ContractArtifact, artifactImportPath?: string) { const methods = input.functions .filter(f => !f.isInternal) .sort((a, b) => a.name.localeCompare(b.name)) @@ -304,7 +311,7 @@ export function generateTypescriptContractInterface(input: ContractArtifact, art const artifactGetter = artifactImportPath && generateArtifactGetter(input.name); const storageLayoutGetter = artifactImportPath && generateStorageLayoutGetter(input); const notesGetter = artifactImportPath && generateNotesGetter(input); - const { eventDefs, events } = generateEvents(input.outputs.structs?.events); + const { eventDefs, events } = await generateEvents(input.outputs.structs?.events); return ` /* Autogenerated file, do not edit! 
*/ diff --git a/yarn-project/circuit-types/src/interfaces/archiver.test.ts b/yarn-project/circuit-types/src/interfaces/archiver.test.ts index 04aa0e0341d6..d9aa4d344193 100644 --- a/yarn-project/circuit-types/src/interfaces/archiver.test.ts +++ b/yarn-project/circuit-types/src/interfaces/archiver.test.ts @@ -184,7 +184,7 @@ describe('ArchiverApiSchema', () => { }); it('getContractClass', async () => { - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); const result = await context.client.getContractClass(Fr.random()); expect(result).toEqual({ ...omit(contractClass, 'publicBytecodeCommitment'), @@ -247,7 +247,7 @@ describe('ArchiverApiSchema', () => { }); it('addContractClass', async () => { - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); await context.client.addContractClass({ ...omit(contractClass, 'publicBytecodeCommitment'), unconstrainedFunctions: [], @@ -280,12 +280,12 @@ class MockArchiver implements ArchiverApi { getBlockHeader(_number: number | 'latest'): Promise { return Promise.resolve(BlockHeader.empty()); } - getBlocks(from: number, _limit: number, _proven?: boolean | undefined): Promise { - return Promise.resolve([L2Block.random(from)]); + async getBlocks(from: number, _limit: number, _proven?: boolean | undefined): Promise { + return [await L2Block.random(from)]; } - getTxEffect(_txHash: TxHash): Promise | undefined> { + async getTxEffect(_txHash: TxHash): Promise | undefined> { expect(_txHash).toBeInstanceOf(TxHash); - return Promise.resolve({ l2BlockNumber: 1, l2BlockHash: '0x12', data: TxEffect.random() }); + return Promise.resolve({ l2BlockNumber: 1, l2BlockHash: '0x12', data: await TxEffect.random() }); } getSettledTxReceipt(txHash: TxHash): Promise { expect(txHash).toBeInstanceOf(TxHash); @@ -297,9 +297,9 @@ class MockArchiver implements ArchiverApi { getL2EpochNumber(): Promise { 
return Promise.resolve(1n); } - getBlocksForEpoch(epochNumber: bigint): Promise { + async getBlocksForEpoch(epochNumber: bigint): Promise { expect(epochNumber).toEqual(1n); - return Promise.resolve([L2Block.random(Number(epochNumber))]); + return [await L2Block.random(Number(epochNumber))]; } isEpochComplete(epochNumber: bigint): Promise { expect(epochNumber).toEqual(1n); @@ -341,10 +341,10 @@ class MockArchiver implements ArchiverApi { expect(selector).toBeInstanceOf(FunctionSelector); return Promise.resolve({ selector, bytecode: Buffer.alloc(10, 10) }); } - getContractClass(id: Fr): Promise { + async getContractClass(id: Fr): Promise { expect(id).toBeInstanceOf(Fr); - const contractClass = getContractClassFromArtifact(this.artifact); - return Promise.resolve({ ...contractClass, unconstrainedFunctions: [], privateFunctions: [] }); + const contractClass = await getContractClassFromArtifact(this.artifact); + return { ...contractClass, unconstrainedFunctions: [], privateFunctions: [] }; } getBytecodeCommitment(id: Fr): Promise { expect(id).toBeInstanceOf(Fr); @@ -366,10 +366,10 @@ class MockArchiver implements ArchiverApi { contractClassId: Fr.random(), deployer: AztecAddress.random(), initializationHash: Fr.random(), - publicKeys: PublicKeys.random(), + publicKeys: await PublicKeys.random(), salt: Fr.random(), version: 1, - }); + }; } getContractClassIds(): Promise { return Promise.resolve([Fr.random()]); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index 3bb4ee18185f..03e1910fc498 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -249,7 +249,7 @@ describe('AztecNodeApiSchema', () => { }); it('sendTx', async () => { - await context.client.sendTx(Tx.random()); + await context.client.sendTx(await Tx.random()); }); it('getTxReceipt', async () => { @@ -288,12 +288,12 @@ 
describe('AztecNodeApiSchema', () => { }); it('simulatePublicCalls', async () => { - const response = await context.client.simulatePublicCalls(Tx.random()); + const response = await context.client.simulatePublicCalls(await Tx.random()); expect(response).toBeInstanceOf(PublicSimulationOutput); }); it('isValidTx', async () => { - const response = await context.client.isValidTx(Tx.random()); + const response = await context.client.isValidTx(await Tx.random()); expect(response).toBe(true); }); @@ -302,7 +302,7 @@ describe('AztecNodeApiSchema', () => { }); it('getContractClass', async () => { - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); const response = await context.client.getContractClass(Fr.random()); expect(response).toEqual({ ...omit(contractClass, 'publicBytecodeCommitment'), @@ -343,7 +343,7 @@ describe('AztecNodeApiSchema', () => { }); it('addContractClass', async () => { - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); await context.client.addContractClass({ ...contractClass, unconstrainedFunctions: [], privateFunctions: [] }); }); }); @@ -480,7 +480,7 @@ class MockAztecNode implements AztecNode { }); } getBlocks(from: number, limit: number): Promise { - return Promise.resolve(times(limit, i => L2Block.random(from + i))); + return Promise.all(times(limit, async i => await L2Block.random(from + i))); } getNodeVersion(): Promise { return Promise.resolve('1.0.0'); @@ -534,12 +534,12 @@ class MockAztecNode implements AztecNode { expect(txHash).toBeInstanceOf(TxHash); return Promise.resolve(TxReceipt.empty()); } - getTxEffect(txHash: TxHash): Promise | undefined> { + async getTxEffect(txHash: TxHash): Promise | undefined> { expect(txHash).toBeInstanceOf(TxHash); - return Promise.resolve({ l2BlockNumber: 1, l2BlockHash: '0x12', data: TxEffect.random() }); + return Promise.resolve({ l2BlockNumber: 
1, l2BlockHash: '0x12', data: await TxEffect.random() }); } - getPendingTxs(): Promise { - return Promise.resolve([Tx.random()]); + async getPendingTxs(): Promise { + return Promise.resolve([await Tx.random()]); } getPendingTxCount(): Promise { return Promise.resolve(1); @@ -568,19 +568,19 @@ class MockAztecNode implements AztecNode { expect(config.coinbase).toBeInstanceOf(EthAddress); return Promise.resolve(); } - getContractClass(id: Fr): Promise { + async getContractClass(id: Fr): Promise { expect(id).toBeInstanceOf(Fr); - const contractClass = getContractClassFromArtifact(this.artifact); + const contractClass = await getContractClassFromArtifact(this.artifact); return Promise.resolve({ ...contractClass, unconstrainedFunctions: [], privateFunctions: [] }); } - getContract(address: AztecAddress): Promise { + async getContract(address: AztecAddress): Promise { expect(address).toBeInstanceOf(AztecAddress); const instance = { version: 1 as const, contractClassId: Fr.random(), deployer: AztecAddress.random(), initializationHash: Fr.random(), - publicKeys: PublicKeys.random(), + publicKeys: await PublicKeys.random(), salt: Fr.random(), address: AztecAddress.random(), }; diff --git a/yarn-project/circuit-types/src/interfaces/pxe.test.ts b/yarn-project/circuit-types/src/interfaces/pxe.test.ts index 65976b07ac7b..24280ed30ffe 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.test.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.test.ts @@ -69,7 +69,7 @@ describe('PXESchema', () => { contractClassId: Fr.random(), deployer: AztecAddress.random(), initializationHash: Fr.random(), - publicKeys: PublicKeys.random(), + publicKeys: await PublicKeys.random(), salt: Fr.random(), address, }; @@ -147,23 +147,26 @@ describe('PXESchema', () => { }); it('proveTx', async () => { - const result = await context.client.proveTx(TxExecutionRequest.random(), PrivateExecutionResult.random()); + const result = await context.client.proveTx( + await TxExecutionRequest.random(), + 
await PrivateExecutionResult.random(), + ); expect(result).toBeInstanceOf(TxProvingResult); }); it('simulateTx(all)', async () => { - const result = await context.client.simulateTx(TxExecutionRequest.random(), true, address, false, false, []); + const result = await context.client.simulateTx(await TxExecutionRequest.random(), true, address, false, false, []); expect(result).toBeInstanceOf(TxSimulationResult); }); it('simulateTx(required)', async () => { - const result = await context.client.simulateTx(TxExecutionRequest.random(), true); + const result = await context.client.simulateTx(await TxExecutionRequest.random(), true); expect(result).toBeInstanceOf(TxSimulationResult); }); it('simulateTx(undefined)', async () => { const result = await context.client.simulateTx( - TxExecutionRequest.random(), + await TxExecutionRequest.random(), true, undefined, undefined, @@ -174,7 +177,7 @@ describe('PXESchema', () => { }); it('sendTx', async () => { - const result = await context.client.sendTx(Tx.random()); + const result = await context.client.sendTx(await Tx.random()); expect(result).toBeInstanceOf(TxHash); }); @@ -280,7 +283,7 @@ describe('PXESchema', () => { it('getContractClass', async () => { const result = await context.client.getContractClass(Fr.random()); - const expected = omit(getContractClassFromArtifact(artifact), 'privateFunctionsRoot', 'publicBytecodeCommitment'); + const expected = omit(await getContractClassFromArtifact(artifact), 'privateFunctionsRoot', 'publicBytecodeCommitment'); expect(result).toEqual(expected); }); @@ -309,7 +312,7 @@ describe('PXESchema', () => { { abiType: { kind: 'boolean' }, eventSelector: EventSelector.random(), fieldNames: ['name'] }, 1, 1, - [Point.random()], + [await Point.random()], ); expect(result).toEqual([{ value: 1n }]); }); @@ -352,8 +355,8 @@ class MockPXE implements PXE { expect(partialAddress).toBeInstanceOf(Fr); return Promise.resolve(CompleteAddress.random()); } - getRegisteredAccounts(): Promise { - return 
Promise.resolve([CompleteAddress.random()]); + async getRegisteredAccounts(): Promise { + return [await CompleteAddress.random()]; } getRegisteredAccount(address: AztecAddress): Promise { expect(address).toBeInstanceOf(AztecAddress); @@ -392,7 +395,7 @@ class MockPXE implements PXE { new TxProvingResult(privateExecutionResult, PrivateKernelTailCircuitPublicInputs.empty(), ClientIvcProof.empty()), ); } - simulateTx( + async simulateTx( txRequest: TxExecutionRequest, _simulatePublic: boolean, msgSender?: AztecAddress | undefined, @@ -408,7 +411,7 @@ class MockPXE implements PXE { expect(scopes).toEqual([]); } return Promise.resolve( - new TxSimulationResult(PrivateExecutionResult.random(), PrivateKernelTailCircuitPublicInputs.empty()), + new TxSimulationResult(await PrivateExecutionResult.random(), PrivateKernelTailCircuitPublicInputs.empty()), ); } sendTx(tx: Tx): Promise { @@ -419,9 +422,9 @@ class MockPXE implements PXE { expect(txHash).toBeInstanceOf(TxHash); return Promise.resolve(TxReceipt.empty()); } - getTxEffect(txHash: TxHash): Promise | undefined> { + async getTxEffect(txHash: TxHash): Promise | undefined> { expect(txHash).toBeInstanceOf(TxHash); - return Promise.resolve({ data: TxEffect.random(), l2BlockHash: Fr.random().toString(), l2BlockNumber: 1 }); + return Promise.resolve({ data: await TxEffect.random(), l2BlockHash: Fr.random().toString(), l2BlockNumber: 1 }); } getPublicStorageAt(contract: AztecAddress, slot: Fr): Promise { expect(contract).toBeInstanceOf(AztecAddress); @@ -523,9 +526,9 @@ class MockPXE implements PXE { expect(address).toEqual(this.address); return Promise.resolve(this.instance); } - getContractClass(id: Fr): Promise { + async getContractClass(id: Fr): Promise { expect(id).toBeInstanceOf(Fr); - const contractClass = getContractClassFromArtifact(this.artifact); + const contractClass = await getContractClassFromArtifact(this.artifact); return Promise.resolve(contractClass); } getContractArtifact(id: Fr): Promise { diff --git 
a/yarn-project/circuit-types/src/l2_block.test.ts b/yarn-project/circuit-types/src/l2_block.test.ts index 848bed33fd70..f9fd1039570f 100644 --- a/yarn-project/circuit-types/src/l2_block.test.ts +++ b/yarn-project/circuit-types/src/l2_block.test.ts @@ -1,8 +1,8 @@ import { L2Block } from './l2_block.js'; describe('L2Block', () => { - it('can serialize an L2 block with logs to a buffer and back', () => { - const block = L2Block.random(42); + it('can serialize an L2 block with logs to a buffer and back', async () => { + const block = await L2Block.random(42); const buffer = block.toBuffer(); const recovered = L2Block.fromBuffer(buffer); diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index c6d4f570595e..82728b6cd721 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -79,7 +79,7 @@ export class L2Block { * @param inHash - The hash of the L1 to L2 messages subtree which got inserted in this block. * @returns The L2 block. */ - static random( + static async random( l2BlockNum: number, txsPerBlock = 4, numPublicCallsPerTx = 3, @@ -114,8 +114,8 @@ export class L2Block { * Returns the block's hash (hash of block header). * @returns The block's hash. 
*/ - public hash(): Fr { - return this.header.hash(); + public async hash(): Promise { + return await this.header.hash(); } /** diff --git a/yarn-project/circuit-types/src/logs/function_l2_logs.test.ts b/yarn-project/circuit-types/src/logs/function_l2_logs.test.ts index a4039913e9be..446ec881dcc6 100644 --- a/yarn-project/circuit-types/src/logs/function_l2_logs.test.ts +++ b/yarn-project/circuit-types/src/logs/function_l2_logs.test.ts @@ -4,8 +4,8 @@ import { UnencryptedFunctionL2Logs } from './function_l2_logs.js'; function shouldBehaveLikeFunctionL2Logs(FunctionL2Logs: typeof UnencryptedFunctionL2Logs) { describe(FunctionL2Logs.name, () => { - it('can encode L2Logs to buffer and back', () => { - const l2Logs = FunctionL2Logs.random(3); + it('can encode L2Logs to buffer and back', async () => { + const l2Logs = await FunctionL2Logs.random(3); const buffer = l2Logs.toBuffer(); const recovered = FunctionL2Logs.fromBuffer(buffer); @@ -13,8 +13,8 @@ function shouldBehaveLikeFunctionL2Logs(FunctionL2Logs: typeof UnencryptedFuncti expect(recovered).toEqual(l2Logs); }); - it('can encode L2Logs to JSON and back', () => { - const l2Logs = FunctionL2Logs.random(3); + it('can encode L2Logs to JSON and back', async () => { + const l2Logs = await FunctionL2Logs.random(3); const buffer = jsonStringify(l2Logs); const recovered = FunctionL2Logs.schema.parse(JSON.parse(buffer)); @@ -22,8 +22,8 @@ function shouldBehaveLikeFunctionL2Logs(FunctionL2Logs: typeof UnencryptedFuncti expect(recovered).toEqual(l2Logs); }); - it('getSerializedLength returns the correct length', () => { - const l2Logs = FunctionL2Logs.random(3); + it('getSerializedLength returns the correct length', async () => { + const l2Logs = await FunctionL2Logs.random(3); const buffer = l2Logs.toBuffer(); const recovered = FunctionL2Logs.fromBuffer(buffer); @@ -36,8 +36,8 @@ function shouldBehaveLikeFunctionL2Logs(FunctionL2Logs: typeof UnencryptedFuncti } }); - it('getKernelLength returns the correct length', () => { 
- const l2Logs = FunctionL2Logs.random(3); + it('getKernelLength returns the correct length', async () => { + const l2Logs = await FunctionL2Logs.random(3); const expectedLength = l2Logs.logs.map(l => l.length).reduce((a, b) => a + b + 4, 0); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index af663a834aba..5012cd5dd0ea 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -27,26 +27,26 @@ describe('EncryptedLogPayload', () => { let original: EncryptedLogPayload; let payload: PrivateLog; - beforeAll(() => { + beforeAll(async () => { const incomingBodyPlaintext = randomBytes(128); const contract = AztecAddress.random(); original = new EncryptedLogPayload(PLACEHOLDER_TAG, contract, incomingBodyPlaintext); const secretKey = Fr.random(); const partialAddress = Fr.random(); - ({ masterOutgoingViewingSecretKey: ovskM, masterIncomingViewingSecretKey: ivskM } = deriveKeys(secretKey)); + ({ masterOutgoingViewingSecretKey: ovskM, masterIncomingViewingSecretKey: ivskM } = await deriveKeys(secretKey)); - completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, partialAddress); + completeAddress = await CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, partialAddress); - const ovKeys = getKeyValidationRequest(ovskM, contract); + const ovKeys = await getKeyValidationRequest(ovskM, contract); const ephSk = GrumpkinScalar.random(); payload = original.generatePayload(ephSk, completeAddress.address, ovKeys); }); - it('decrypt a log as incoming', () => { - const addressSecret = computeAddressSecret(completeAddress.getPreaddress(), ivskM); + it('decrypt a log as incoming', async () => { + const addressSecret = await computeAddressSecret(await completeAddress.getPreaddress(), ivskM); const recreated = 
EncryptedLogPayload.decryptAsIncoming(payload, addressSecret); @@ -60,7 +60,7 @@ describe('EncryptedLogPayload', () => { }); }); - it('outgoing cipher text matches Noir', () => { + it('outgoing cipher text matches Noir', async () => { const ephSk = GrumpkinScalar.fromHighLow( new Fr(0x000000000000000000000000000000000f096b423017226a18461115fa8d34bbn), new Fr(0x00000000000000000000000000000000d0d302ee245dfaf2807e604eec4715fen), @@ -71,7 +71,7 @@ describe('EncryptedLogPayload', () => { new Fr(0x0000000000000000000000000000000074d2e28c6bc5176ac02cf7c7d36a444en), ); - const ephPk = derivePublicKeyFromSecretKey(ephSk); + const ephPk = await derivePublicKeyFromSecretKey(ephSk); const recipient = AztecAddress.fromBigInt(0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70cn); @@ -99,7 +99,7 @@ describe('EncryptedLogPayload', () => { ); }); - it('encrypted tagged log matches Noir', () => { + it('encrypted tagged log matches Noir', async () => { // All the values in this test were arbitrarily set and copied over to `payload.nr` const contract = AztecAddress.fromString('0x10f48cd9eff7ae5b209c557c70de2e657ee79166868676b787e9417e19260e04'); const plaintext = Buffer.from( @@ -108,13 +108,13 @@ describe('EncryptedLogPayload', () => { ); // We set a random secret, as it is simply the result of an oracle call, and we are not actually computing this in nr. 
- const logTag = new IndexedTaggingSecret(new Fr(69420), 1337).computeTag( + const logTag = await new IndexedTaggingSecret(new Fr(69420), 1337).computeTag( AztecAddress.fromBigInt(0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70cn), ); const log = new EncryptedLogPayload(logTag, contract, plaintext); const ovskM = new GrumpkinScalar(0x1d7f6b3c491e99f32aad05c433301f3a2b4ed68de661ff8255d275ff94de6fc4n); - const ovKeys = getKeyValidationRequest(ovskM, contract); + const ovKeys = await getKeyValidationRequest(ovskM, contract); const ephSk = new GrumpkinScalar(0x1358d15019d4639393d62b97e1588c095957ce74a1c32d6ec7d62fe6705d9538n); @@ -148,9 +148,9 @@ describe('EncryptedLogPayload', () => { expect(recreated?.toBuffer()).toEqual(log.toBuffer()); }); - const getKeyValidationRequest = (ovskM: GrumpkinScalar, app: AztecAddress) => { - const ovskApp = computeOvskApp(ovskM, app); - const ovpkM = derivePublicKeyFromSecretKey(ovskM); + const getKeyValidationRequest = async (ovskM: GrumpkinScalar, app: AztecAddress) => { + const ovskApp = await computeOvskApp(ovskM, app); + const ovpkM = await derivePublicKeyFromSecretKey(ovskM); return new KeyValidationRequest(ovpkM, ovskApp); }; diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts index 599d73eb9c55..08538df36cdb 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts @@ -104,9 +104,9 @@ export class EncryptedLogPayload { ): PrivateLog { const addressPoint = recipient.toAddressPoint(); - const ephPk = derivePublicKeyFromSecretKey(ephSk); - const incomingHeaderCiphertext = encrypt(this.contractAddress.toBuffer(), ephSk, addressPoint); - const outgoingHeaderCiphertext = encrypt(this.contractAddress.toBuffer(), ephSk, ovKeys.pkM); + const ephPk = await derivePublicKeyFromSecretKey(ephSk); + const 
incomingHeaderCiphertext = await encrypt(this.contractAddress.toBuffer(), ephSk, addressPoint); + const outgoingHeaderCiphertext = await encrypt(this.contractAddress.toBuffer(), ephSk, ovKeys.pkM); if (incomingHeaderCiphertext.length !== HEADER_SIZE) { throw new Error(`Invalid incoming header size: ${incomingHeaderCiphertext.length}`); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encryption_util.ts b/yarn-project/circuit-types/src/logs/l1_payload/encryption_util.ts index ed10ad06ff02..208ddc64fd68 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encryption_util.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encryption_util.ts @@ -12,18 +12,18 @@ import { deriveDiffieHellmanAESSecret } from './shared_secret_derivation.js'; * @param deriveSecret - Function to derive the AES secret from the ephemeral secret key and public key * @returns The ciphertext */ -export function encrypt( +export async function encrypt( plaintext: Buffer, secret: GrumpkinScalar, publicKey: PublicKey, - deriveSecret: (secret: GrumpkinScalar, publicKey: PublicKey) => Buffer = deriveDiffieHellmanAESSecret, -): Buffer { - const aesSecret = deriveSecret(secret, publicKey); + deriveSecret: (secret: GrumpkinScalar, publicKey: PublicKey) => Promise = deriveDiffieHellmanAESSecret, +): Promise { + const aesSecret = await deriveSecret(secret, publicKey); const key = aesSecret.subarray(0, 16); const iv = aesSecret.subarray(16, 32); const aes128 = new Aes128(); - return aes128.encryptBufferCBC(plaintext, iv, key); + return await aes128.encryptBufferCBC(plaintext, iv, key); } /** @@ -34,16 +34,16 @@ export function encrypt( * @param deriveSecret - Function to derive the AES secret from the ephemeral secret key and public key * @returns */ -export function decrypt( +export async function decrypt( ciphertext: Buffer, secret: GrumpkinScalar, publicKey: PublicKey, - deriveSecret: (secret: GrumpkinScalar, publicKey: PublicKey) => Buffer = deriveDiffieHellmanAESSecret, -): 
Buffer { - const aesSecret = deriveSecret(secret, publicKey); + deriveSecret: (secret: GrumpkinScalar, publicKey: PublicKey) => Promise = deriveDiffieHellmanAESSecret, +): Promise { + const aesSecret = await deriveSecret(secret, publicKey); const key = aesSecret.subarray(0, 16); const iv = aesSecret.subarray(16, 32); const aes128 = new Aes128(); - return aes128.decryptBufferCBC(ciphertext, iv, key); + return await aes128.decryptBufferCBC(ciphertext, iv, key); } diff --git a/yarn-project/circuit-types/src/logs/l1_payload/shared_secret_derivation.ts b/yarn-project/circuit-types/src/logs/l1_payload/shared_secret_derivation.ts index e9e3ea5caff2..a1a0c3c4f0db 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/shared_secret_derivation.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/shared_secret_derivation.ts @@ -16,14 +16,14 @@ import { numToUInt8 } from '@aztec/foundation/serialize'; * TODO(#5726): This function is called point_to_symmetric_key in Noir. I don't like that name much since point is not * the only input of the function. Unify naming once we have a better name. */ -export function deriveDiffieHellmanAESSecret(secretKey: GrumpkinScalar, publicKey: PublicKey): Buffer { +export async function deriveDiffieHellmanAESSecret(secretKey: GrumpkinScalar, publicKey: PublicKey): Promise { if (publicKey.isZero()) { throw new Error( `Attempting to derive AES secret with a zero public key. You have probably passed a zero public key in your Noir code somewhere thinking that the note won't broadcasted... 
but it was.`, ); } const curve = new Grumpkin(); - const sharedSecret = curve.mul(publicKey, secretKey); + const sharedSecret = await curve.mul(publicKey, secretKey); const secretBuffer = Buffer.concat([sharedSecret.toCompressedBuffer(), numToUInt8(GeneratorIndex.SYMMETRIC_KEY)]); const hash = sha256(secretBuffer); return hash; @@ -37,12 +37,11 @@ export function deriveDiffieHellmanAESSecret(secretKey: GrumpkinScalar, publicKe * @param ephPk - The ephemeral public key * @returns The derived AES symmetric key */ -export function derivePoseidonAESSecret(ovskApp: GrumpkinScalar, ephPk: PublicKey) { +export async function derivePoseidonAESSecret(ovskApp: GrumpkinScalar, ephPk: PublicKey) { // For performance reasons, we do NOT use the usual `deriveAESSecret` function here and instead we compute it using // poseidon. Note that we can afford to use poseidon here instead of deriving shared secret using Diffie-Hellman // because for outgoing we are encrypting for ourselves and hence we don't need to perform a key exchange. - return poseidon2HashWithSeparator( - [ovskApp.hi, ovskApp.lo, ephPk.x, ephPk.y], - GeneratorIndex.SYMMETRIC_KEY, + return ( + await poseidon2HashWithSeparator([ovskApp.hi, ovskApp.lo, ephPk.x, ephPk.y], GeneratorIndex.SYMMETRIC_KEY) ).toBuffer(); } diff --git a/yarn-project/circuit-types/src/logs/l2_block_l2_logs.test.ts b/yarn-project/circuit-types/src/logs/l2_block_l2_logs.test.ts index bc5f1a2e7fbd..081309d183cc 100644 --- a/yarn-project/circuit-types/src/logs/l2_block_l2_logs.test.ts +++ b/yarn-project/circuit-types/src/logs/l2_block_l2_logs.test.ts @@ -6,11 +6,11 @@ function shouldBehaveLikeL2BlockL2Logs( L2BlockL2Logs: typeof UnencryptedL2BlockL2Logs | typeof ContractClass2BlockL2Logs, ) { describe(L2BlockL2Logs.name, () => { - it('can encode L2Logs to buffer and back', () => { + it('can encode L2Logs to buffer and back', async () => { const l2Logs = L2BlockL2Logs.name == 'ContractClass2BlockL2Logs' - ? 
L2BlockL2Logs.random(3, 1, 1) - : L2BlockL2Logs.random(3, 4, 2); + ? await L2BlockL2Logs.random(3, 1, 1) + : await L2BlockL2Logs.random(3, 4, 2); const buffer = l2Logs.toBuffer(); const recovered = L2BlockL2Logs.fromBuffer(buffer); @@ -18,11 +18,11 @@ function shouldBehaveLikeL2BlockL2Logs( expect(recovered).toEqual(l2Logs); }); - it('getSerializedLength returns the correct length', () => { + it('getSerializedLength returns the correct length', async () => { const l2Logs = L2BlockL2Logs.name == 'ContractClass2BlockL2Logs' - ? L2BlockL2Logs.random(3, 1, 1) - : L2BlockL2Logs.random(3, 4, 2); + ? await L2BlockL2Logs.random(3, 1, 1) + : await L2BlockL2Logs.random(3, 4, 2); const buffer = l2Logs.toBuffer(); const recovered = L2BlockL2Logs.fromBuffer(buffer); diff --git a/yarn-project/circuit-types/src/logs/tx_l2_logs.test.ts b/yarn-project/circuit-types/src/logs/tx_l2_logs.test.ts index 9397740891d3..9398b33f23e5 100644 --- a/yarn-project/circuit-types/src/logs/tx_l2_logs.test.ts +++ b/yarn-project/circuit-types/src/logs/tx_l2_logs.test.ts @@ -4,8 +4,9 @@ import { ContractClassTxL2Logs, UnencryptedTxL2Logs } from './tx_l2_logs.js'; function shouldBehaveLikeTxL2Logs(TxL2Logs: typeof UnencryptedTxL2Logs | typeof ContractClassTxL2Logs) { describe(TxL2Logs.name, () => { - it('can encode TxL2Logs to buffer and back', () => { - const l2Logs = TxL2Logs.name == 'ContractClassTxL2Logs' ? TxL2Logs.random(1, 1) : TxL2Logs.random(4, 2); + it('can encode TxL2Logs to buffer and back', async () => { + const l2Logs = + TxL2Logs.name == 'ContractClassTxL2Logs' ? await TxL2Logs.random(1, 1) : await TxL2Logs.random(4, 2); const buffer = l2Logs.toBuffer(); const recovered = TxL2Logs.fromBuffer(buffer); @@ -13,8 +14,9 @@ function shouldBehaveLikeTxL2Logs(TxL2Logs: typeof UnencryptedTxL2Logs | typeof expect(recovered).toEqual(l2Logs); }); - it('can encode TxL2Logs to JSON and back', () => { - const l2Logs = TxL2Logs.name == 'ContractClassTxL2Logs' ? 
TxL2Logs.random(1, 1) : TxL2Logs.random(4, 2); + it('can encode TxL2Logs to JSON and back', async () => { + const l2Logs = + TxL2Logs.name == 'ContractClassTxL2Logs' ? await TxL2Logs.random(1, 1) : await TxL2Logs.random(4, 2); const buffer = jsonStringify(l2Logs); const recovered = TxL2Logs.schema.parse(JSON.parse(buffer)); @@ -22,16 +24,18 @@ function shouldBehaveLikeTxL2Logs(TxL2Logs: typeof UnencryptedTxL2Logs | typeof expect(recovered).toEqual(l2Logs); }); - it('getSerializedLength returns the correct length', () => { - const l2Logs = TxL2Logs.name == 'ContractClassTxL2Logs' ? TxL2Logs.random(1, 1) : TxL2Logs.random(4, 2); + it('getSerializedLength returns the correct length', async () => { + const l2Logs = + TxL2Logs.name == 'ContractClassTxL2Logs' ? await TxL2Logs.random(1, 1) : await TxL2Logs.random(4, 2); const buffer = l2Logs.toBuffer(); const recovered = TxL2Logs.fromBuffer(buffer); expect(recovered.getSerializedLength()).toEqual(buffer.length); }); - it('getKernelLength returns the correct length', () => { - const l2Logs = TxL2Logs.name == 'ContractClassTxL2Logs' ? TxL2Logs.random(1, 1) : TxL2Logs.random(4, 2); + it('getKernelLength returns the correct length', async () => { + const l2Logs = + TxL2Logs.name == 'ContractClassTxL2Logs' ? 
await TxL2Logs.random(1, 1) : await TxL2Logs.random(4, 2); const expectedLength = l2Logs.functionLogs.map(l => l.getKernelLength()).reduce((a, b) => a + b, 0); diff --git a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts index 3dc438940e20..0269cfe2bdf0 100644 --- a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts +++ b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts @@ -86,7 +86,7 @@ export async function getNonNullifiedL1ToL2MessageWitness( } const [messageIndex, siblingPath] = response; - const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); + const messageNullifier = await computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); const [nullifierIndex] = await node.findLeavesIndexes('latest', MerkleTreeId.NULLIFIER_TREE, [messageNullifier]); if (nullifierIndex !== undefined) { diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 4d38c4bc9868..96a545d6a53d 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -36,7 +36,7 @@ import { TxEffect } from './tx_effect.js'; export const randomTxHash = (): TxHash => new TxHash(randomBytes(32)); -export const mockPrivateExecutionResult = ( +export const mockPrivateExecutionResult = async ( seed = 1, numberOfNonRevertiblePublicCallRequests = MAX_ENQUEUED_CALLS_PER_TX / 2, numberOfRevertiblePublicCallRequests = MAX_ENQUEUED_CALLS_PER_TX / 2, @@ -52,7 +52,9 @@ export const mockPrivateExecutionResult = ( if (isForPublic) { const publicCallRequests = times(totalPublicCallRequests, i => makePublicCallRequest(seed + 0x102 + i)).reverse(); // Reverse it so that they are sorted by counters in descending order. 
const publicFunctionArgs = times(totalPublicCallRequests, i => [new Fr(seed + i * 100), new Fr(seed + i * 101)]); - publicCallRequests.forEach((r, i) => (r.argsHash = computeVarArgsHash(publicFunctionArgs[i]))); + for (const [i, r] of publicCallRequests.entries()) { + r.argsHash = await computeVarArgsHash(publicFunctionArgs[i]); + } if (hasPublicTeardownCallRequest) { const request = publicCallRequests.shift()!; @@ -80,7 +82,7 @@ export const mockPrivateExecutionResult = ( ); }; -export const mockTx = ( +export const mockTx = async ( seed = 1, { numberOfNonRevertiblePublicCallRequests = MAX_ENQUEUED_CALLS_PER_TX / 2, @@ -117,7 +119,9 @@ export const mockTx = ( const publicCallRequests = times(totalPublicCallRequests, i => makePublicCallRequest(seed + 0x102 + i)).reverse(); // Reverse it so that they are sorted by counters in descending order. const publicFunctionArgs = times(totalPublicCallRequests, i => [new Fr(seed + i * 100), new Fr(seed + i * 101)]); - publicCallRequests.forEach((r, i) => (r.argsHash = computeVarArgsHash(publicFunctionArgs[i]))); + for (const [i, r] of publicCallRequests.entries()) { + r.argsHash = await computeVarArgsHash(publicFunctionArgs[i]); + } if (hasPublicTeardownCallRequest) { const request = publicCallRequests.shift()!; @@ -161,7 +165,7 @@ export const mockSimulatedTx = (seed = 1) => { const output = new PublicSimulationOutput( undefined, makeCombinedConstantData(), - TxEffect.random(), + await TxEffect.random(), [accumulatePrivateReturnValues(privateExecutionResult)], { totalGas: makeGas(), @@ -201,18 +205,18 @@ export const randomContractArtifact = (): ContractArtifact => ({ notes: {}, }); -export const randomContractInstanceWithAddress = ( +export const randomContractInstanceWithAddress = async ( opts: { contractClassId?: Fr } = {}, address?: AztecAddress, -): ContractInstanceWithAddress => { - const instance = SerializableContractInstance.random(opts); - return instance.withAddress(address ?? 
computeContractAddressFromInstance(instance)); +): Promise => { + const instance = await SerializableContractInstance.random(opts); + return instance.withAddress(address ?? (await computeContractAddressFromInstance(instance))); }; -export const randomDeployedContract = () => { +export const randomDeployedContract = async () => { const artifact = randomContractArtifact(); - const contractClassId = computeContractClassId(getContractClassFromArtifact(artifact)); - return { artifact, instance: randomContractInstanceWithAddress({ contractClassId }) }; + const contractClassId = await computeContractClassId(await getContractClassFromArtifact(artifact)); + return { artifact, instance: await randomContractInstanceWithAddress({ contractClassId }) }; }; export const randomExtendedNote = ({ diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.test.ts b/yarn-project/circuit-types/src/p2p/block_attestation.test.ts index fc32fa7a704c..477d080b73e0 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.test.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.test.ts @@ -10,18 +10,18 @@ describe('Block Attestation serialization / deserialization', () => { expect(deserialized).toEqual(serialized); }; - it('Should serialize / deserialize', () => { - const attestation = makeBlockAttestation(); + it('Should serialize / deserialize', async () => { + const attestation = await makeBlockAttestation(); const serialized = attestation.toBuffer(); const deserialized = BlockAttestation.fromBuffer(serialized); checkEquivalence(attestation, deserialized); }); - it('Should serialize / deserialize + recover sender', () => { + it('Should serialize / deserialize + recover sender', async () => { const account = Secp256k1Signer.random(); - const attestation = makeBlockAttestation({ signer: account }); + const attestation = await makeBlockAttestation({ signer: account }); const serialized = attestation.toBuffer(); const deserialized = 
BlockAttestation.fromBuffer(serialized); diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 04ccfdf4d52b..cb993cc5e11e 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -50,10 +50,13 @@ export class BlockAttestation extends Gossipable { * Lazily evaluate and cache the sender of the attestation * @returns The sender of the attestation */ - getSender() { + async getSender() { if (!this.sender) { // Recover the sender from the attestation - const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeperator.blockAttestation); + const hashed = await getHashedSignaturePayloadEthSignedMessage( + this.payload, + SignatureDomainSeperator.blockAttestation, + ); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } @@ -61,8 +64,8 @@ export class BlockAttestation extends Gossipable { return this.sender; } - getPayload(): Buffer { - return this.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation); + async getPayload(): Promise { + return await this.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation); } toBuffer(): Buffer { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.test.ts b/yarn-project/circuit-types/src/p2p/block_proposal.test.ts index f1ea41ece797..8131b49d9cd0 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.test.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.test.ts @@ -10,18 +10,18 @@ describe('Block Proposal serialization / deserialization', () => { expect(deserialized).toEqual(serialized); }; - it('Should serialize / deserialize', () => { - const proposal = makeBlockProposal(); + it('Should serialize / deserialize', async () => { + const proposal = await makeBlockProposal(); const serialized = proposal.toBuffer(); const deserialized = 
BlockProposal.fromBuffer(serialized); checkEquivalence(proposal, deserialized); }); - it('Should serialize / deserialize + recover sender', () => { + it('Should serialize / deserialize + recover sender', async () => { const account = Secp256k1Signer.random(); - const proposal = makeBlockProposal({ signer: account }); + const proposal = await makeBlockProposal({ signer: account }); const serialized = proposal.toBuffer(); const deserialized = BlockProposal.fromBuffer(serialized); diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 207312ba4a1d..2aedf598d069 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -57,7 +57,7 @@ export class BlockProposal extends Gossipable { payload: ConsensusPayload, payloadSigner: (payload: Buffer32) => Promise, ) { - const hashed = getHashedSignaturePayload(payload, SignatureDomainSeperator.blockProposal); + const hashed = await getHashedSignaturePayload(payload, SignatureDomainSeperator.blockProposal); const sig = await payloadSigner(hashed); return new BlockProposal(payload, sig); @@ -66,9 +66,12 @@ export class BlockProposal extends Gossipable { /**Get Sender * Lazily evaluate the sender of the proposal; result is cached */ - getSender() { + async getSender() { if (!this.sender) { - const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeperator.blockProposal); + const hashed = await getHashedSignaturePayloadEthSignedMessage( + this.payload, + SignatureDomainSeperator.blockProposal, + ); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 1e4e99ac0420..a30ae4256f11 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -16,7 +16,7 @@ export interface 
MakeConsensusPayloadOptions { txHashes?: TxHash[]; } -const makeAndSignConsensusPayload = ( +const makeAndSignConsensusPayload = async ( domainSeperator: SignatureDomainSeperator, options?: MakeConsensusPayloadOptions, ) => { @@ -33,19 +33,19 @@ const makeAndSignConsensusPayload = ( txHashes, }); - const hash = getHashedSignaturePayloadEthSignedMessage(payload, domainSeperator); + const hash = await getHashedSignaturePayloadEthSignedMessage(payload, domainSeperator); const signature = signer.sign(hash); return { payload, signature }; }; -export const makeBlockProposal = (options?: MakeConsensusPayloadOptions): BlockProposal => { - const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeperator.blockProposal, options); +export const makeBlockProposal = async (options?: MakeConsensusPayloadOptions): Promise => { + const { payload, signature } = await makeAndSignConsensusPayload(SignatureDomainSeperator.blockProposal, options); return new BlockProposal(payload, signature); }; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8028) -export const makeBlockAttestation = (options?: MakeConsensusPayloadOptions): BlockAttestation => { - const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeperator.blockAttestation, options); +export const makeBlockAttestation = async (options?: MakeConsensusPayloadOptions): Promise => { + const { payload, signature } = await makeAndSignConsensusPayload(SignatureDomainSeperator.blockAttestation, options); return new BlockAttestation(payload, signature); }; diff --git a/yarn-project/circuit-types/src/p2p/signature_utils.ts b/yarn-project/circuit-types/src/p2p/signature_utils.ts index 25e20ded5d81..eff29685f981 100644 --- a/yarn-project/circuit-types/src/p2p/signature_utils.ts +++ b/yarn-project/circuit-types/src/p2p/signature_utils.ts @@ -7,7 +7,7 @@ export enum SignatureDomainSeperator { } export interface Signable { - getPayloadToSign(domainSeperator: SignatureDomainSeperator): Buffer; 
+ getPayloadToSign(domainSeperator: SignatureDomainSeperator): Promise; } /** @@ -15,8 +15,11 @@ export interface Signable { * @param s - The `Signable` to sign * @returns The hashed payload for the signature of the `Signable` */ -export function getHashedSignaturePayload(s: Signable, domainSeperator: SignatureDomainSeperator): Buffer32 { - return Buffer32.fromBuffer(keccak256(s.getPayloadToSign(domainSeperator))); +export async function getHashedSignaturePayload( + s: Signable, + domainSeperator: SignatureDomainSeperator, +): Promise { + return Buffer32.fromBuffer(keccak256(await s.getPayloadToSign(domainSeperator))); } /** @@ -24,10 +27,10 @@ export function getHashedSignaturePayload(s: Signable, domainSeperator: Signatur * @param s - the `Signable` to sign * @returns The hashed payload for the signature of the `Signable` as an Ethereum signed message */ -export function getHashedSignaturePayloadEthSignedMessage( +export async function getHashedSignaturePayloadEthSignedMessage( s: Signable, domainSeperator: SignatureDomainSeperator, -): Buffer32 { - const payload = getHashedSignaturePayload(s, domainSeperator); +): Promise { + const payload = await getHashedSignaturePayload(s, domainSeperator); return makeEthSignDigest(payload); } diff --git a/yarn-project/circuit-types/src/packed_values.ts b/yarn-project/circuit-types/src/packed_values.ts index 4dfbcea863cf..c8020634bf05 100644 --- a/yarn-project/circuit-types/src/packed_values.ts +++ b/yarn-project/circuit-types/src/packed_values.ts @@ -32,8 +32,8 @@ export class PackedValues { return PackedValues.fromValues([Fr.random(), Fr.random()]); } - static fromValues(values: Fr[]) { - return new PackedValues(values, computeVarArgsHash(values)); + static async fromValues(values: Fr[]) { + return new PackedValues(values, await computeVarArgsHash(values)); } toBuffer() { diff --git a/yarn-project/circuit-types/src/private_execution_result.ts b/yarn-project/circuit-types/src/private_execution_result.ts index 
2690bc779ada..e31c154e36eb 100644 --- a/yarn-project/circuit-types/src/private_execution_result.ts +++ b/yarn-project/circuit-types/src/private_execution_result.ts @@ -171,7 +171,7 @@ export class PrivateExecutionResult { [NoteAndSlot.random()], new Map([[0, 0]]), [Fr.random()], - times(nested, () => PrivateExecutionResult.random(0)), + await Promise.all(times(nested, async () => await PrivateExecutionResult.random(0))), [CountedPublicExecutionRequest.random()], PublicExecutionRequest.random(), [new CountedContractClassLog(UnencryptedL2Log.random(), randomInt(10))], diff --git a/yarn-project/circuit-types/src/public_execution_request.ts b/yarn-project/circuit-types/src/public_execution_request.ts index 6371bac3b09f..c94733018923 100644 --- a/yarn-project/circuit-types/src/public_execution_request.ts +++ b/yarn-project/circuit-types/src/public_execution_request.ts @@ -65,23 +65,23 @@ export class PublicExecutionRequest { return this.callContext.isEmpty() && this.args.length === 0; } - isForCallRequest(callRequest: PublicCallRequest) { + async isForCallRequest(callRequest: PublicCallRequest) { return ( this.callContext.msgSender.equals(callRequest.msgSender) && this.callContext.contractAddress.equals(callRequest.contractAddress) && this.callContext.functionSelector.equals(callRequest.functionSelector) && this.callContext.isStaticCall == callRequest.isStaticCall && - computeVarArgsHash(this.args).equals(callRequest.argsHash) + (await computeVarArgsHash(this.args)).equals(callRequest.argsHash) ); } - toCallRequest(): PublicCallRequest { + async toCallRequest(): Promise { return new PublicCallRequest( this.callContext.msgSender, this.callContext.contractAddress, this.callContext.functionSelector, this.callContext.isStaticCall, - computeVarArgsHash(this.args), + await computeVarArgsHash(this.args), ); } diff --git a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts index 96738dab3686..8438f439aeb5 
100644 --- a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts +++ b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts @@ -59,12 +59,12 @@ export class SiblingPath { * @param hasher - Implementation of a hasher interface. * @returns A sibling path hashed up from a zero element. */ - public static ZERO(size: N, zeroElement: Buffer, hasher: Hasher): SiblingPath { + public static async ZERO(size: N, zeroElement: Buffer, hasher: Hasher): Promise> { const bufs: Buffer[] = []; let current = zeroElement; for (let i = 0; i < size; ++i) { bufs.push(current); - current = hasher.hash(current, current); + current = await hasher.hash(current, current); } return new SiblingPath(size, bufs); } diff --git a/yarn-project/circuit-types/src/test/factories.ts b/yarn-project/circuit-types/src/test/factories.ts index 6360135750ad..4c4162d036d1 100644 --- a/yarn-project/circuit-types/src/test/factories.ts +++ b/yarn-project/circuit-types/src/test/factories.ts @@ -29,7 +29,7 @@ import { mockTx } from '../mocks.js'; import { makeProcessedTxFromPrivateOnlyTx, makeProcessedTxFromTxWithPublicCalls } from '../tx/processed_tx.js'; /** Makes a bloated processed tx for testing purposes. */ -export function makeBloatedProcessedTx({ +export async function makeBloatedProcessedTx({ seed = 1, header, db, @@ -64,8 +64,8 @@ export function makeBloatedProcessedTx({ txConstantData.protocolContractTreeRoot = protocolContractTreeRoot; const tx = !privateOnly - ? mockTx(seed) - : mockTx(seed, { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 0 }); + ? await mockTx(seed) + : await mockTx(seed, { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 0 }); tx.data.constants = txConstantData; // No side effects were created in mockTx. The default gasUsed is the tx overhead. 
diff --git a/yarn-project/circuit-types/src/tx/tx.test.ts b/yarn-project/circuit-types/src/tx/tx.test.ts index 0710303b16b0..b70e0969beb7 100644 --- a/yarn-project/circuit-types/src/tx/tx.test.ts +++ b/yarn-project/circuit-types/src/tx/tx.test.ts @@ -4,8 +4,8 @@ import { mockTx } from '../mocks.js'; import { Tx } from './tx.js'; describe('Tx', () => { - it('convert to and from buffer', () => { - const tx = mockTx(); + it('convert to and from buffer', async () => { + const tx = await mockTx(); const buf = tx.toBuffer(); expect(Tx.fromBuffer(buf)).toEqual(tx); }); diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts index bbdd0037d4d3..9c1175b5550b 100644 --- a/yarn-project/circuit-types/src/tx/tx.ts +++ b/yarn-project/circuit-types/src/tx/tx.ts @@ -263,7 +263,7 @@ export class Tx extends Gossipable { ); } - static random() { + static async random() { return new Tx( PrivateKernelTailCircuitPublicInputs.emptyWithNullifier(), ClientIvcProof.empty(), diff --git a/yarn-project/circuit-types/src/tx_effect.test.ts b/yarn-project/circuit-types/src/tx_effect.test.ts index c3a064354027..f6729de688c7 100644 --- a/yarn-project/circuit-types/src/tx_effect.test.ts +++ b/yarn-project/circuit-types/src/tx_effect.test.ts @@ -1,8 +1,8 @@ import { TxEffect } from './tx_effect.js'; describe('TxEffect', () => { - it('convert to and from buffer', () => { - const txEffect = TxEffect.random(); + it('convert to and from buffer', async () => { + const txEffect = await TxEffect.random(); const buf = txEffect.toBuffer(); expect(TxEffect.fromBuffer(buf)).toEqual(txEffect); }); diff --git a/yarn-project/circuit-types/src/tx_execution_request.ts b/yarn-project/circuit-types/src/tx_execution_request.ts index bdd90e28550f..0f0cb8b41473 100644 --- a/yarn-project/circuit-types/src/tx_execution_request.ts +++ b/yarn-project/circuit-types/src/tx_execution_request.ts @@ -132,13 +132,13 @@ export class TxExecutionRequest { return 
TxExecutionRequest.fromBuffer(hexToBuffer(str)); } - static random() { + static async random() { return new TxExecutionRequest( AztecAddress.random(), FunctionSelector.random(), Fr.random(), TxContext.empty(), - [PackedValues.random()], + [await PackedValues.random()], [AuthWitness.random()], ); } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index 2f6ca3d5e940..496c1b0a93c6 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -26,7 +26,7 @@ describe('aes128', () => { return paddedBuffer.subarray(0, paddedBuffer.length - paddingToRemove); }; - it('should correctly encrypt input', () => { + it('should correctly encrypt input', async () => { const data = randomBytes(32); const key = randomBytes(16); const iv = randomBytes(16); @@ -37,12 +37,12 @@ describe('aes128', () => { cipher.setAutoPadding(false); const expected = Buffer.concat([cipher.update(paddedData), cipher.final()]); - const result: Buffer = aes128.encryptBufferCBC(data, iv, key); + const result: Buffer = await aes128.encryptBufferCBC(data, iv, key); expect(result).toEqual(expected); }); - it('should correctly decrypt input', () => { + it('should correctly decrypt input', async () => { const data = randomBytes(32); const key = randomBytes(16); const iv = randomBytes(16); @@ -57,7 +57,7 @@ describe('aes128', () => { decipher.setAutoPadding(false); const expected = removePadding(Buffer.concat([decipher.update(ciphertext), decipher.final()])); - const result: Buffer = aes128.decryptBufferCBC(ciphertext, iv, key); + const result: Buffer = await aes128.decryptBufferCBC(ciphertext, iv, key); expect(result).toEqual(expected); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index 
824e83b4b7e4..e4c51b258f16 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -13,7 +13,7 @@ export class Aes128 { * @param key - Key to encrypt with. * @returns Encrypted data. */ - public encryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { + public async encryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { const rawLength = data.length; const numPaddingBytes = 16 - (rawLength % 16); const paddingBuffer = Buffer.alloc(numPaddingBytes); @@ -22,7 +22,7 @@ export class Aes128 { paddingBuffer.fill(numPaddingBytes); const input = Buffer.concat([data, paddingBuffer]); - const api = BarretenbergSync.getSingleton(); + const api = await BarretenbergSync.getSingleton(); return Buffer.from( api.aesEncryptBufferCbc(new RawBuffer(input), new RawBuffer(iv), new RawBuffer(key), input.length), ); @@ -35,8 +35,8 @@ export class Aes128 { * @param key - Key to decrypt with. * @returns Decrypted data. 
*/ - public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { - const api = BarretenbergSync.getSingleton(); + public async decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { + const api = await BarretenbergSync.getSingleton(); const paddedBuffer = Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.test.ts index 57a61da31248..f638f59cb3af 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.test.ts @@ -9,21 +9,21 @@ describe('ecdsa', () => { ecdsa = new Ecdsa(); }); - it('should verify signature', () => { + it('should verify signature', async () => { // prettier-ignore const privateKey = Buffer.from([ 0x0b, 0x9b, 0x3a, 0xde, 0xe6, 0xb3, 0xd8, 0x1b, 0x28, 0xa0, 0x88, 0x6b, 0x2a, 0x84, 0x15, 0xc7, 0xda, 0x31, 0x29, 0x1a, 0x5e, 0x96, 0xbb, 0x7a, 0x56, 0x63, 0x9e, 0x17, 0x7d, 0x30, 0x1b, 0xeb, ]); - const pubKey = ecdsa.computePublicKey(privateKey); + const pubKey = await ecdsa.computePublicKey(privateKey); const msg = new TextEncoder().encode('The quick brown dog jumped over the lazy fox.'); - const signature = ecdsa.constructSignature(msg, privateKey); + const signature = await ecdsa.constructSignature(msg, privateKey); const verified = ecdsa.verifySignature(msg, pubKey, signature); expect(verified).toBe(true); }); - it('should recover public key from signature', () => { + it('should recover public key from signature', async () => { // prettier-ignore const privateKey = Buffer.from([ 0x0b, 0x9b, 0x3a, 0xde, 0xe6, 0xb3, 0xd8, 0x1b, 0x28, 0xa0, 0x88, 0x6b, 0x2a, 0x84, 0x15, 0xc7, @@ -31,10 +31,10 @@ describe('ecdsa', () => { ]); const pubKey = ecdsa.computePublicKey(privateKey); const msg = new TextEncoder().encode('The quick brown 
dog jumped over the lazy fox...'); - const signature = ecdsa.constructSignature(msg, privateKey); + const signature = await ecdsa.constructSignature(msg, privateKey); // First, recover the public key - const recoveredPubKey = ecdsa.recoverPublicKey(msg, signature); + const recoveredPubKey = await ecdsa.recoverPublicKey(msg, signature); // Then, verify the signature using the recovered public key const verified = ecdsa.verifySignature(msg, recoveredPubKey, signature); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts index 3b7dd7d3d73e..8f71196a3354 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts @@ -9,17 +9,18 @@ export * from './signature.js'; * TODO: Replace with codegen api on bb.js. */ export class Ecdsa { - private wasm = BarretenbergSync.getSingleton().getWasm(); + private wasm = BarretenbergSync.getSingleton().then(api => api.getWasm()); /** * Computes a secp256k1 public key from a private key. * @param privateKey - Secp256k1 private key. * @returns A secp256k1 public key. */ - public computePublicKey(privateKey: Buffer): Buffer { - this.wasm.writeMemory(0, privateKey); - this.wasm.call('ecdsa__compute_public_key', 0, 32); - return Buffer.from(this.wasm.getMemorySlice(32, 96)); + public async computePublicKey(privateKey: Buffer): Promise { + const wasm = await this.wasm; + wasm.writeMemory(0, privateKey); + wasm.call('ecdsa__compute_public_key', 0, 32); + return Buffer.from(wasm.getMemorySlice(32, 96)); } /** @@ -28,16 +29,17 @@ export class Ecdsa { * @param privateKey - The secp256k1 private key of the signer. * @returns An ECDSA signature of the form (r, s, v). 
*/ - public constructSignature(msg: Uint8Array, privateKey: Buffer) { - const mem = this.wasm.call('bbmalloc', msg.length); - this.wasm.writeMemory(0, privateKey); - this.wasm.writeMemory(mem, msg); - this.wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); + public async constructSignature(msg: Uint8Array, privateKey: Buffer) { + const wasm = await this.wasm; + const mem = wasm.call('bbmalloc', msg.length); + wasm.writeMemory(0, privateKey); + wasm.writeMemory(mem, msg); + wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); return new EcdsaSignature( - Buffer.from(this.wasm.getMemorySlice(32, 64)), - Buffer.from(this.wasm.getMemorySlice(64, 96)), - Buffer.from(this.wasm.getMemorySlice(96, 97)), + Buffer.from(wasm.getMemorySlice(32, 64)), + Buffer.from(wasm.getMemorySlice(64, 96)), + Buffer.from(wasm.getMemorySlice(96, 97)), ); } @@ -47,15 +49,16 @@ export class Ecdsa { * @param sig - The ECDSA signature. * @returns The secp256k1 public key of the signer. */ - public recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Buffer { - const mem = this.wasm.call('bbmalloc', msg.length); - this.wasm.writeMemory(0, sig.r); - this.wasm.writeMemory(32, sig.s); - this.wasm.writeMemory(64, sig.v); - this.wasm.writeMemory(mem, msg); - this.wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); + public async recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Promise { + const wasm = await this.wasm; + const mem = wasm.call('bbmalloc', msg.length); + wasm.writeMemory(0, sig.r); + wasm.writeMemory(32, sig.s); + wasm.writeMemory(64, sig.v); + wasm.writeMemory(mem, msg); + wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); - return Buffer.from(this.wasm.getMemorySlice(65, 129)); + return Buffer.from(wasm.getMemorySlice(65, 129)); } /** @@ -65,13 +68,14 @@ export class Ecdsa { * @param sig - The ECDSA signature. * @returns True or false. 
*/ - public verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { - const mem = this.wasm.call('bbmalloc', msg.length); - this.wasm.writeMemory(0, pubKey); - this.wasm.writeMemory(64, sig.r); - this.wasm.writeMemory(96, sig.s); - this.wasm.writeMemory(128, sig.v); - this.wasm.writeMemory(mem, msg); - return this.wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; + public async verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { + const wasm = await this.wasm; + const mem = wasm.call('bbmalloc', msg.length); + wasm.writeMemory(0, pubKey); + wasm.writeMemory(64, sig.r); + wasm.writeMemory(96, sig.s); + wasm.writeMemory(128, sig.v); + wasm.writeMemory(mem, msg); + return wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts index 25f2de5f5d23..33f11de44bc4 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts @@ -12,18 +12,18 @@ describe('grumpkin', () => { grumpkin = new Grumpkin(); }); - it('should correctly perform scalar muls', () => { + it('should correctly perform scalar muls', async () => { const exponent = GrumpkinScalar.random(); const numPoints = 2048; const inputPoints: Point[] = []; for (let i = 0; i < numPoints; ++i) { - inputPoints.push(grumpkin.mul(Grumpkin.generator, GrumpkinScalar.random())); + inputPoints.push(await grumpkin.mul(Grumpkin.generator, GrumpkinScalar.random())); } const start = new Date().getTime(); - const outputPoints = grumpkin.batchMul(inputPoints, exponent); + const outputPoints = await grumpkin.batchMul(inputPoints, exponent); log.debug(`batch mul in: ${new Date().getTime() - start}ms`); const start2 = new Date().getTime(); @@ -34,7 +34,7 @@ describe('grumpkin', 
() => { for (let i = 0; i < numPoints; ++i) { const lhs = outputPoints[i]; - const rhs = grumpkin.mul(inputPoints[i], exponent); + const rhs = await grumpkin.mul(inputPoints[i], exponent); expect(lhs).toEqual(rhs); } }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts index c146417f140d..2eb759080a34 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts @@ -5,7 +5,7 @@ import { Fr, type GrumpkinScalar, Point } from '@aztec/foundation/fields'; * Grumpkin elliptic curve operations. */ export class Grumpkin { - private wasm = BarretenbergSync.getSingleton().getWasm(); + private wasm = BarretenbergSync.getSingleton().then(api => api.getWasm()); // prettier-ignore static generator = Point.fromBuffer(Buffer.from([ @@ -29,11 +29,12 @@ export class Grumpkin { * @param scalar - Scalar to multiply by. * @returns Result of the multiplication. */ - public mul(point: Point, scalar: GrumpkinScalar): Point { - this.wasm.writeMemory(0, point.toBuffer()); - this.wasm.writeMemory(64, scalar.toBuffer()); - this.wasm.call('ecc_grumpkin__mul', 0, 64, 96); - return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(96, 160))); + public async mul(point: Point, scalar: GrumpkinScalar): Promise { + const wasm = await this.wasm; + wasm.writeMemory(0, point.toBuffer()); + wasm.writeMemory(64, scalar.toBuffer()); + wasm.call('ecc_grumpkin__mul', 0, 64, 96); + return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(96, 160))); } /** @@ -42,11 +43,12 @@ export class Grumpkin { * @param b - Point b to add to a * @returns Result of the addition. 
*/ - public add(a: Point, b: Point): Point { - this.wasm.writeMemory(0, a.toBuffer()); - this.wasm.writeMemory(64, b.toBuffer()); - this.wasm.call('ecc_grumpkin__add', 0, 64, 128); - return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(128, 192))); + public async add(a: Point, b: Point): Promise { + const wasm = await this.wasm; + wasm.writeMemory(0, a.toBuffer()); + wasm.writeMemory(64, b.toBuffer()); + wasm.call('ecc_grumpkin__add', 0, 64, 128); + return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(128, 192))); } /** @@ -55,20 +57,21 @@ export class Grumpkin { * @param scalar - Scalar to multiply by. * @returns Points multiplied by the scalar. */ - public batchMul(points: Point[], scalar: GrumpkinScalar) { + public async batchMul(points: Point[], scalar: GrumpkinScalar) { + const wasm = await this.wasm; const concatenatedPoints: Buffer = Buffer.concat(points.map(point => point.toBuffer())); const pointsByteLength = points.length * Point.SIZE_IN_BYTES; - const mem = this.wasm.call('bbmalloc', pointsByteLength * 2); + const mem = wasm.call('bbmalloc', pointsByteLength * 2); - this.wasm.writeMemory(mem, concatenatedPoints); - this.wasm.writeMemory(0, scalar.toBuffer()); - this.wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); + wasm.writeMemory(mem, concatenatedPoints); + wasm.writeMemory(0, scalar.toBuffer()); + wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); const result: Buffer = Buffer.from( - this.wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), + wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), ); - this.wasm.call('bbfree', mem); + wasm.call('bbfree', mem); const parsedResult: Point[] = []; for (let i = 0; i < pointsByteLength; i += 64) { @@ -81,9 +84,10 @@ export class Grumpkin { * Gets a random field element. * @returns Random field element. 
*/ - public getRandomFr(): Fr { - this.wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); - return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(0, 32))); + public async getRandomFr(): Promise { + const wasm = await this.wasm; + wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); + return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(0, 32))); } /** @@ -91,9 +95,10 @@ export class Grumpkin { * @param uint512Buf - The buffer to convert. * @returns Buffer representation of the field element. */ - public reduce512BufferToFr(uint512Buf: Buffer): Fr { - this.wasm.writeMemory(0, uint512Buf); - this.wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(64, 96))); + public async reduce512BufferToFr(uint512Buf: Buffer): Promise { + const wasm = await this.wasm; + wasm.writeMemory(0, uint512Buf); + wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(64, 96))); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.test.ts index 94763421d661..37fc2dacffce 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.test.ts @@ -11,30 +11,30 @@ describe('schnorr', () => { schnorr = new Schnorr(); }); - it('should verify signature', () => { + it('should verify signature', async () => { // prettier-ignore const privateKey = GrumpkinScalar.fromBuffer(Buffer.from([ 0x0b, 0x9b, 0x3a, 0xde, 0xe6, 0xb3, 0xd8, 0x1b, 0x28, 0xa0, 0x88, 0x6b, 0x2a, 0x84, 0x15, 0xc7, 0xda, 0x31, 0x29, 0x1a, 0x5e, 0x96, 0xbb, 0x7a, 0x56, 0x63, 0x9e, 0x17, 0x7d, 0x30, 0x1b, 0xeb, ])); - const pubKey = schnorr.computePublicKey(privateKey); + const pubKey = await schnorr.computePublicKey(privateKey); const msg = new 
TextEncoder().encode('The quick brown dog jumped over the lazy fox.'); - const signature = schnorr.constructSignature(msg, privateKey); - const verified = schnorr.verifySignature(msg, pubKey, signature); + const signature = await schnorr.constructSignature(msg, privateKey); + const verified = await schnorr.verifySignature(msg, pubKey, signature); expect(verified).toBe(true); }); - it('should fail invalid signature', () => { + it('should fail invalid signature', async () => { // prettier-ignore const privateKey = GrumpkinScalar.fromBuffer(Buffer.from([ 0x0b, 0x9b, 0x3a, 0xde, 0xe6, 0xb3, 0xd8, 0x1b, 0x28, 0xa0, 0x88, 0x6b, 0x2a, 0x84, 0x15, 0xc7, 0xda, 0x31, 0x29, 0x1a, 0x5e, 0x96, 0xbb, 0x7a, 0x56, 0x63, 0x9e, 0x17, 0x7d, 0x30, 0x1b, 0xeb, ])); - const pubKey = schnorr.computePublicKey(privateKey); + const pubKey = await schnorr.computePublicKey(privateKey); const msg = new TextEncoder().encode('The quick brown dog jumped over the lazy fox.'); - const signature = schnorr.constructSignature(msg, GrumpkinScalar.random()); - const verified = schnorr.verifySignature(msg, pubKey, signature); + const signature = await schnorr.constructSignature(msg, GrumpkinScalar.random()); + const verified = await schnorr.verifySignature(msg, pubKey, signature); expect(verified).toBe(false); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts index 92c33f47e56e..cb69caf54d38 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts @@ -11,17 +11,18 @@ export * from './signature.js'; * Schnorr signature construction and helper operations. */ export class Schnorr { - private wasm = BarretenbergSync.getSingleton().getWasm(); + private wasm = BarretenbergSync.getSingleton().then(api => api.getWasm()); /** * Computes a grumpkin public key from a private key. * @param privateKey - The private key. 
* @returns A grumpkin public key. */ - public computePublicKey(privateKey: GrumpkinScalar): PublicKey { - this.wasm.writeMemory(0, privateKey.toBuffer()); - this.wasm.call('schnorr_compute_public_key', 0, 32); - return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(32, 96))); + public async computePublicKey(privateKey: GrumpkinScalar): Promise { + const wasm = await this.wasm; + wasm.writeMemory(0, privateKey.toBuffer()); + wasm.call('schnorr_compute_public_key', 0, 32); + return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(32, 96))); } /** @@ -30,13 +31,14 @@ export class Schnorr { * @param privateKey - The private key of the signer. * @returns A Schnorr signature of the form (s, e). */ - public constructSignature(msg: Uint8Array, privateKey: GrumpkinScalar) { - const mem = this.wasm.call('bbmalloc', msg.length + 4); - this.wasm.writeMemory(0, privateKey.toBuffer()); - this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - this.wasm.call('schnorr_construct_signature', mem, 0, 32, 64); + public async constructSignature(msg: Uint8Array, privateKey: GrumpkinScalar) { + const wasm = await this.wasm; + const mem = wasm.call('bbmalloc', msg.length + 4); + wasm.writeMemory(0, privateKey.toBuffer()); + wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + wasm.call('schnorr_construct_signature', mem, 0, 32, 64); - return new SchnorrSignature(Buffer.from(this.wasm.getMemorySlice(32, 96))); + return new SchnorrSignature(Buffer.from(wasm.getMemorySlice(32, 96))); } /** @@ -46,14 +48,15 @@ export class Schnorr { * @param sig - The Schnorr signature. * @returns True or false. 
*/ - public verifySignature(msg: Uint8Array, pubKey: PublicKey, sig: SchnorrSignature) { - const mem = this.wasm.call('bbmalloc', msg.length + 4); - this.wasm.writeMemory(0, pubKey.toBuffer()); - this.wasm.writeMemory(64, sig.s); - this.wasm.writeMemory(96, sig.e); - this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - this.wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); - const result = this.wasm.getMemorySlice(128, 129); + public async verifySignature(msg: Uint8Array, pubKey: PublicKey, sig: SchnorrSignature) { + const wasm = await this.wasm; + const mem = wasm.call('bbmalloc', msg.length + 4); + wasm.writeMemory(0, pubKey.toBuffer()); + wasm.writeMemory(64, sig.s); + wasm.writeMemory(96, sig.e); + wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); + const result = wasm.getMemorySlice(128, 129); return !Buffer.alloc(1, 0).equals(result); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts index f4afdd823469..d9dd20c1c653 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts @@ -4,7 +4,7 @@ import { BarretenbergSync } from '@aztec/bb.js'; * Secp256k1 elliptic curve operations. */ export class Secp256k1 { - private wasm = BarretenbergSync.getSingleton().getWasm(); + private wasm = BarretenbergSync.getSingleton().then(api => api.getWasm()); // prettier-ignore static generator = Buffer.from([ @@ -28,20 +28,22 @@ export class Secp256k1 { * @param scalar - Scalar to multiply by. * @returns Result of the multiplication. 
*/ - public mul(point: Uint8Array, scalar: Uint8Array) { - this.wasm.writeMemory(0, point); - this.wasm.writeMemory(64, scalar); - this.wasm.call('ecc_secp256k1__mul', 0, 64, 96); - return Buffer.from(this.wasm.getMemorySlice(96, 160)); + public async mul(point: Uint8Array, scalar: Uint8Array) { + const wasm = await this.wasm; + wasm.writeMemory(0, point); + wasm.writeMemory(64, scalar); + wasm.call('ecc_secp256k1__mul', 0, 64, 96); + return Buffer.from(wasm.getMemorySlice(96, 160)); } /** * Gets a random field element. * @returns Random field element. */ - public getRandomFr() { - this.wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); - return Buffer.from(this.wasm.getMemorySlice(0, 32)); + public async getRandomFr() { + const wasm = await this.wasm; + wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); + return Buffer.from(wasm.getMemorySlice(0, 32)); } /** @@ -49,9 +51,10 @@ export class Secp256k1 { * @param uint512Buf - The buffer to convert. * @returns Buffer representation of the field element. */ - public reduce512BufferToFr(uint512Buf: Buffer) { - this.wasm.writeMemory(0, uint512Buf); - this.wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Buffer.from(this.wasm.getMemorySlice(64, 96)); + public async reduce512BufferToFr(uint512Buf: Buffer) { + const wasm = await this.wasm; + wasm.writeMemory(0, uint512Buf); + wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Buffer.from(wasm.getMemorySlice(64, 96)); } } diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts index a170f49106db..d4a154a8bd8e 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts @@ -36,9 +36,9 @@ const sha256Fr = reduceFn(sha256, Fr); * ``` * @param artifact - Artifact to calculate the hash for. 
*/ -export function computeArtifactHash( +export async function computeArtifactHash( artifact: ContractArtifact | { privateFunctionRoot: Fr; unconstrainedFunctionRoot: Fr; metadataHash: Fr }, -): Fr { +): Promise { if ('privateFunctionRoot' in artifact && 'unconstrainedFunctionRoot' in artifact && 'metadataHash' in artifact) { const { privateFunctionRoot, unconstrainedFunctionRoot, metadataHash } = artifact; const preimage = [privateFunctionRoot, unconstrainedFunctionRoot, metadataHash].map(x => x.toBuffer()); @@ -51,9 +51,9 @@ export function computeArtifactHash( return artifactHash; } -export function computeArtifactHashPreimage(artifact: ContractArtifact) { - const privateFunctionRoot = computeArtifactFunctionTreeRoot(artifact, FunctionType.PRIVATE); - const unconstrainedFunctionRoot = computeArtifactFunctionTreeRoot(artifact, FunctionType.UNCONSTRAINED); +export async function computeArtifactHashPreimage(artifact: ContractArtifact) { + const privateFunctionRoot = await computeArtifactFunctionTreeRoot(artifact, FunctionType.PRIVATE); + const unconstrainedFunctionRoot = await computeArtifactFunctionTreeRoot(artifact, FunctionType.UNCONSTRAINED); const metadataHash = computeArtifactMetadataHash(artifact); return { privateFunctionRoot, unconstrainedFunctionRoot, metadataHash }; } @@ -62,31 +62,40 @@ export function computeArtifactMetadataHash(artifact: ContractArtifact) { return sha256Fr(Buffer.from(JSON.stringify({ name: artifact.name, outputs: artifact.outputs }), 'utf-8')); } -export function computeArtifactFunctionTreeRoot(artifact: ContractArtifact, fnType: FunctionType) { - const root = computeArtifactFunctionTree(artifact, fnType)?.root; +export async function computeArtifactFunctionTreeRoot(artifact: ContractArtifact, fnType: FunctionType) { + const root = (await computeArtifactFunctionTree(artifact, fnType))?.root; return root ? 
Fr.fromBuffer(root) : Fr.ZERO; } -export function computeArtifactFunctionTree(artifact: ContractArtifact, fnType: FunctionType): MerkleTree | undefined { - const leaves = computeFunctionLeaves(artifact, fnType); +export async function computeArtifactFunctionTree( + artifact: ContractArtifact, + fnType: FunctionType, +): Promise { + const leaves = await computeFunctionLeaves(artifact, fnType); // TODO(@spalladino) Consider implementing a null-object for empty trees if (leaves.length === 0) { return undefined; } const height = Math.ceil(Math.log2(leaves.length)); const calculator = new MerkleTreeCalculator(height, Buffer.alloc(32), getArtifactMerkleTreeHasher()); - return calculator.computeTree(leaves.map(x => x.toBuffer())); + return await calculator.computeTree(leaves.map(x => x.toBuffer())); } -function computeFunctionLeaves(artifact: ContractArtifact, fnType: FunctionType) { - return artifact.functions - .filter(f => f.functionType === fnType) - .map(f => ({ ...f, selector: FunctionSelector.fromNameAndParameters(f.name, f.parameters) })) - .sort((a, b) => a.selector.value - b.selector.value) - .map(computeFunctionArtifactHash); +async function computeFunctionLeaves(artifact: ContractArtifact, fnType: FunctionType) { + return await Promise.all( + ( + await Promise.all( + artifact.functions + .filter(f => f.functionType === fnType) + .map(async f => ({ ...f, selector: await FunctionSelector.fromNameAndParameters(f.name, f.parameters) })), + ) + ) + .sort((a, b) => a.selector.value - b.selector.value) + .map(computeFunctionArtifactHash), + ); } -export function computeFunctionArtifactHash( +export async function computeFunctionArtifactHash( fn: | FunctionArtifact | (Pick & { functionMetadataHash: Fr; selector: FunctionSelector }), diff --git a/yarn-project/circuits.js/src/contract/contract_address.test.ts b/yarn-project/circuits.js/src/contract/contract_address.test.ts index c8918ea0d3e1..8070c8cc1c70 100644 --- 
a/yarn-project/circuits.js/src/contract/contract_address.test.ts +++ b/yarn-project/circuits.js/src/contract/contract_address.test.ts @@ -52,15 +52,15 @@ describe('ContractAddress', () => { expect(result).toEqual(Fr.ZERO); }); - it('computeContractAddressFromInstance', () => { + it('computeContractAddressFromInstance', async () => { const secretKey = new Fr(2n); const salt = new Fr(3n); const contractClassId = new Fr(4n); const initializationHash = new Fr(5n); const deployer = AztecAddress.fromField(new Fr(7)); - const publicKeys = deriveKeys(secretKey).publicKeys; + const publicKeys = (await deriveKeys(secretKey)).publicKeys; - const address = computeContractAddressFromInstance({ + const address = await computeContractAddressFromInstance({ publicKeys, salt, contractClassId, diff --git a/yarn-project/circuits.js/src/contract/contract_address.ts b/yarn-project/circuits.js/src/contract/contract_address.ts index 25238a1d381e..dac00854cb8a 100644 --- a/yarn-project/circuits.js/src/contract/contract_address.ts +++ b/yarn-project/circuits.js/src/contract/contract_address.ts @@ -19,30 +19,30 @@ import { type ContractInstance } from './interfaces/contract_instance.js'; * ``` * @param instance - A contract instance for which to calculate the deployment address. */ -export function computeContractAddressFromInstance( +export async function computeContractAddressFromInstance( instance: | ContractInstance | ({ contractClassId: Fr; saltedInitializationHash: Fr } & Pick), -): AztecAddress { - const partialAddress = computePartialAddress(instance); - return computeAddress(instance.publicKeys, partialAddress); +): Promise { + const partialAddress = await computePartialAddress(instance); + return await computeAddress(instance.publicKeys, partialAddress); } /** * Computes the partial address defined as the hash of the contract class id and salted initialization hash. * @param instance - Contract instance for which to calculate the partial address. 
*/ -export function computePartialAddress( +export async function computePartialAddress( instance: | Pick | { contractClassId: Fr; saltedInitializationHash: Fr }, -): Fr { +): Promise { const saltedInitializationHash = 'saltedInitializationHash' in instance ? instance.saltedInitializationHash - : computeSaltedInitializationHash(instance); + : await computeSaltedInitializationHash(instance); - return poseidon2HashWithSeparator( + return await poseidon2HashWithSeparator( [instance.contractClassId, saltedInitializationHash], GeneratorIndex.PARTIAL_ADDRESS, ); @@ -52,10 +52,10 @@ export function computePartialAddress( * Computes the salted initialization hash for an address, defined as the hash of the salt and initialization hash. * @param instance - Contract instance for which to compute the salted initialization hash. */ -export function computeSaltedInitializationHash( +export async function computeSaltedInitializationHash( instance: Pick, -): Fr { - return poseidon2HashWithSeparator( +): Promise { + return await poseidon2HashWithSeparator( [instance.salt, instance.initializationHash, instance.deployer], GeneratorIndex.PARTIAL_ADDRESS, ); @@ -67,13 +67,13 @@ export function computeSaltedInitializationHash( * @param args - Unencoded arguments, will be encoded as fields according to the constructor function abi. * @returns The hash, or zero if no initialization function is provided. 
*/ -export function computeInitializationHash(initFn: FunctionAbi | undefined, args: any[]): Fr { +export async function computeInitializationHash(initFn: FunctionAbi | undefined, args: any[]): Promise { if (!initFn) { return Fr.ZERO; } - const selector = FunctionSelector.fromNameAndParameters(initFn.name, initFn.parameters); + const selector = await FunctionSelector.fromNameAndParameters(initFn.name, initFn.parameters); const flatArgs = encodeArguments(initFn, args); - return computeInitializationHashFromEncodedArgs(selector, flatArgs); + return await computeInitializationHashFromEncodedArgs(selector, flatArgs); } /** @@ -82,7 +82,10 @@ export function computeInitializationHash(initFn: FunctionAbi | undefined, args: * @param args - Encoded arguments. * @returns The hash. */ -export function computeInitializationHashFromEncodedArgs(initFn: FunctionSelector, encodedArgs: Fr[]): Fr { - const argsHash = computeVarArgsHash(encodedArgs); - return poseidon2HashWithSeparator([initFn, argsHash], GeneratorIndex.CONSTRUCTOR); +export async function computeInitializationHashFromEncodedArgs( + initFn: FunctionSelector, + encodedArgs: Fr[], +): Promise { + const argsHash = await computeVarArgsHash(encodedArgs); + return await poseidon2HashWithSeparator([initFn, argsHash], GeneratorIndex.CONSTRUCTOR); } diff --git a/yarn-project/circuits.js/src/contract/contract_class.test.ts b/yarn-project/circuits.js/src/contract/contract_class.test.ts index 2ae16daf901c..0368f1ef79e7 100644 --- a/yarn-project/circuits.js/src/contract/contract_class.test.ts +++ b/yarn-project/circuits.js/src/contract/contract_class.test.ts @@ -6,9 +6,9 @@ import { getBenchmarkContractArtifact } from '../tests/fixtures.js'; import { getContractClassFromArtifact } from './contract_class.js'; describe('ContractClass', () => { - it('creates a contract class from a contract compilation artifact', () => { + it('creates a contract class from a contract compilation artifact', async () => { const artifact = 
getBenchmarkContractArtifact(); - const contractClass = getContractClassFromArtifact({ + const contractClass = await getContractClassFromArtifact({ ...artifact, artifactHash: Fr.fromString('0x1234'), }); diff --git a/yarn-project/circuits.js/src/contract/contract_class.ts b/yarn-project/circuits.js/src/contract/contract_class.ts index 8809a2bbe022..ac6351a1b151 100644 --- a/yarn-project/circuits.js/src/contract/contract_class.ts +++ b/yarn-project/circuits.js/src/contract/contract_class.ts @@ -15,17 +15,20 @@ const cmpFunctionArtifacts = (a: T, b: a.selector.toField().cmp(b.selector.toField()); /** Creates a ContractClass from a contract compilation artifact. */ -export function getContractClassFromArtifact( +export async function getContractClassFromArtifact( artifact: ContractArtifact | ContractArtifactWithHash, -): ContractClassWithId & ContractClassIdPreimage { - const artifactHash = 'artifactHash' in artifact ? artifact.artifactHash : computeArtifactHash(artifact); - const artifactPublicFunctions: ContractClass['publicFunctions'] = artifact.functions - .filter(f => f.functionType === FunctionType.PUBLIC) - .map(f => ({ - selector: FunctionSelector.fromNameAndParameters(f.name, f.parameters), - bytecode: f.bytecode, - })) - .sort(cmpFunctionArtifacts); +): Promise { + const artifactHash = 'artifactHash' in artifact ? 
artifact.artifactHash : await computeArtifactHash(artifact); + const artifactPublicFunctions: ContractClass['publicFunctions'] = ( + await Promise.all( + artifact.functions + .filter(f => f.functionType === FunctionType.PUBLIC) + .map(async f => ({ + selector: await FunctionSelector.fromNameAndParameters(f.name, f.parameters), + bytecode: f.bytecode, + })), + ) + ).sort(cmpFunctionArtifacts); let packedBytecode = Buffer.alloc(0); let dispatchFunction: PublicFunction | undefined = undefined; @@ -41,10 +44,13 @@ export function getContractClassFromArtifact( packedBytecode = dispatchFunction.bytecode; } - const privateFunctions: ContractClass['privateFunctions'] = artifact.functions - .filter(f => f.functionType === FunctionType.PRIVATE) - .map(getContractClassPrivateFunctionFromArtifact) - .sort(cmpFunctionArtifacts); + const privateFunctions: ContractClass['privateFunctions'] = ( + await Promise.all( + artifact.functions + .filter(f => f.functionType === FunctionType.PRIVATE) + .map(getContractClassPrivateFunctionFromArtifact), + ) + ).sort(cmpFunctionArtifacts); const contractClass: ContractClass = { version: 1, @@ -54,24 +60,24 @@ export function getContractClassFromArtifact( packedBytecode, privateFunctions, }; - return { ...contractClass, ...computeContractClassIdWithPreimage(contractClass) }; + return { ...contractClass, ...(await computeContractClassIdWithPreimage(contractClass)) }; } -export function getContractClassPrivateFunctionFromArtifact( +export async function getContractClassPrivateFunctionFromArtifact( f: FunctionArtifact, -): ContractClass['privateFunctions'][number] { +): Promise { return { - selector: FunctionSelector.fromNameAndParameters(f.name, f.parameters), - vkHash: computeVerificationKeyHash(f), + selector: await FunctionSelector.fromNameAndParameters(f.name, f.parameters), + vkHash: await computeVerificationKeyHash(f), }; } /** * For a given private function, computes the hash of its vk. 
*/ -export function computeVerificationKeyHash(f: FunctionArtifact) { +export async function computeVerificationKeyHash(f: FunctionArtifact) { if (!f.verificationKey) { throw new Error(`Private function ${f.name} must have a verification key`); } - return hashVK(vkAsFieldsMegaHonk(Buffer.from(f.verificationKey, 'base64'))); + return await hashVK(await vkAsFieldsMegaHonk(Buffer.from(f.verificationKey, 'base64'))); } diff --git a/yarn-project/circuits.js/src/contract/contract_class_id.ts b/yarn-project/circuits.js/src/contract/contract_class_id.ts index ef401fe5691c..1d974ca0c84e 100644 --- a/yarn-project/circuits.js/src/contract/contract_class_id.ts +++ b/yarn-project/circuits.js/src/contract/contract_class_id.ts @@ -21,24 +21,24 @@ import { computePrivateFunctionsRoot } from './private_function.js'; * @param contractClass - Contract class. * @returns The identifier. */ -export function computeContractClassId(contractClass: ContractClass | ContractClassIdPreimage): Fr { - return computeContractClassIdWithPreimage(contractClass).id; +export async function computeContractClassId(contractClass: ContractClass | ContractClassIdPreimage): Promise { + return (await computeContractClassIdWithPreimage(contractClass)).id; } /** Computes a contract class id and returns it along with its preimage. */ -export function computeContractClassIdWithPreimage( +export async function computeContractClassIdWithPreimage( contractClass: ContractClass | ContractClassIdPreimage, -): ContractClassIdPreimage & { id: Fr } { +): Promise { const artifactHash = contractClass.artifactHash; const privateFunctionsRoot = 'privateFunctionsRoot' in contractClass ? contractClass.privateFunctionsRoot - : computePrivateFunctionsRoot(contractClass.privateFunctions); + : await computePrivateFunctionsRoot(contractClass.privateFunctions); const publicBytecodeCommitment = 'publicBytecodeCommitment' in contractClass ? 
contractClass.publicBytecodeCommitment - : computePublicBytecodeCommitment(contractClass.packedBytecode); - const id = poseidon2HashWithSeparator( + : await computePublicBytecodeCommitment(contractClass.packedBytecode); + const id = await poseidon2HashWithSeparator( [artifactHash, privateFunctionsRoot, publicBytecodeCommitment], GeneratorIndex.CONTRACT_LEAF, // TODO(@spalladino): Review all generator indices in this file ); @@ -46,9 +46,9 @@ export function computeContractClassIdWithPreimage( } /** Returns the preimage of a contract class id given a contract class. */ -export function computeContractClassIdPreimage(contractClass: ContractClass): ContractClassIdPreimage { - const privateFunctionsRoot = computePrivateFunctionsRoot(contractClass.privateFunctions); - const publicBytecodeCommitment = computePublicBytecodeCommitment(contractClass.packedBytecode); +export async function computeContractClassIdPreimage(contractClass: ContractClass): Promise { + const privateFunctionsRoot = await computePrivateFunctionsRoot(contractClass.privateFunctions); + const publicBytecodeCommitment = await computePublicBytecodeCommitment(contractClass.packedBytecode); return { artifactHash: contractClass.artifactHash, privateFunctionsRoot, publicBytecodeCommitment }; } @@ -59,7 +59,7 @@ export type ContractClassIdPreimage = { publicBytecodeCommitment: Fr; }; -export function computePublicBytecodeCommitment(packedBytecode: Buffer) { +export async function computePublicBytecodeCommitment(packedBytecode: Buffer) { // Encode the buffer into field elements (chunked into 32 bytes each) const encodedBytecode: Fr[] = bufferAsFields(packedBytecode, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS); // The first element is the length of the bytecode (in bytes) diff --git a/yarn-project/circuits.js/src/contract/contract_instance.test.ts b/yarn-project/circuits.js/src/contract/contract_instance.test.ts index 86818076f933..2fbab0a8c70b 100644 --- 
a/yarn-project/circuits.js/src/contract/contract_instance.test.ts +++ b/yarn-project/circuits.js/src/contract/contract_instance.test.ts @@ -1,8 +1,8 @@ import { SerializableContractInstance } from './contract_instance.js'; describe('ContractInstance', () => { - it('can serialize and deserialize an instance', () => { - const instance = SerializableContractInstance.random(); + it('can serialize and deserialize an instance', async () => { + const instance = await SerializableContractInstance.random(); expect(SerializableContractInstance.fromBuffer(instance.toBuffer())).toEqual(instance); }); }); diff --git a/yarn-project/circuits.js/src/contract/contract_instance.ts b/yarn-project/circuits.js/src/contract/contract_instance.ts index 9d60d1d7e4a6..c607530155fd 100644 --- a/yarn-project/circuits.js/src/contract/contract_instance.ts +++ b/yarn-project/circuits.js/src/contract/contract_instance.ts @@ -68,14 +68,14 @@ export class SerializableContractInstance { }); } - static random(opts: Partial> = {}) { + static async random(opts: Partial> = {}) { return new SerializableContractInstance({ version: VERSION, salt: Fr.random(), deployer: AztecAddress.random(), contractClassId: Fr.random(), initializationHash: Fr.random(), - publicKeys: PublicKeys.random(), + publicKeys: await PublicKeys.random(), ...opts, }); } @@ -98,7 +98,7 @@ export class SerializableContractInstance { * @param opts - Options for the deployment. * @returns - The contract instance */ -export function getContractInstanceFromDeployParams( +export async function getContractInstanceFromDeployParams( artifact: ContractArtifact, opts: { constructorArtifact?: FunctionArtifact | string; @@ -108,20 +108,20 @@ export function getContractInstanceFromDeployParams( publicKeys?: PublicKeys; deployer?: AztecAddress; }, -): ContractInstanceWithAddress { +): Promise { const args = opts.constructorArgs ?? []; const salt = opts.salt ?? 
Fr.random(); const constructorArtifact = getConstructorArtifact(artifact, opts.constructorArtifact); const deployer = opts.deployer ?? AztecAddress.ZERO; - const contractClass = getContractClassFromArtifact(artifact); - const contractClassId = computeContractClassId(contractClass); + const contractClass = await getContractClassFromArtifact(artifact); + const contractClassId = await computeContractClassId(contractClass); const initializationHash = constructorArtifact && opts?.skipArgsDecoding - ? computeInitializationHashFromEncodedArgs( - FunctionSelector.fromNameAndParameters(constructorArtifact?.name, constructorArtifact?.parameters), + ? await computeInitializationHashFromEncodedArgs( + await FunctionSelector.fromNameAndParameters(constructorArtifact?.name, constructorArtifact?.parameters), args, ) - : computeInitializationHash(constructorArtifact, args); + : await computeInitializationHash(constructorArtifact, args); const publicKeys = opts.publicKeys ?? PublicKeys.default(); const instance: ContractInstance = { @@ -133,7 +133,7 @@ export function getContractInstanceFromDeployParams( version: 1, }; - return { ...instance, address: computeContractAddressFromInstance(instance) }; + return { ...instance, address: await computeContractAddressFromInstance(instance) }; } function getConstructorArtifact( diff --git a/yarn-project/circuits.js/src/contract/private_function.test.ts b/yarn-project/circuits.js/src/contract/private_function.test.ts index e3c72db9bfc1..7eab1eb4d4eb 100644 --- a/yarn-project/circuits.js/src/contract/private_function.test.ts +++ b/yarn-project/circuits.js/src/contract/private_function.test.ts @@ -12,25 +12,25 @@ describe('PrivateFunction', () => { { selector: makeSelector(3), vkHash: fr(4) }, ]; - it('computes merkle tree', () => { - const tree = computePrivateFunctionsTree(privateFunctions); + it('computes merkle tree', async () => { + const tree = await computePrivateFunctionsTree(privateFunctions); expect(tree.nodes.map(node => 
node.toString())).toMatchSnapshot(); }); - it('computes merkle tree root', () => { - const root = computePrivateFunctionsRoot(privateFunctions); + it('computes merkle tree root', async () => { + const root = await computePrivateFunctionsRoot(privateFunctions); expect(root.toString()).toMatchSnapshot(); }); - it('tree and root methods agree', () => { - const tree = computePrivateFunctionsTree(privateFunctions); - const root = computePrivateFunctionsRoot(privateFunctions); + it('tree and root methods agree', async () => { + const tree = await computePrivateFunctionsTree(privateFunctions); + const root = await computePrivateFunctionsRoot(privateFunctions); expect(Fr.fromBuffer(tree.root).equals(root)).toBe(true); }); - it('sorts functions before computing tree', () => { - const root = computePrivateFunctionsRoot(privateFunctions); - const rootReversed = computePrivateFunctionsRoot([...privateFunctions].reverse()); + it('sorts functions before computing tree', async () => { + const root = await computePrivateFunctionsRoot(privateFunctions); + const rootReversed = await computePrivateFunctionsRoot([...privateFunctions].reverse()); expect(root.equals(rootReversed)).toBe(true); }); }); diff --git a/yarn-project/circuits.js/src/contract/private_function.ts b/yarn-project/circuits.js/src/contract/private_function.ts index 16fa61a36b9a..c6d1f7f0423c 100644 --- a/yarn-project/circuits.js/src/contract/private_function.ts +++ b/yarn-project/circuits.js/src/contract/private_function.ts @@ -11,32 +11,34 @@ let privateFunctionTreeCalculator: MerkleTreeCalculator | undefined; const PRIVATE_FUNCTION_SIZE = 2; /** Returns a Merkle tree for the set of private functions in a contract. 
*/ -export function computePrivateFunctionsTree(fns: PrivateFunction[]): MerkleTree { - return getPrivateFunctionTreeCalculator().computeTree(computePrivateFunctionLeaves(fns)); +export async function computePrivateFunctionsTree(fns: PrivateFunction[]): Promise { + return (await getPrivateFunctionTreeCalculator()).computeTree(await computePrivateFunctionLeaves(fns)); } /** Returns the Merkle tree root for the set of private functions in a contract. */ -export function computePrivateFunctionsRoot(fns: PrivateFunction[]): Fr { - return Fr.fromBuffer(getPrivateFunctionTreeCalculator().computeTreeRoot(computePrivateFunctionLeaves(fns))); +export async function computePrivateFunctionsRoot(fns: PrivateFunction[]): Promise { + return Fr.fromBuffer( + await (await getPrivateFunctionTreeCalculator()).computeTreeRoot(await computePrivateFunctionLeaves(fns)), + ); } -function computePrivateFunctionLeaves(fns: PrivateFunction[]): Buffer[] { +async function computePrivateFunctionLeaves(fns: PrivateFunction[]): Promise { const leaves = [...fns].sort((a, b) => a.selector.value - b.selector.value); - return leaves.map(computePrivateFunctionLeaf); + return await Promise.all(leaves.map(computePrivateFunctionLeaf)); } /** Returns the leaf for a given private function. 
*/ -export function computePrivateFunctionLeaf(fn: PrivateFunction): Buffer { - return poseidon2HashWithSeparator([fn.selector, fn.vkHash], GeneratorIndex.FUNCTION_LEAF).toBuffer(); +export async function computePrivateFunctionLeaf(fn: PrivateFunction): Promise { + return (await poseidon2HashWithSeparator([fn.selector, fn.vkHash], GeneratorIndex.FUNCTION_LEAF)).toBuffer(); } -function getPrivateFunctionTreeCalculator(): MerkleTreeCalculator { +async function getPrivateFunctionTreeCalculator(): Promise { if (!privateFunctionTreeCalculator) { - const functionTreeZeroLeaf = pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(0)).toBuffer(); + const functionTreeZeroLeaf = (await pedersenHash(new Array(PRIVATE_FUNCTION_SIZE).fill(0))).toBuffer(); privateFunctionTreeCalculator = new MerkleTreeCalculator( FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf, - (left, right) => poseidon2Hash([left, right]).toBuffer(), + async (left, right) => (await poseidon2Hash([left, right])).toBuffer(), ); } return privateFunctionTreeCalculator; diff --git a/yarn-project/circuits.js/src/contract/private_function_membership_proof.test.ts b/yarn-project/circuits.js/src/contract/private_function_membership_proof.test.ts index 3a57e9787b24..1f140aa6da4d 100644 --- a/yarn-project/circuits.js/src/contract/private_function_membership_proof.test.ts +++ b/yarn-project/circuits.js/src/contract/private_function_membership_proof.test.ts @@ -17,16 +17,16 @@ describe('private_function_membership_proof', () => { let vkHash: Fr; let selector: FunctionSelector; - beforeAll(() => { + beforeAll(async () => { artifact = getBenchmarkContractArtifact(); - contractClass = getContractClassFromArtifact(artifact); + contractClass = await getContractClassFromArtifact(artifact); privateFunction = artifact.functions.findLast(fn => fn.functionType === FunctionType.PRIVATE)!; - vkHash = computeVerificationKeyHash(privateFunction); - selector = FunctionSelector.fromNameAndParameters(privateFunction); + vkHash = await 
computeVerificationKeyHash(privateFunction); + selector = await FunctionSelector.fromNameAndParameters(privateFunction); }); - it('computes and verifies a proof', () => { - const proof = createPrivateFunctionMembershipProof(selector, artifact); + it('computes and verifies a proof', async () => { + const proof = await createPrivateFunctionMembershipProof(selector, artifact); const fn = { ...privateFunction, ...proof, selector, vkHash }; expect(isValidPrivateFunctionMembershipProof(fn, contractClass)).toBeTruthy(); }); @@ -37,8 +37,8 @@ describe('private_function_membership_proof', () => { 'functionMetadataHash', 'unconstrainedFunctionsArtifactTreeRoot', 'privateFunctionTreeSiblingPath', - ] as const)('fails proof if %s is mangled', field => { - const proof = createPrivateFunctionMembershipProof(selector, artifact); + ] as const)('fails proof if %s is mangled', async field => { + const proof = await createPrivateFunctionMembershipProof(selector, artifact); const original = proof[field]; const mangled = Array.isArray(original) ? 
[Fr.random(), ...original.slice(1)] : Fr.random(); const wrong = { ...proof, [field]: mangled }; diff --git a/yarn-project/circuits.js/src/contract/private_function_membership_proof.ts b/yarn-project/circuits.js/src/contract/private_function_membership_proof.ts index 0d46fc62f5f0..a4af51ecb38d 100644 --- a/yarn-project/circuits.js/src/contract/private_function_membership_proof.ts +++ b/yarn-project/circuits.js/src/contract/private_function_membership_proof.ts @@ -1,4 +1,4 @@ -import { type ContractArtifact, type FunctionSelector, FunctionType } from '@aztec/foundation/abi'; +import { type ContractArtifact, FunctionSelector, FunctionType } from '@aztec/foundation/abi'; import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -25,37 +25,46 @@ import { computePrivateFunctionLeaf, computePrivateFunctionsTree } from './priva * @param selector - Selector of the function to create the proof for. * @param artifact - Artifact of the contract class where the function is defined. 
*/ -export function createPrivateFunctionMembershipProof( +export async function createPrivateFunctionMembershipProof( selector: FunctionSelector, artifact: ContractArtifact, -): PrivateFunctionMembershipProof { +): Promise { const log = createDebugLogger('aztec:circuits:function_membership_proof'); // Locate private function definition and artifact - const privateFunctions = artifact.functions - .filter(fn => fn.functionType === FunctionType.PRIVATE) - .map(getContractClassPrivateFunctionFromArtifact); + const privateFunctions = await Promise.all( + artifact.functions + .filter(fn => fn.functionType === FunctionType.PRIVATE) + .map(getContractClassPrivateFunctionFromArtifact), + ); const privateFunction = privateFunctions.find(fn => fn.selector.equals(selector)); - const privateFunctionArtifact = artifact.functions.find(fn => selector.equals(fn)); + const privateFunctionArtifact = ( + await Promise.all( + artifact.functions.map(async fn => { + const fs = await FunctionSelector.fromNameAndParameters(fn.name, fn.parameters); + return fs.equals(selector) ? fn : undefined; + }), + ) + ).find(fn => !!fn); if (!privateFunction || !privateFunctionArtifact) { throw new Error(`Private function with selector ${selector.toString()} not found`); } // Compute preimage for the artifact hash const { unconstrainedFunctionRoot: unconstrainedFunctionsArtifactTreeRoot, metadataHash: artifactMetadataHash } = - computeArtifactHashPreimage(artifact); + await computeArtifactHashPreimage(artifact); // We need two sibling paths because private function information is split across two trees: // The "private function tree" captures the selectors and verification keys, and is used in the kernel circuit for verifying the proof generated by the app circuit. 
- const functionLeaf = computePrivateFunctionLeaf(privateFunction); - const functionsTree = computePrivateFunctionsTree(privateFunctions); + const functionLeaf = await computePrivateFunctionLeaf(privateFunction); + const functionsTree = await computePrivateFunctionsTree(privateFunctions); const functionsTreeLeafIndex = functionsTree.getIndex(functionLeaf); const functionsTreeSiblingPath = functionsTree.getSiblingPath(functionsTreeLeafIndex).map(Fr.fromBuffer); // And the "artifact tree" captures function bytecode and metadata, and is used by the pxe to check that its executing the code it's supposed to be executing, but it never goes into circuits. const functionMetadataHash = computeFunctionMetadataHash(privateFunctionArtifact); - const functionArtifactHash = computeFunctionArtifactHash({ ...privateFunctionArtifact, functionMetadataHash }); - const artifactTree = computeArtifactFunctionTree(artifact, FunctionType.PRIVATE)!; + const functionArtifactHash = await computeFunctionArtifactHash({ ...privateFunctionArtifact, functionMetadataHash }); + const artifactTree = (await computeArtifactFunctionTree(artifact, FunctionType.PRIVATE))!; const artifactTreeLeafIndex = artifactTree.getIndex(functionArtifactHash.toBuffer()); const artifactTreeSiblingPath = artifactTree.getSiblingPath(artifactTreeLeafIndex).map(Fr.fromBuffer); @@ -103,20 +112,20 @@ export function createPrivateFunctionMembershipProof( * @param fn - Function to check membership proof for. * @param contractClass - In which contract class the function is expected to be. 
*/ -export function isValidPrivateFunctionMembershipProof( +export async function isValidPrivateFunctionMembershipProof( fn: ExecutablePrivateFunctionWithMembershipProof, contractClass: Pick, ) { const log = createDebugLogger('aztec:circuits:function_membership_proof'); // Check private function tree membership - const functionLeaf = computePrivateFunctionLeaf(fn); + const functionLeaf = await computePrivateFunctionLeaf(fn); const computedPrivateFunctionTreeRoot = Fr.fromBuffer( - computeRootFromSiblingPath( + await computeRootFromSiblingPath( functionLeaf, fn.privateFunctionTreeSiblingPath.map(fr => fr.toBuffer()), fn.privateFunctionTreeLeafIndex, - (left, right) => poseidon2Hash([left, right]).toBuffer(), + async (left, right) => (await poseidon2Hash([left, right])).toBuffer(), ), ); if (!contractClass.privateFunctionsRoot.equals(computedPrivateFunctionTreeRoot)) { @@ -129,16 +138,16 @@ export function isValidPrivateFunctionMembershipProof( } // Check artifact hash - const functionArtifactHash = computeFunctionArtifactHash(fn); + const functionArtifactHash = await computeFunctionArtifactHash(fn); const computedArtifactPrivateFunctionTreeRoot = Fr.fromBuffer( - computeRootFromSiblingPath( + await computeRootFromSiblingPath( functionArtifactHash.toBuffer(), fn.artifactTreeSiblingPath.map(fr => fr.toBuffer()), fn.artifactTreeLeafIndex, getArtifactMerkleTreeHasher(), ), ); - const computedArtifactHash = computeArtifactHash({ + const computedArtifactHash = await computeArtifactHash({ privateFunctionRoot: computedArtifactPrivateFunctionTreeRoot, unconstrainedFunctionRoot: fn.unconstrainedFunctionsArtifactTreeRoot, metadataHash: fn.artifactMetadataHash, diff --git a/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.test.ts b/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.test.ts index f795c6f29b60..a5b8fb41cb65 100644 --- a/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.test.ts +++ 
b/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.test.ts @@ -17,43 +17,43 @@ describe('unconstrained_function_membership_proof', () => { let vkHash: Fr; let selector: FunctionSelector; - beforeEach(() => { + beforeEach(async () => { artifact = getTestContractArtifact(); - contractClass = getContractClassFromArtifact(artifact); + contractClass = await getContractClassFromArtifact(artifact); unconstrainedFunction = artifact.functions.findLast(fn => fn.functionType === FunctionType.UNCONSTRAINED)!; - selector = FunctionSelector.fromNameAndParameters(unconstrainedFunction); + selector = await FunctionSelector.fromNameAndParameters(unconstrainedFunction); }); const isUnconstrained = (fn: { functionType: FunctionType }) => fn.functionType === FunctionType.UNCONSTRAINED; - it('computes and verifies a proof', () => { + it('computes and verifies a proof', async () => { expect(unconstrainedFunction).toBeDefined(); - const proof = createUnconstrainedFunctionMembershipProof(selector, artifact); + const proof = await createUnconstrainedFunctionMembershipProof(selector, artifact); const fn = { ...unconstrainedFunction, ...proof, selector }; expect(isValidUnconstrainedFunctionMembershipProof(fn, contractClass)).toBeTruthy(); }); - it('handles a contract with a single function', () => { + it('handles a contract with a single function', async () => { // Remove all unconstrained functions from the contract but one const unconstrainedFns = artifact.functions.filter(isUnconstrained); artifact.functions = artifact.functions.filter(fn => !isUnconstrained(fn) || fn === unconstrainedFns[0]); expect(artifact.functions.filter(isUnconstrained).length).toBe(1); const unconstrainedFunction = unconstrainedFns[0]; - const selector = FunctionSelector.fromNameAndParameters(unconstrainedFunction); + const selector = await FunctionSelector.fromNameAndParameters(unconstrainedFunction); - const proof = createUnconstrainedFunctionMembershipProof(selector, artifact); + 
const proof = await createUnconstrainedFunctionMembershipProof(selector, artifact); expect(proof.artifactTreeSiblingPath.length).toBe(0); const fn = { ...unconstrainedFunction, ...proof, selector }; - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); expect(isValidUnconstrainedFunctionMembershipProof(fn, contractClass)).toBeTruthy(); }); test.each(['artifactTreeSiblingPath', 'artifactMetadataHash', 'functionMetadataHash'] as const)( 'fails proof if %s is mangled', - field => { - const proof = createUnconstrainedFunctionMembershipProof(selector, artifact); + async field => { + const proof = await createUnconstrainedFunctionMembershipProof(selector, artifact); const original = proof[field]; const mangled = Array.isArray(original) ? [Fr.random(), ...original.slice(1)] : Fr.random(); const wrong = { ...proof, [field]: mangled }; diff --git a/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.ts b/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.ts index 309078338f76..0ec9bb6f4f23 100644 --- a/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.ts +++ b/yarn-project/circuits.js/src/contract/unconstrained_function_membership_proof.ts @@ -1,4 +1,4 @@ -import { type ContractArtifact, type FunctionSelector, FunctionType } from '@aztec/foundation/abi'; +import { type ContractArtifact, FunctionSelector, FunctionType } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -22,14 +22,25 @@ import { * @param selector - Selector of the function to create the proof for. * @param artifact - Artifact of the contract class where the function is defined. 
*/ -export function createUnconstrainedFunctionMembershipProof( +export async function createUnconstrainedFunctionMembershipProof( selector: FunctionSelector, artifact: ContractArtifact, -): UnconstrainedFunctionMembershipProof { +): Promise { const log = createDebugLogger('aztec:circuits:function_membership_proof'); // Locate function artifact - const fn = artifact.functions.find(fn => selector.equals(fn)); + const fn = ( + await Promise.all( + artifact.functions.map(async fn => { + const fs = await FunctionSelector.fromNameAndParameters(fn.name, fn.parameters); + if (!fs.equals(selector)) { + return undefined; + } + return fn; + }), + ) + ).find(fn => !!fn); + if (!fn) { throw new Error(`Function with selector ${selector.toString()} not found`); } else if (fn.functionType !== FunctionType.UNCONSTRAINED) { @@ -38,12 +49,12 @@ export function createUnconstrainedFunctionMembershipProof( // Compute preimage for the artifact hash const { privateFunctionRoot: privateFunctionsArtifactTreeRoot, metadataHash: artifactMetadataHash } = - computeArtifactHashPreimage(artifact); + await computeArtifactHashPreimage(artifact); // Compute the sibling path for the "artifact tree" const functionMetadataHash = computeFunctionMetadataHash(fn); - const functionArtifactHash = computeFunctionArtifactHash({ ...fn, functionMetadataHash }); - const artifactTree = computeArtifactFunctionTree(artifact, FunctionType.UNCONSTRAINED)!; + const functionArtifactHash = await computeFunctionArtifactHash({ ...fn, functionMetadataHash }); + const artifactTree = (await computeArtifactFunctionTree(artifact, FunctionType.UNCONSTRAINED))!; const artifactTreeLeafIndex = artifactTree.getIndex(functionArtifactHash.toBuffer()); const artifactTreeSiblingPath = artifactTree.getSiblingPath(artifactTreeLeafIndex).map(Fr.fromBuffer); @@ -81,22 +92,22 @@ export function createUnconstrainedFunctionMembershipProof( * @param fn - Function to check membership proof for. 
* @param contractClass - In which contract class the function is expected to be. */ -export function isValidUnconstrainedFunctionMembershipProof( +export async function isValidUnconstrainedFunctionMembershipProof( fn: UnconstrainedFunctionWithMembershipProof, contractClass: Pick, ) { const log = createDebugLogger('aztec:circuits:function_membership_proof'); - const functionArtifactHash = computeFunctionArtifactHash(fn); + const functionArtifactHash = await computeFunctionArtifactHash(fn); const computedArtifactFunctionTreeRoot = Fr.fromBuffer( - computeRootFromSiblingPath( + await computeRootFromSiblingPath( functionArtifactHash.toBuffer(), fn.artifactTreeSiblingPath.map(fr => fr.toBuffer()), fn.artifactTreeLeafIndex, getArtifactMerkleTreeHasher(), ), ); - const computedArtifactHash = computeArtifactHash({ + const computedArtifactHash = await computeArtifactHash({ privateFunctionRoot: fn.privateFunctionsArtifactTreeRoot, unconstrainedFunctionRoot: computedArtifactFunctionTreeRoot, metadataHash: fn.artifactMetadataHash, diff --git a/yarn-project/circuits.js/src/hash/hash.test.ts b/yarn-project/circuits.js/src/hash/hash.test.ts index fc1f0bc9018d..afd8b5cf54b6 100644 --- a/yarn-project/circuits.js/src/hash/hash.test.ts +++ b/yarn-project/circuits.js/src/hash/hash.test.ts @@ -18,10 +18,10 @@ import { describe('hash', () => { setupCustomSnapshotSerializers(expect); - it('computes note hash nonce', () => { + it('computes note hash nonce', async () => { const nullifierZero = new Fr(123n); const noteHashIndex = 456; - const res = computeNoteHashNonce(nullifierZero, noteHashIndex); + const res = await computeNoteHashNonce(nullifierZero, noteHashIndex); expect(res).toMatchSnapshot(); }); @@ -32,17 +32,17 @@ describe('hash', () => { expect(res).toMatchSnapshot(); }); - it('computes siloed note hash', () => { + it('computes siloed note hash', async () => { const contractAddress = new AztecAddress(new Fr(123n).toBuffer()); const uniqueNoteHash = new Fr(456); - const res = 
siloNoteHash(contractAddress, uniqueNoteHash); + const res = await siloNoteHash(contractAddress, uniqueNoteHash); expect(res).toMatchSnapshot(); }); - it('computes siloed nullifier', () => { + it('computes siloed nullifier', async () => { const contractAddress = new AztecAddress(new Fr(123n).toBuffer()); const innerNullifier = new Fr(456); - const res = siloNullifier(contractAddress, innerNullifier); + const res = await siloNullifier(contractAddress, innerNullifier); expect(res).toMatchSnapshot(); }); @@ -52,10 +52,10 @@ describe('hash', () => { expect(res).toMatchSnapshot(); }); - it('computes public data tree leaf slot', () => { + it('computes public data tree leaf slot', async () => { const contractAddress = makeAztecAddress(); const value = new Fr(3n); - const res = computePublicDataTreeLeafSlot(contractAddress, value); + const res = await computePublicDataTreeLeafSlot(contractAddress, value); expect(res).toMatchSnapshot(); }); @@ -76,9 +76,9 @@ describe('hash', () => { expect(res).toMatchSnapshot(); }); - it('compute secret message hash', () => { + it('compute secret message hash', async () => { const value = new Fr(8n); - const hash = computeSecretHash(value); + const hash = await computeSecretHash(value); expect(hash).toMatchSnapshot(); }); diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts index a6ca04d52272..5584461dc519 100644 --- a/yarn-project/circuits.js/src/hash/hash.ts +++ b/yarn-project/circuits.js/src/hash/hash.ts @@ -10,8 +10,8 @@ import { type ScopedL2ToL1Message } from '../structs/l2_to_l1_message.js'; * @param vkBuf - The verification key as fields. * @returns The hash of the verification key. 
*/ -export function hashVK(keyAsFields: Fr[]): Fr { - return poseidon2Hash(keyAsFields); +export async function hashVK(keyAsFields: Fr[]): Promise { + return await poseidon2Hash(keyAsFields); } /** @@ -20,8 +20,8 @@ export function hashVK(keyAsFields: Fr[]): Fr { * @param noteHashIndex - The index of the note hash. * @returns A note hash nonce. */ -export function computeNoteHashNonce(nullifierZero: Fr, noteHashIndex: number): Fr { - return poseidon2HashWithSeparator([nullifierZero, noteHashIndex], GeneratorIndex.NOTE_HASH_NONCE); +export async function computeNoteHashNonce(nullifierZero: Fr, noteHashIndex: number): Promise { + return await poseidon2HashWithSeparator([nullifierZero, noteHashIndex], GeneratorIndex.NOTE_HASH_NONCE); } /** @@ -31,8 +31,8 @@ export function computeNoteHashNonce(nullifierZero: Fr, noteHashIndex: number): * @param uniqueNoteHash - The unique note hash to silo. * @returns A siloed note hash. */ -export function siloNoteHash(contract: AztecAddress, uniqueNoteHash: Fr): Fr { - return poseidon2HashWithSeparator([contract, uniqueNoteHash], GeneratorIndex.SILOED_NOTE_HASH); +export async function siloNoteHash(contract: AztecAddress, uniqueNoteHash: Fr): Promise { + return await poseidon2HashWithSeparator([contract, uniqueNoteHash], GeneratorIndex.SILOED_NOTE_HASH); } /** @@ -42,8 +42,8 @@ export function siloNoteHash(contract: AztecAddress, uniqueNoteHash: Fr): Fr { * @param noteHash - A note hash. * @returns A unique note hash. */ -export function computeUniqueNoteHash(nonce: Fr, noteHash: Fr): Fr { - return poseidon2HashWithSeparator([nonce, noteHash], GeneratorIndex.UNIQUE_NOTE_HASH); +export async function computeUniqueNoteHash(nonce: Fr, noteHash: Fr): Promise { + return await poseidon2HashWithSeparator([nonce, noteHash], GeneratorIndex.UNIQUE_NOTE_HASH); } /** @@ -53,8 +53,8 @@ export function computeUniqueNoteHash(nonce: Fr, noteHash: Fr): Fr { * @param innerNullifier - The nullifier to silo. * @returns A siloed nullifier. 
*/ -export function siloNullifier(contract: AztecAddress, innerNullifier: Fr): Fr { - return poseidon2HashWithSeparator([contract, innerNullifier], GeneratorIndex.OUTER_NULLIFIER); +export async function siloNullifier(contract: AztecAddress, innerNullifier: Fr): Promise { + return await poseidon2HashWithSeparator([contract, innerNullifier], GeneratorIndex.OUTER_NULLIFIER); } /** @@ -74,8 +74,8 @@ export function computePublicDataTreeValue(value: Fr): Fr { * @returns Public data tree index computed from contract address and storage slot. */ -export function computePublicDataTreeLeafSlot(contractAddress: AztecAddress, storageSlot: Fr): Fr { - return poseidon2HashWithSeparator([contractAddress, storageSlot], GeneratorIndex.PUBLIC_LEAF_INDEX); +export async function computePublicDataTreeLeafSlot(contractAddress: AztecAddress, storageSlot: Fr): Promise { + return await poseidon2HashWithSeparator([contractAddress, storageSlot], GeneratorIndex.PUBLIC_LEAF_INDEX); } /** @@ -83,12 +83,12 @@ export function computePublicDataTreeLeafSlot(contractAddress: AztecAddress, sto * @param args - Arguments to hash. * @returns Pedersen hash of the arguments. */ -export function computeVarArgsHash(args: Fr[]) { +export async function computeVarArgsHash(args: Fr[]) { if (args.length === 0) { return Fr.ZERO; } - return poseidon2HashWithSeparator(args, GeneratorIndex.FUNCTION_ARGS); + return await poseidon2HashWithSeparator(args, GeneratorIndex.FUNCTION_ARGS); } /** @@ -97,12 +97,15 @@ export function computeVarArgsHash(args: Fr[]) { * @param secret - The secret to hash (could be generated however you want e.g. 
`Fr.random()`) * @returns The hash */ -export function computeSecretHash(secret: Fr) { - return poseidon2HashWithSeparator([secret], GeneratorIndex.SECRET_HASH); +export async function computeSecretHash(secret: Fr) { + return await poseidon2HashWithSeparator([secret], GeneratorIndex.SECRET_HASH); } -export function computeL1ToL2MessageNullifier(contract: AztecAddress, messageHash: Fr, secret: Fr) { - const innerMessageNullifier = poseidon2HashWithSeparator([messageHash, secret], GeneratorIndex.MESSAGE_NULLIFIER); +export async function computeL1ToL2MessageNullifier(contract: AztecAddress, messageHash: Fr, secret: Fr) { + const innerMessageNullifier = await poseidon2HashWithSeparator( + [messageHash, secret], + GeneratorIndex.MESSAGE_NULLIFIER, + ); return siloNullifier(contract, innerMessageNullifier); } diff --git a/yarn-project/circuits.js/src/hash/map_slot.ts b/yarn-project/circuits.js/src/hash/map_slot.ts index 8ffed794039d..20bf8964e4e1 100644 --- a/yarn-project/circuits.js/src/hash/map_slot.ts +++ b/yarn-project/circuits.js/src/hash/map_slot.ts @@ -7,12 +7,12 @@ import { type Fr } from '@aztec/foundation/fields'; * @param key - The key of the map. * @returns The slot in the contract storage where the value is stored. */ -export function deriveStorageSlotInMap( +export async function deriveStorageSlotInMap( mapSlot: Fr | bigint, key: { /** Convert key to a field. 
*/ toField: () => Fr; }, -): Fr { - return poseidon2Hash([mapSlot, key.toField()]); +): Promise { + return await poseidon2Hash([mapSlot, key.toField()]); } diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts index ef0a049e2a21..d60bf20840c7 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts @@ -21,7 +21,7 @@ import { buildNoteHashReadRequestHints } from './build_note_hash_read_request_hi describe('buildNoteHashReadRequestHints', () => { const contractAddress = AztecAddress.random(); const settledNoteHashInnerValues = [111, 222, 333]; - const settledNoteHashes = settledNoteHashInnerValues.map(noteHash => siloNoteHash(contractAddress, new Fr(noteHash))); + let settledNoteHashes: Fr[]; const settledLeafIndexes = [1010n, 2020n, 3030n]; const oracle = { getNoteHashMembershipWitness: (leafIndex: bigint) => @@ -83,10 +83,13 @@ describe('buildNoteHashReadRequestHints', () => { futureNoteHashes, ); - beforeEach(() => { + beforeEach(async () => { noteHashReadRequests = makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); noteHashes = makeTuple(MAX_NOTE_HASHES_PER_TX, i => makeNoteHash(getNoteHashValue(i))); noteHashLeafIndexMap = new Map(); + settledNoteHashes = await Promise.all( + settledNoteHashInnerValues.map(noteHash => siloNoteHash(contractAddress, new Fr(noteHash))), + ); expectedHints = NoteHashReadRequestHintsBuilder.empty( MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts index 7a749b9a9b8a..1e7cce086a86 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts +++ 
b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts @@ -86,7 +86,7 @@ export async function buildNoteHashReadRequestHintsFromResetStates( +export async function buildSiloedNullifierReadRequestHints( oracle: { getNullifierMembershipWitness(nullifier: Fr): Promise; }, @@ -143,8 +145,10 @@ export function buildSiloedNullifierReadRequestHints - new ReadRequest(siloNullifier(r.contractAddress, r.value), r.counter).scope(AztecAddress.ZERO), + await Promise.all( + getNonEmptyItems(nullifierReadRequests).map(async r => + new ReadRequest(await siloNullifier(r.contractAddress, r.value), r.counter).scope(AztecAddress.ZERO), + ), ), ScopedReadRequest.empty(), MAX_NULLIFIER_READ_REQUESTS_PER_TX, diff --git a/yarn-project/circuits.js/src/keys/derivation.ts b/yarn-project/circuits.js/src/keys/derivation.ts index 495ea964cf85..67789236388f 100644 --- a/yarn-project/circuits.js/src/keys/derivation.ts +++ b/yarn-project/circuits.js/src/keys/derivation.ts @@ -9,17 +9,20 @@ import { PublicKeys } from '../types/public_keys.js'; import { type KeyPrefix } from './key_types.js'; import { getKeyGenerator } from './utils.js'; -export function computeAppNullifierSecretKey(masterNullifierSecretKey: GrumpkinScalar, app: AztecAddress): Fr { - return computeAppSecretKey(masterNullifierSecretKey, app, 'n'); // 'n' is the key prefix for nullifier secret key +export async function computeAppNullifierSecretKey( + masterNullifierSecretKey: GrumpkinScalar, + app: AztecAddress, +): Promise { + return await computeAppSecretKey(masterNullifierSecretKey, app, 'n'); // 'n' is the key prefix for nullifier secret key } -export function computeAppSecretKey(skM: GrumpkinScalar, app: AztecAddress, keyPrefix: KeyPrefix): Fr { +export async function computeAppSecretKey(skM: GrumpkinScalar, app: AztecAddress, keyPrefix: KeyPrefix): Promise { const generator = getKeyGenerator(keyPrefix); - return poseidon2HashWithSeparator([skM.hi, skM.lo, app], generator); + return await 
poseidon2HashWithSeparator([skM.hi, skM.lo, app], generator); } -export function computeOvskApp(ovsk: GrumpkinScalar, app: AztecAddress) { - const ovskAppFr = computeAppSecretKey(ovsk, app, 'ov'); // 'ov' is the key prefix for outgoing viewing key +export async function computeOvskApp(ovsk: GrumpkinScalar, app: AztecAddress) { + const ovskAppFr = await computeAppSecretKey(ovsk, app, 'ov'); // 'ov' is the key prefix for outgoing viewing key // Here we are intentionally converting Fr (output of poseidon) to Fq. This is fine even though a distribution of // P = s * G will not be uniform because 2 * (q - r) / q is small. return GrumpkinScalar.fromBuffer(ovskAppFr.toBuffer()); @@ -42,25 +45,25 @@ export function deriveSigningKey(secretKey: Fr): GrumpkinScalar { return sha512ToGrumpkinScalar([secretKey, GeneratorIndex.IVSK_M]); } -export function computePreaddress(publicKeysHash: Fr, partialAddress: Fr) { - return poseidon2HashWithSeparator([publicKeysHash, partialAddress], GeneratorIndex.CONTRACT_ADDRESS_V1); +export async function computePreaddress(publicKeysHash: Fr, partialAddress: Fr) { + return await poseidon2HashWithSeparator([publicKeysHash, partialAddress], GeneratorIndex.CONTRACT_ADDRESS_V1); } -export function computeAddress(publicKeys: PublicKeys, partialAddress: Fr): AztecAddress { +export async function computeAddress(publicKeys: PublicKeys, partialAddress: Fr): Promise { // Given public keys and a partial address, we can compute our address in the following steps. // 1. preaddress = poseidon2([publicKeysHash, partialAddress], GeneratorIndex.CONTRACT_ADDRESS_V1); // 2. addressPoint = (preaddress * G) + ivpk_m // 3. 
address = addressPoint.x - const preaddress = computePreaddress(publicKeys.hash(), partialAddress); - const address = new Grumpkin().add( - derivePublicKeyFromSecretKey(new Fq(preaddress.toBigInt())), + const preaddress = await computePreaddress(await publicKeys.hash(), partialAddress); + const address = await new Grumpkin().add( + await derivePublicKeyFromSecretKey(new Fq(preaddress.toBigInt())), publicKeys.masterIncomingViewingPublicKey, ); return new AztecAddress(address.x); } -export function computeAddressSecret(preaddress: Fr, ivsk: Fq) { +export async function computeAddressSecret(preaddress: Fr, ivsk: Fq) { // TLDR; P1 = (h + ivsk) * G // if P1.y is pos // S = (h + ivsk) @@ -71,7 +74,7 @@ export function computeAddressSecret(preaddress: Fr, ivsk: Fq) { // and the other encodes to a point with a negative y-coordinate. We take the addressSecret candidate that is a simple addition of the two Scalars. const addressSecretCandidate = ivsk.add(new Fq(preaddress.toBigInt())); // We then multiply this secretCandidate by the generator G to create an addressPoint candidate. - const addressPointCandidate = derivePublicKeyFromSecretKey(addressSecretCandidate); + const addressPointCandidate = await derivePublicKeyFromSecretKey(addressSecretCandidate); // Because all encryption to addresses is done using a point with the positive y-coordinate, if our addressSecret candidate derives a point with a // negative y-coordinate, we use the other candidate by negating the secret. This transformation of the secret simply flips the y-coordinate of the derived point while keeping the x-coordinate the same. @@ -92,7 +95,7 @@ export function derivePublicKeyFromSecretKey(secretKey: Fq) { * @param secretKey - The secret key to derive keys from. * @returns The derived keys. 
*/ -export function deriveKeys(secretKey: Fr) { +export async function deriveKeys(secretKey: Fr) { // First we derive master secret keys - we use sha512 here because this derivation will never take place // in a circuit const masterNullifierSecretKey = deriveMasterNullifierSecretKey(secretKey); @@ -101,10 +104,10 @@ export function deriveKeys(secretKey: Fr) { const masterTaggingSecretKey = sha512ToGrumpkinScalar([secretKey, GeneratorIndex.TSK_M]); // Then we derive master public keys - const masterNullifierPublicKey = derivePublicKeyFromSecretKey(masterNullifierSecretKey); - const masterIncomingViewingPublicKey = derivePublicKeyFromSecretKey(masterIncomingViewingSecretKey); - const masterOutgoingViewingPublicKey = derivePublicKeyFromSecretKey(masterOutgoingViewingSecretKey); - const masterTaggingPublicKey = derivePublicKeyFromSecretKey(masterTaggingSecretKey); + const masterNullifierPublicKey = await derivePublicKeyFromSecretKey(masterNullifierSecretKey); + const masterIncomingViewingPublicKey = await derivePublicKeyFromSecretKey(masterIncomingViewingSecretKey); + const masterOutgoingViewingPublicKey = await derivePublicKeyFromSecretKey(masterOutgoingViewingSecretKey); + const masterTaggingPublicKey = await derivePublicKeyFromSecretKey(masterTaggingSecretKey); // We hash the public keys to get the public keys hash const publicKeys = new PublicKeys( @@ -123,8 +126,8 @@ export function deriveKeys(secretKey: Fr) { }; } -export function computeTaggingSecret(knownAddress: CompleteAddress, ivsk: Fq, externalAddress: AztecAddress) { - const knownPreaddress = computePreaddress(knownAddress.publicKeys.hash(), knownAddress.partialAddress); +export async function computeTaggingSecret(knownAddress: CompleteAddress, ivsk: Fq, externalAddress: AztecAddress) { + const knownPreaddress = await computePreaddress(await knownAddress.publicKeys.hash(), knownAddress.partialAddress); // TODO: #8970 - Computation of address point from x coordinate might fail const externalAddressPoint = 
externalAddress.toAddressPoint(); const curve = new Grumpkin(); @@ -133,5 +136,5 @@ export function computeTaggingSecret(knownAddress: CompleteAddress, ivsk: Fq, ex // Beware! h_a + ivsk_a (also known as the address secret) can lead to an address point with a negative y-coordinate, since there's two possible candidates // computeAddressSecret takes care of selecting the one that leads to a positive y-coordinate, which is the only valid address point - return curve.mul(externalAddressPoint, computeAddressSecret(knownPreaddress, ivsk)); + return curve.mul(externalAddressPoint, await computeAddressSecret(knownPreaddress, ivsk)); } diff --git a/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.test.ts b/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.test.ts index fb3ce4f043d6..af1b220554ec 100644 --- a/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.test.ts +++ b/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.test.ts @@ -29,11 +29,11 @@ describe('merkle tree root calculator', () => { expect(calculator.computeTreeRoot(leaves)).toEqual(expected); }); - it('should compute entire tree', () => { + it('should compute entire tree', async () => { const calculator = new MerkleTreeCalculator(4); const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const expectedRoot = calculator.computeTreeRoot(leaves); - const result = calculator.computeTree(leaves); + const expectedRoot = await calculator.computeTreeRoot(leaves); + const result = await calculator.computeTree(leaves); expect(result.nodes.length).toEqual(31); expect(result.root).toEqual(expectedRoot); }); diff --git a/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.ts b/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.ts index c8d4870039c0..e1455032e9c3 100644 --- a/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.ts +++ b/yarn-project/circuits.js/src/merkle/merkle_tree_calculator.ts @@ -6,47 +6,54 @@ import { MerkleTree } from 
'./merkle_tree.js'; * Merkle tree calculator. */ export class MerkleTreeCalculator { - private zeroHashes: Buffer[]; - private hasher: (left: Buffer, right: Buffer) => Buffer; + private zeroHashes: Promise; + private hasher: (left: Buffer, right: Buffer) => Buffer | Promise; constructor( private height: number, zeroLeaf = Buffer.alloc(32), - hasher = (left: Buffer, right: Buffer) => pedersenHash([left, right]).toBuffer(), + hasher: (left: Buffer, right: Buffer) => Buffer | Promise = async (left, right) => + (await pedersenHash([left, right])).toBuffer(), ) { this.hasher = hasher; - this.zeroHashes = Array.from({ length: height }).reduce( - (acc: Buffer[], _, i) => [...acc, this.hasher(acc[i], acc[i])], - [zeroLeaf], - ); + this.zeroHashes = (async () => { + const result = [zeroLeaf]; + for (let i = 0; i < height; i++) { + result.push(await hasher(result[i], result[i])); + } + return result; + })(); } - computeTree(leaves: Buffer[] = []): MerkleTree { + async computeTree(leaves: Buffer[] = []): Promise { if (leaves.length === 0) { + const zeroHashes = await this.zeroHashes; // TODO(#4425): We should be returning a number of nodes that matches the tree height. 
- return new MerkleTree(this.height, [this.zeroHashes[this.zeroHashes.length - 1]]); + return new MerkleTree(this.height, [zeroHashes[zeroHashes.length - 1]]); } let result = leaves.slice(); + const zeroHashes = await this.zeroHashes; for (let i = 0; i < this.height; ++i) { const numLeaves = 2 ** (this.height - i); const newLeaves: Buffer[] = []; for (let j = 0; j < leaves.length / 2; ++j) { const l = leaves[j * 2]; - const r = leaves[j * 2 + 1] || this.zeroHashes[i]; - newLeaves[j] = this.hasher(l, r); + const r = leaves[j * 2 + 1] || zeroHashes[i]; + newLeaves[j] = await this.hasher(l, r); } - result = result.concat(new Array(numLeaves - leaves.length).fill(this.zeroHashes[i]), newLeaves); + result = result.concat(new Array(numLeaves - leaves.length).fill(zeroHashes[i]), newLeaves); leaves = newLeaves; } return new MerkleTree(this.height, result); } - computeTreeRoot(leaves: Buffer[] = []): Buffer { + async computeTreeRoot(leaves: Buffer[] = []): Promise { + const zeroHashes = await this.zeroHashes; if (leaves.length === 0) { - return this.zeroHashes[this.zeroHashes.length - 1]; + return zeroHashes[zeroHashes.length - 1]; } leaves = leaves.slice(); @@ -55,8 +62,8 @@ export class MerkleTreeCalculator { let j = 0; for (; j < leaves.length / 2; ++j) { const l = leaves[j * 2]; - const r = leaves[j * 2 + 1] || this.zeroHashes[i]; - leaves[j] = this.hasher(l, r); + const r = leaves[j * 2 + 1] || zeroHashes[i]; + leaves[j] = await this.hasher(l, r); } leaves = leaves.slice(0, j); } diff --git a/yarn-project/circuits.js/src/merkle/sibling_path.test.ts b/yarn-project/circuits.js/src/merkle/sibling_path.test.ts index 7b2852570d0e..2a89000660a2 100644 --- a/yarn-project/circuits.js/src/merkle/sibling_path.test.ts +++ b/yarn-project/circuits.js/src/merkle/sibling_path.test.ts @@ -7,10 +7,10 @@ import { computeRootFromSiblingPath } from './sibling_path.js'; describe('sibling path', () => { let tree: MerkleTree; - beforeAll(() => { + beforeAll(async () => { const calculator = 
new MerkleTreeCalculator(4); const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - tree = calculator.computeTree(leaves); + tree = await calculator.computeTree(leaves); }); test.each([0, 1, 2, 3, 4, 5, 6, 7])('recovers the root from a leaf at index %s and its sibling path', index => { diff --git a/yarn-project/circuits.js/src/merkle/sibling_path.ts b/yarn-project/circuits.js/src/merkle/sibling_path.ts index 52383dae87c7..ae5a4918afa2 100644 --- a/yarn-project/circuits.js/src/merkle/sibling_path.ts +++ b/yarn-project/circuits.js/src/merkle/sibling_path.ts @@ -1,15 +1,16 @@ import { pedersenHash } from '@aztec/foundation/crypto'; /** Computes the expected root of a merkle tree given a leaf and its sibling path. */ -export function computeRootFromSiblingPath( +export async function computeRootFromSiblingPath( leaf: Buffer, siblingPath: Buffer[], index: number, - hasher = (left: Buffer, right: Buffer) => pedersenHash([left, right]).toBuffer(), + hasher: (left: Buffer, right: Buffer) => Buffer | Promise = async (left, right) => + (await pedersenHash([left, right])).toBuffer(), ) { let result = leaf; for (const sibling of siblingPath) { - result = index & 1 ? hasher(sibling, result) : hasher(result, sibling); + result = index & 1 ? 
await hasher(sibling, result) : await hasher(result, sibling); index >>= 1; } return result; diff --git a/yarn-project/circuits.js/src/structs/avm/avm.test.ts b/yarn-project/circuits.js/src/structs/avm/avm.test.ts index 3b6f765e013f..b6cc3a7818e5 100644 --- a/yarn-project/circuits.js/src/structs/avm/avm.test.ts +++ b/yarn-project/circuits.js/src/structs/avm/avm.test.ts @@ -4,8 +4,8 @@ import { makeAvmCircuitInputs } from '../../tests/factories.js'; import { AvmCircuitInputs } from './avm.js'; describe('Avm circuit inputs', () => { - it(`serializes to buffer and deserializes it back`, () => { - const avmCircuitInputs = makeAvmCircuitInputs(randomInt(2000)); + it(`serializes to buffer and deserializes it back`, async () => { + const avmCircuitInputs = await makeAvmCircuitInputs(randomInt(2000)); const buffer = avmCircuitInputs.toBuffer(); const res = AvmCircuitInputs.fromBuffer(buffer); expect(res).toEqual(avmCircuitInputs); diff --git a/yarn-project/circuits.js/src/structs/block_header.ts b/yarn-project/circuits.js/src/structs/block_header.ts index 8d335e7a5dee..f3579a8c9a58 100644 --- a/yarn-project/circuits.js/src/structs/block_header.ts +++ b/yarn-project/circuits.js/src/structs/block_header.ts @@ -148,8 +148,8 @@ export class BlockHeader { return BlockHeader.fromBuffer(hexToBuffer(str)); } - hash(): Fr { - return poseidon2HashWithSeparator(this.toFields(), GeneratorIndex.BLOCK_HASH); + async hash(): Promise { + return await poseidon2HashWithSeparator(this.toFields(), GeneratorIndex.BLOCK_HASH); } [inspect.custom]() { diff --git a/yarn-project/circuits.js/src/structs/complete_address.test.ts b/yarn-project/circuits.js/src/structs/complete_address.test.ts index ec45caaa8d91..06065cb9cef9 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.test.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.test.ts @@ -5,31 +5,27 @@ import { PublicKeys } from '../types/public_keys.js'; import { CompleteAddress } from './complete_address.js'; 
describe('CompleteAddress', () => { - it('refuses to add an account with incorrect address for given partial address and pubkey', () => { - expect( - () => - new CompleteAddress( - AztecAddress.random(), - new PublicKeys(Point.random(), Point.random(), Point.random(), Point.random()), - Fr.random(), - ), - ).toThrow(/cannot be derived/); + it('refuses to add an account with incorrect address for given partial address and pubkey', async () => { + const points = await Promise.all([Point.random(), Point.random(), Point.random(), Point.random()]); + expect(() => new CompleteAddress(AztecAddress.random(), new PublicKeys(...points), Fr.random())).toThrow( + /cannot be derived/, + ); }); - it('equals returns true when 2 instances are equal', () => { - const address1 = CompleteAddress.random(); + it('equals returns true when 2 instances are equal', async () => { + const address1 = await CompleteAddress.random(); const address2 = new CompleteAddress(address1.address, address1.publicKeys, address1.partialAddress); expect(address1.equals(address2)).toBe(true); }); - it('equals returns true when 2 instances are not equal', () => { - const address1 = CompleteAddress.random(); - const address2 = CompleteAddress.random(); + it('equals returns true when 2 instances are not equal', async () => { + const address1 = await CompleteAddress.random(); + const address2 = await CompleteAddress.random(); expect(address1.equals(address2)).toBe(false); }); - it('serializes / deserializes correctly', () => { - const expectedAddress = CompleteAddress.random(); + it('serializes / deserializes correctly', async () => { + const expectedAddress = await CompleteAddress.random(); const address = CompleteAddress.fromBuffer(expectedAddress.toBuffer()); expect(address.equals(expectedAddress)).toBe(true); }); diff --git a/yarn-project/circuits.js/src/structs/complete_address.ts b/yarn-project/circuits.js/src/structs/complete_address.ts index 5fbd1132e40e..efd8297b5dc2 100644 --- 
a/yarn-project/circuits.js/src/structs/complete_address.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.ts @@ -40,32 +40,32 @@ export class CompleteAddress { return this.toString(); } - static random(): CompleteAddress { - return this.fromSecretKeyAndPartialAddress(Fr.random(), Fr.random()); + static async random(): Promise { + return await this.fromSecretKeyAndPartialAddress(Fr.random(), Fr.random()); } - static fromSecretKeyAndPartialAddress(secretKey: Fr, partialAddress: Fr): CompleteAddress { - const { publicKeys } = deriveKeys(secretKey); - const address = computeAddress(publicKeys, partialAddress); + static async fromSecretKeyAndPartialAddress(secretKey: Fr, partialAddress: Fr): Promise { + const { publicKeys } = await deriveKeys(secretKey); + const address = await computeAddress(publicKeys, partialAddress); return new CompleteAddress(address, publicKeys, partialAddress); } - getPreaddress() { - return computePreaddress(this.publicKeys.hash(), this.partialAddress); + async getPreaddress() { + return computePreaddress(await this.publicKeys.hash(), this.partialAddress); } - static fromSecretKeyAndInstance( + static async fromSecretKeyAndInstance( secretKey: Fr, instance: Parameters[0], - ): CompleteAddress { - const partialAddress = computePartialAddress(instance); + ): Promise { + const partialAddress = await computePartialAddress(instance); return CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, partialAddress); } /** Throws if the address is not correctly derived from the public key and partial address.*/ - public validate() { - const expectedAddress = computeAddress(this.publicKeys, this.partialAddress); + public async validate() { + const expectedAddress = await computeAddress(this.publicKeys, this.partialAddress); if (!expectedAddress.equals(this.address)) { throw new Error( diff --git a/yarn-project/circuits.js/src/structs/function_data.ts b/yarn-project/circuits.js/src/structs/function_data.ts index afc63a96d216..eb813bbf44c0 
100644 --- a/yarn-project/circuits.js/src/structs/function_data.ts +++ b/yarn-project/circuits.js/src/structs/function_data.ts @@ -17,9 +17,9 @@ export class FunctionData { public isPrivate: boolean, ) {} - static fromAbi(abi: FunctionAbi | ContractFunctionDao): FunctionData { + static async fromAbi(abi: FunctionAbi | ContractFunctionDao): Promise { return new FunctionData( - FunctionSelector.fromNameAndParameters(abi.name, abi.parameters), + await FunctionSelector.fromNameAndParameters(abi.name, abi.parameters), abi.functionType === FunctionType.PRIVATE, ); } diff --git a/yarn-project/circuits.js/src/structs/key_validation_request.ts b/yarn-project/circuits.js/src/structs/key_validation_request.ts index 4fb96fabd0ea..eb2e6ccf4d37 100644 --- a/yarn-project/circuits.js/src/structs/key_validation_request.ts +++ b/yarn-project/circuits.js/src/structs/key_validation_request.ts @@ -58,7 +58,7 @@ export class KeyValidationRequest { return new KeyValidationRequest(Point.ZERO, Fr.ZERO); } - static random() { - return new KeyValidationRequest(Point.random(), Fr.random()); + static async random() { + return new KeyValidationRequest(await Point.random(), Fr.random()); } } diff --git a/yarn-project/circuits.js/src/structs/tx_request.ts b/yarn-project/circuits.js/src/structs/tx_request.ts index 7666cb8badac..ed468102c7b6 100644 --- a/yarn-project/circuits.js/src/structs/tx_request.ts +++ b/yarn-project/circuits.js/src/structs/tx_request.ts @@ -62,8 +62,8 @@ export class TxRequest { ); } - hash() { - return poseidon2HashWithSeparator(this.toFields(), GeneratorIndex.TX_REQUEST); + async hash() { + return await poseidon2HashWithSeparator(this.toFields(), GeneratorIndex.TX_REQUEST); } static empty() { diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 63f96df90975..0e620b707fe3 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -1191,15 +1191,18 @@ 
export function makeUnconstrainedFunctionWithMembershipProof(seed = 0): Unconstr }; } -export function makeContractClassPublic(seed = 0, publicDispatchFunction?: PublicFunction): ContractClassPublic { +export async function makeContractClassPublic( + seed = 0, + publicDispatchFunction?: PublicFunction, +): Promise { const artifactHash = fr(seed + 1); const publicFunctions = publicDispatchFunction ? [publicDispatchFunction] : makeTuple(1, makeContractClassPublicFunction, seed + 2); const privateFunctionsRoot = fr(seed + 3); const packedBytecode = publicDispatchFunction?.bytecode ?? makeBytes(100, seed + 4); - const publicBytecodeCommitment = computePublicBytecodeCommitment(packedBytecode); - const id = computeContractClassId({ artifactHash, privateFunctionsRoot, publicBytecodeCommitment }); + const publicBytecodeCommitment = await computePublicBytecodeCommitment(packedBytecode); + const id = await computeContractClassId({ artifactHash, privateFunctionsRoot, publicBytecodeCommitment }); return { id, artifactHash, @@ -1227,12 +1230,16 @@ function makeContractClassPrivateFunction(seed = 0): PrivateFunction { }; } -export function makeArray(length: number, fn: (i: number) => T, offset = 0) { - return Array.from({ length }, (_: any, i: number) => fn(i + offset)); +export async function makeArray( + length: number, + fn: (i: number) => Promise | T, + offset = 0, +): Promise { + return Promise.all(Array.from({ length }, (_: any, i: number) => fn(i + offset))); } -export function makeVector(length: number, fn: (i: number) => T, offset = 0) { - return new Vector(makeArray(length, fn, offset)); +export async function makeVector(length: number, fn: (i: number) => T | Promise, offset = 0) { + return new Vector(await makeArray(length, fn, offset)); } /** @@ -1249,31 +1256,31 @@ export function makeAvmKeyValueHint(seed = 0): AvmKeyValueHint { * @param seed - The seed to use for generating the state reference. * @returns AvmExternalCallHint. 
*/ -export function makeAvmExternalCallHint(seed = 0): AvmExternalCallHint { +export async function makeAvmExternalCallHint(seed = 0): Promise { return new AvmExternalCallHint( new Fr(seed % 2), - makeArray((seed % 100) + 10, i => new Fr(i), seed + 0x1000), + await makeArray((seed % 100) + 10, i => new Fr(i), seed + 0x1000), new Gas(seed + 0x200, seed), new Fr(seed + 0x300), new AztecAddress(new Fr(seed + 0x400)), ); } -export function makeContractInstanceFromClassId(classId: Fr, seed = 0): ContractInstanceWithAddress { +export async function makeContractInstanceFromClassId(classId: Fr, seed = 0): Promise { const salt = new Fr(seed); const initializationHash = new Fr(seed + 1); const deployer = new AztecAddress(new Fr(seed + 2)); - const publicKeys = PublicKeys.random(); + const publicKeys = await PublicKeys.random(); - const saltedInitializationHash = poseidon2HashWithSeparator( + const saltedInitializationHash = await poseidon2HashWithSeparator( [salt, initializationHash, deployer], GeneratorIndex.PARTIAL_ADDRESS, ); - const partialAddress = poseidon2HashWithSeparator( + const partialAddress = await poseidon2HashWithSeparator( [classId, saltedInitializationHash], GeneratorIndex.PARTIAL_ADDRESS, ); - const address = computeAddress(publicKeys, partialAddress); + const address = await computeAddress(publicKeys, partialAddress); return new SerializableContractInstance({ version: 1, salt, @@ -1284,9 +1291,9 @@ export function makeContractInstanceFromClassId(classId: Fr, seed = 0): Contract }).withAddress(address); } -export function makeAvmBytecodeHints(seed = 0): AvmContractBytecodeHints { - const { artifactHash, privateFunctionsRoot, packedBytecode, id } = makeContractClassPublic(seed); - const instance = makeContractInstanceFromClassId(id, seed + 0x1000); +export async function makeAvmBytecodeHints(seed = 0): Promise { + const { artifactHash, privateFunctionsRoot, packedBytecode, id } = await makeContractClassPublic(seed); + const instance = await 
makeContractInstanceFromClassId(id, seed + 0x1000); const avmHintInstance = new AvmContractInstanceHint( instance.address, @@ -1298,7 +1305,7 @@ export function makeAvmBytecodeHints(seed = 0): AvmContractBytecodeHints { instance.publicKeys, ); - const publicBytecodeCommitment = computePublicBytecodeCommitment(packedBytecode); + const publicBytecodeCommitment = await computePublicBytecodeCommitment(packedBytecode); return new AvmContractBytecodeHints(packedBytecode, avmHintInstance, { artifactHash, @@ -1307,43 +1314,39 @@ export function makeAvmBytecodeHints(seed = 0): AvmContractBytecodeHints { }); } -export function makeAvmTreeHints(seed = 0): AvmAppendTreeHint { - return new AvmAppendTreeHint( - new Fr(seed), - new Fr(seed + 1), - makeArray(10, i => new Fr(i), seed + 0x1000), - ); +export async function makeAvmTreeHints(seed = 0): Promise { + return new AvmAppendTreeHint(new Fr(seed), new Fr(seed + 1), await makeArray(10, i => new Fr(i), seed + 0x1000)); } -export function makeAvmNullifierReadTreeHints(seed = 0): AvmNullifierReadTreeHint { +export async function makeAvmNullifierReadTreeHints(seed = 0): Promise { const lowNullifierPreimage = new NullifierLeafPreimage(new Fr(seed), new Fr(seed + 1), BigInt(seed + 2)); return new AvmNullifierReadTreeHint( lowNullifierPreimage, new Fr(seed + 1), - makeArray(10, i => new Fr(i), seed + 0x1000), + await makeArray(10, i => new Fr(i), seed + 0x1000), ); } -export function makeAvmNullifierInsertionTreeHints(seed = 0): AvmNullifierWriteTreeHint { +export async function makeAvmNullifierInsertionTreeHints(seed = 0): Promise { return new AvmNullifierWriteTreeHint( - makeAvmNullifierReadTreeHints(seed), - makeArray(20, i => new Fr(i), seed + 0x1000), + await makeAvmNullifierReadTreeHints(seed), + await makeArray(20, i => new Fr(i), seed + 0x1000), ); } -export function makeAvmStorageReadTreeHints(seed = 0): AvmPublicDataReadTreeHint { +export async function makeAvmStorageReadTreeHints(seed = 0): Promise { return new 
AvmPublicDataReadTreeHint( new PublicDataTreeLeafPreimage(new Fr(seed), new Fr(seed + 1), new Fr(seed + 2), BigInt(seed + 3)), new Fr(seed + 1), - makeArray(10, i => new Fr(i), seed + 0x1000), + await makeArray(10, i => new Fr(i), seed + 0x1000), ); } -export function makeAvmStorageUpdateTreeHints(seed = 0): AvmPublicDataWriteTreeHint { +export async function makeAvmStorageUpdateTreeHints(seed = 0): Promise { return new AvmPublicDataWriteTreeHint( - makeAvmStorageReadTreeHints(seed), + await makeAvmStorageReadTreeHints(seed), new PublicDataTreeLeafPreimage(new Fr(seed), new Fr(seed + 1), new Fr(seed + 2), BigInt(seed + 3)), - makeArray(20, i => new Fr(i), seed + 0x1000), + await makeArray(20, i => new Fr(i), seed + 0x1000), ); } @@ -1369,10 +1372,10 @@ export function makeAvmContractInstanceHint(seed = 0): AvmContractInstanceHint { ); } -export function makeAvmEnqueuedCallHint(seed = 0): AvmEnqueuedCallHint { +export async function makeAvmEnqueuedCallHint(seed = 0): Promise { return AvmEnqueuedCallHint.from({ contractAddress: new AztecAddress(new Fr(seed)), - calldata: makeVector((seed % 20) + 4, i => new Fr(i), seed + 0x1000), + calldata: await makeVector((seed % 20) + 4, i => new Fr(i), seed + 0x1000), }); } @@ -1381,10 +1384,10 @@ export function makeAvmEnqueuedCallHint(seed = 0): AvmEnqueuedCallHint { * @param seed - The seed to use for generating the hints. * @returns the execution hints. */ -export function makeAvmExecutionHints( +export async function makeAvmExecutionHints( seed = 0, overrides: Partial> = {}, -): AvmExecutionHints { +): Promise { const lengthOffset = 10; const lengthSeedMod = 10; const baseLength = lengthOffset + (seed % lengthSeedMod); @@ -1414,13 +1417,16 @@ export function makeAvmExecutionHints( * @param seed - The seed to use for generating the hints. * @returns the execution hints. 
*/ -export function makeAvmCircuitInputs(seed = 0, overrides: Partial> = {}): AvmCircuitInputs { +export async function makeAvmCircuitInputs( + seed = 0, + overrides: Partial> = {}, +): Promise { return AvmCircuitInputs.from({ functionName: `function${seed}`, - calldata: makeArray((seed % 100) + 10, i => new Fr(i), seed + 0x1000), + calldata: await makeArray((seed % 100) + 10, i => new Fr(i), seed + 0x1000), publicInputs: PublicCircuitPublicInputs.empty(), - avmHints: makeAvmExecutionHints(seed + 0x3000), - output: makeAvmCircuitPublicInputs(seed + 0x4000), + avmHints: await makeAvmExecutionHints(seed + 0x3000), + output: await makeAvmCircuitPublicInputs(seed + 0x4000), ...overrides, }); } diff --git a/yarn-project/circuits.js/src/types/public_keys.ts b/yarn-project/circuits.js/src/types/public_keys.ts index d426ab2f3b84..c5946e9f9fb5 100644 --- a/yarn-project/circuits.js/src/types/public_keys.ts +++ b/yarn-project/circuits.js/src/types/public_keys.ts @@ -8,15 +8,15 @@ import { type FieldsOf } from '@aztec/foundation/types'; import { z } from 'zod'; import { - DEFAULT_IVPK_M_X, - DEFAULT_IVPK_M_Y, - DEFAULT_NPK_M_X, - DEFAULT_NPK_M_Y, - DEFAULT_OVPK_M_X, - DEFAULT_OVPK_M_Y, - DEFAULT_TPK_M_X, - DEFAULT_TPK_M_Y, - GeneratorIndex, + DEFAULT_IVPK_M_X, + DEFAULT_IVPK_M_Y, + DEFAULT_NPK_M_X, + DEFAULT_NPK_M_Y, + DEFAULT_OVPK_M_X, + DEFAULT_OVPK_M_Y, + DEFAULT_TPK_M_X, + DEFAULT_TPK_M_Y, + GeneratorIndex, } from '../constants.gen.js'; import { type PublicKey } from './public_key.js'; @@ -52,10 +52,10 @@ export class PublicKeys { ); } - hash() { + async hash() { return this.isEmpty() ? 
Fr.ZERO - : poseidon2HashWithSeparator( + : await poseidon2HashWithSeparator( [ this.masterNullifierPublicKey, this.masterIncomingViewingPublicKey, @@ -84,8 +84,8 @@ export class PublicKeys { ); } - static random(): PublicKeys { - return new PublicKeys(Point.random(), Point.random(), Point.random(), Point.random()); + static async random(): Promise { + return new PublicKeys(await Point.random(), await Point.random(), await Point.random(), await Point.random()); } /** diff --git a/yarn-project/cli-wallet/src/bin/index.ts b/yarn-project/cli-wallet/src/bin/index.ts index 638c800d1907..b5087219918c 100644 --- a/yarn-project/cli-wallet/src/bin/index.ts +++ b/yarn-project/cli-wallet/src/bin/index.ts @@ -51,7 +51,7 @@ function injectInternalCommands(program: Command, log: LogFn, db: WalletDB) { const options = command.optsWithGlobals(); const { alias } = options; const value = Fr.random(); - const hash = computeSecretHash(value); + const hash = await computeSecretHash(value); await db.storeAlias('secrets', alias, Buffer.from(value.toString()), log); await db.storeAlias('secrets', `${alias}:hash`, Buffer.from(hash.toString()), log); diff --git a/yarn-project/cli-wallet/src/cmds/authorize_action.ts b/yarn-project/cli-wallet/src/cmds/authorize_action.ts index 88c552efd141..88212e5740d2 100644 --- a/yarn-project/cli-wallet/src/cmds/authorize_action.ts +++ b/yarn-project/cli-wallet/src/cmds/authorize_action.ts @@ -27,7 +27,7 @@ export async function authorizeAction( const contract = await Contract.at(contractAddress, contractArtifact, wallet); const action = contract.methods[functionName](...functionArgs); - const witness = await wallet.setPublicAuthWit({ caller, action }, true).send().wait(); + const witness = await (await wallet.setPublicAuthWit({ caller, action }, true)).send().wait(); log(`Authorized action ${functionName} on contract ${contractAddress} for caller ${caller}`); diff --git a/yarn-project/cli-wallet/src/cmds/create_account.ts 
b/yarn-project/cli-wallet/src/cmds/create_account.ts index 4d21262c97fb..0ba112c1afd4 100644 --- a/yarn-project/cli-wallet/src/cmds/create_account.ts +++ b/yarn-project/cli-wallet/src/cmds/create_account.ts @@ -32,8 +32,8 @@ export async function createAccount( Fr.ZERO, publicKey, ); - const salt = account.getInstance().salt; - const { address, publicKeys, partialAddress } = account.getCompleteAddress(); + const salt = (await account.getInstance()).salt; + const { address, publicKeys, partialAddress } = await account.getCompleteAddress(); const out: Record = {}; if (json) { @@ -44,8 +44,8 @@ export async function createAccount( } out.partialAddress = partialAddress; out.salt = salt; - out.initHash = account.getInstance().initializationHash; - out.deployer = account.getInstance().deployer; + out.initHash = (await account.getInstance()).initializationHash; + out.deployer = (await account.getInstance()).deployer; } else { log(`\nNew account:\n`); log(`Address: ${address.toString()}`); @@ -55,8 +55,8 @@ export async function createAccount( } log(`Partial address: ${partialAddress.toString()}`); log(`Salt: ${salt.toString()}`); - log(`Init hash: ${account.getInstance().initializationHash.toString()}`); - log(`Deployer: ${account.getInstance().deployer.toString()}`); + log(`Init hash: ${(await account.getInstance()).initializationHash.toString()}`); + log(`Deployer: ${(await account.getInstance()).deployer.toString()}`); } let tx; diff --git a/yarn-project/cli-wallet/src/cmds/deploy.ts b/yarn-project/cli-wallet/src/cmds/deploy.ts index d105e8e50634..84bd31a4bf47 100644 --- a/yarn-project/cli-wallet/src/cmds/deploy.ts +++ b/yarn-project/cli-wallet/src/cmds/deploy.ts @@ -91,7 +91,7 @@ export async function deploy( } } else { const { address, partialAddress } = deploy; - const instance = deploy.getInstance(); + const instance = await deploy.getInstance(); if (json) { logJson({ address: address?.toString() ?? 
'N/A', diff --git a/yarn-project/cli-wallet/src/cmds/deploy_account.ts b/yarn-project/cli-wallet/src/cmds/deploy_account.ts index 0c9c8235094e..8c7391c8545a 100644 --- a/yarn-project/cli-wallet/src/cmds/deploy_account.ts +++ b/yarn-project/cli-wallet/src/cmds/deploy_account.ts @@ -13,8 +13,8 @@ export async function deployAccount( log: LogFn, ) { const out: Record = {}; - const { address, partialAddress, publicKeys } = account.getCompleteAddress(); - const { initializationHash, deployer, salt } = account.getInstance(); + const { address, partialAddress, publicKeys } = await account.getCompleteAddress(); + const { initializationHash, deployer, salt } = await account.getInstance(); const wallet = await account.getWallet(); const secretKey = wallet.getSecretKey(); diff --git a/yarn-project/cli/src/cmds/contracts/inspect_contract.ts b/yarn-project/cli/src/cmds/contracts/inspect_contract.ts index 7274141cc2f8..93201fcc9251 100644 --- a/yarn-project/cli/src/cmds/contracts/inspect_contract.ts +++ b/yarn-project/cli/src/cmds/contracts/inspect_contract.ts @@ -16,7 +16,7 @@ export async function inspectContract(contractArtifactFile: string, debugLogger: if (contractFns.length === 0) { log(`No functions found for contract ${contractArtifact.name}`); } - const contractClass = getContractClassFromArtifact(contractArtifact); + const contractClass = await getContractClassFromArtifact(contractArtifact); const bytecodeLengthInFields = 1 + Math.ceil(contractClass.packedBytecode.length / 31); log(`Contract class details:`); @@ -26,15 +26,19 @@ export async function inspectContract(contractArtifactFile: string, debugLogger: log(`\tpublic bytecode commitment: ${contractClass.publicBytecodeCommitment.toString()}`); log(`\tpublic bytecode length: ${contractClass.packedBytecode.length} bytes (${bytecodeLengthInFields} fields)`); log(`\nExternal functions:`); - contractFns.filter(f => !f.isInternal).forEach(f => logFunction(f, log)); + for (const f of contractFns.filter(f => 
!f.isInternal)) { + await logFunction(f, log); + } log(`\nInternal functions:`); - contractFns.filter(f => f.isInternal).forEach(f => logFunction(f, log)); + for (const f of contractFns.filter(f => f.isInternal)) { + await logFunction(f, log); + } } -function logFunction(fn: FunctionArtifact, log: LogFn) { +async function logFunction(fn: FunctionArtifact, log: LogFn) { const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); const signature = decodeFunctionSignature(fn.name, fn.parameters); - const selector = FunctionSelector.fromSignature(signature); + const selector = await FunctionSelector.fromSignature(signature); const bytecodeSize = fn.bytecode.length; const bytecodeHash = sha256(fn.bytecode).toString('hex'); log( diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index 12b283cbdfd5..72c97c751bf7 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -163,8 +163,8 @@ async function deployToken( .deployed(waitOpts); await new BatchCall(wallet, [ - devCoin.methods.set_minter(bridge.address, true).request(), - devCoin.methods.set_admin(bridge.address).request(), + await devCoin.methods.set_minter(bridge.address, true).request(), + await devCoin.methods.set_admin(bridge.address).request(), ]) .send() .wait(waitOpts); diff --git a/yarn-project/cli/src/cmds/misc/compute_selector.ts b/yarn-project/cli/src/cmds/misc/compute_selector.ts index 9d299a64eff4..e14f9285bf77 100644 --- a/yarn-project/cli/src/cmds/misc/compute_selector.ts +++ b/yarn-project/cli/src/cmds/misc/compute_selector.ts @@ -1,7 +1,7 @@ import { FunctionSelector } from '@aztec/foundation/abi'; import { type LogFn } from '@aztec/foundation/log'; -export function computeSelector(functionSignature: string, log: LogFn) { - const selector = FunctionSelector.fromSignature(functionSignature); +export async function 
computeSelector(functionSignature: string, log: LogFn) { + const selector = await FunctionSelector.fromSignature(functionSignature); log(`${selector}`); } diff --git a/yarn-project/cli/src/cmds/misc/generate_secret_and_hash.ts b/yarn-project/cli/src/cmds/misc/generate_secret_and_hash.ts index fddea589c334..aca8404b47d7 100644 --- a/yarn-project/cli/src/cmds/misc/generate_secret_and_hash.ts +++ b/yarn-project/cli/src/cmds/misc/generate_secret_and_hash.ts @@ -2,11 +2,11 @@ import { computeSecretHash } from '@aztec/aztec.js'; import { Fr } from '@aztec/foundation/fields'; import { type LogFn } from '@aztec/foundation/log'; -export function generateSecretAndHash(log: LogFn) { +export async function generateSecretAndHash(log: LogFn) { const secret = Fr.random(); // We hash this the same way that aztec nr hash does. - const secretHash = computeSecretHash(secret); + const secretHash = await computeSecretHash(secret); log(` Secret: ${secret} diff --git a/yarn-project/cli/src/cmds/misc/index.ts b/yarn-project/cli/src/cmds/misc/index.ts index addfbc8283fe..c530e945faca 100644 --- a/yarn-project/cli/src/cmds/misc/index.ts +++ b/yarn-project/cli/src/cmds/misc/index.ts @@ -45,7 +45,7 @@ export function injectCommands(program: Command, log: LogFn) { .argument('', 'Function signature to compute selector for e.g. 
foo(Field)') .action(async (functionSignature: string) => { const { computeSelector } = await import('./compute_selector.js'); - computeSelector(functionSignature, log); + await computeSelector(functionSignature, log); }); program @@ -53,7 +53,7 @@ export function injectCommands(program: Command, log: LogFn) { .description('Generates an arbitrary secret (Fr), and its hash (using aztec-nr defaults)') .action(async () => { const { generateSecretAndHash } = await import('./generate_secret_and_hash.js'); - generateSecretAndHash(log); + await generateSecretAndHash(log); }); program diff --git a/yarn-project/cli/src/cmds/pxe/add_contract.ts b/yarn-project/cli/src/cmds/pxe/add_contract.ts index 3904930d04cc..e56845a05a1e 100644 --- a/yarn-project/cli/src/cmds/pxe/add_contract.ts +++ b/yarn-project/cli/src/cmds/pxe/add_contract.ts @@ -1,5 +1,10 @@ -import { AztecAddress, type ContractInstanceWithAddress, type Fr, getContractClassFromArtifact } from '@aztec/aztec.js'; -import { createCompatibleClient } from '@aztec/aztec.js'; +import { + AztecAddress, + type ContractInstanceWithAddress, + type Fr, + createCompatibleClient, + getContractClassFromArtifact, +} from '@aztec/aztec.js'; import { PublicKeys } from '@aztec/circuits.js'; import { computeContractAddressFromInstance } from '@aztec/circuits.js/contract'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; @@ -22,12 +27,12 @@ export async function addContract( version: 1, salt, initializationHash, - contractClassId: getContractClassFromArtifact(artifact).id, + contractClassId: (await getContractClassFromArtifact(artifact)).id, publicKeys: publicKeys ?? PublicKeys.default(), address, deployer: deployer ?? 
AztecAddress.ZERO, }; - const computed = computeContractAddressFromInstance(instance); + const computed = await computeContractAddressFromInstance(instance); if (!computed.equals(address)) { throw new Error(`Contract address ${address.toString()} does not match computed address ${computed.toString()}`); } diff --git a/yarn-project/cli/src/utils/aztec.ts b/yarn-project/cli/src/utils/aztec.ts index 08d4d4d02001..128265df4198 100644 --- a/yarn-project/cli/src/utils/aztec.ts +++ b/yarn-project/cli/src/utils/aztec.ts @@ -73,7 +73,7 @@ export async function deployAztecContracts( return await deployL1Contracts(chain.rpcUrl, account, chain.chainInfo, debugLogger, { l2FeeJuiceAddress: ProtocolContractAddress.FeeJuice, - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, salt, initialValidators, diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index d77451f317f2..d60a3dd01af4 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -67,8 +67,8 @@ describe('benchmarks/proving', () => { schnorrWalletEncKey = Fr.random(); schnorrWalletSigningKey = Fq.random(); - feeRecipient = CompleteAddress.random(); - recipient = CompleteAddress.random(); + feeRecipient = await CompleteAddress.random(); + recipient = await CompleteAddress.random(); const initialSchnorrWallet = await getSchnorrAccount( ctx.pxe, diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 0e44b93ce98d..f9eb9d81bdd0 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -453,7 +453,7 @@ describe('L1Publisher integration', () => { ); await tree.appendLeaves(l2ToL1MsgsArray); - const expectedRoot = 
tree.getRoot(true); + const expectedRoot = await tree.getRoot(true); const [returnedRoot] = await outbox.read.getRootData([block.header.globalVariables.blockNumber.toBigInt()]); // check that values are inserted into the outbox diff --git a/yarn-project/end-to-end/src/e2e_authwit.test.ts b/yarn-project/end-to-end/src/e2e_authwit.test.ts index c59fbef886ea..8de801868809 100644 --- a/yarn-project/end-to-end/src/e2e_authwit.test.ts +++ b/yarn-project/end-to-end/src/e2e_authwit.test.ts @@ -90,8 +90,8 @@ describe('e2e_authwit_tests', () => { const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0xbeef')]); const intent = { consumer: auth.address, innerHash }; - const messageHash = computeAuthWitMessageHash(intent, { chainId: Fr.random(), version }); - const expectedMessageHash = computeAuthWitMessageHash(intent, { chainId, version }); + const messageHash = await computeAuthWitMessageHash(intent, { chainId: Fr.random(), version }); + const expectedMessageHash = await computeAuthWitMessageHash(intent, { chainId, version }); expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, @@ -122,9 +122,9 @@ describe('e2e_authwit_tests', () => { const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0xbeef')]); const intent = { consumer: auth.address, innerHash }; - const messageHash = computeAuthWitMessageHash(intent, { chainId, version: Fr.random() }); + const messageHash = await computeAuthWitMessageHash(intent, { chainId, version: Fr.random() }); - const expectedMessageHash = computeAuthWitMessageHash(intent, { chainId, version }); + const expectedMessageHash = await computeAuthWitMessageHash(intent, { chainId, version }); expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts 
b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts index e2ca5a2fa640..62a1d0a4001a 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts @@ -199,7 +199,7 @@ export class BlacklistTokenContractTest { this.logger.verbose(`Minting ${amount} privately...`); const secret = Fr.random(); - const secretHash = computeSecretHash(secret); + const secretHash = await computeSecretHash(secret); const receipt = await asset.methods.mint_private(amount, secretHash).send().wait(); await this.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts index 4d9da34679d9..6af2ae42c9fb 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts @@ -196,7 +196,7 @@ describe('e2e_blacklist_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[1]).methods.burn(wallets[0].getAddress(), amount, nonce); - const messageHash = computeAuthWitMessageHash( + const messageHash = await computeAuthWitMessageHash( { caller: wallets[1].getAddress(), action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); @@ -215,7 +215,7 @@ describe('e2e_blacklist_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[2]).methods.burn(wallets[0].getAddress(), amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: wallets[2].getAddress(), 
action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts index 03aa384b6858..934570d42cf4 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts @@ -79,7 +79,7 @@ describe('e2e_blacklist_token_contract mint', () => { let txHash: TxHash; beforeAll(() => { - secretHash = computeSecretHash(secret); + secretHash = await computeSecretHash(secret); }); describe('Mint flow', () => { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts index c2955daf7701..45832563dfaa 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts @@ -27,7 +27,7 @@ describe('e2e_blacklist_token_contract shield + redeem_shield', () => { let secretHash: Fr; beforeAll(() => { - secretHash = computeSecretHash(secret); + secretHash = await computeSecretHash(secret); }); it('on behalf of self', async () => { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts index c35625d34504..c7078c6c6c2d 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts @@ -144,7 +144,7 @@ describe('e2e_blacklist_token_contract transfer private', () => { const action = asset .withWallet(wallets[1]) .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); - const messageHash = computeAuthWitMessageHash( + const 
messageHash = await computeAuthWitMessageHash( { caller: wallets[1].getAddress(), action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); @@ -162,7 +162,7 @@ describe('e2e_blacklist_token_contract transfer private', () => { const action = asset .withWallet(wallets[2]) .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: wallets[2].getAddress(), action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts index e23372a34c0b..563ed9410ee0 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts @@ -115,7 +115,7 @@ describe('e2e_blacklist_token_contract unshielding', () => { const action = asset .withWallet(wallets[2]) .methods.unshield(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: wallets[2].getAddress(), action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 4989a66d0a9c..f4f1fd21fd4d 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -287,7 +287,7 @@ describe('e2e_block_building', () => { it('calls a method with nested note encrypted logs', async () => { // account setup const privateKey = new Fr(7n); - const keys = 
deriveKeys(privateKey); + const keys = await deriveKeys(privateKey); const account = getSchnorrAccount(pxe, privateKey, keys.masterIncomingViewingSecretKey); await account.deploy().wait(); const thisWallet = await account.getWallet(); @@ -313,7 +313,7 @@ describe('e2e_block_building', () => { it('calls a method with nested encrypted logs', async () => { // account setup const privateKey = new Fr(7n); - const keys = deriveKeys(privateKey); + const keys = await deriveKeys(privateKey); const account = getSchnorrAccount(pxe, privateKey, keys.masterIncomingViewingSecretKey); await account.deploy().wait(); const thisWallet = await account.getWallet(); diff --git a/yarn-project/end-to-end/src/e2e_card_game.test.ts b/yarn-project/end-to-end/src/e2e_card_game.test.ts index d845ec81ff0d..91d715f76a88 100644 --- a/yarn-project/end-to-end/src/e2e_card_game.test.ts +++ b/yarn-project/end-to-end/src/e2e_card_game.test.ts @@ -1,15 +1,15 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { INITIAL_TEST_SECRET_KEYS } from '@aztec/accounts/testing'; import { - type AccountWallet, - AztecAddress, - type DebugLogger, - GrumpkinScalar, - type PXE, - type Wallet, - computeAppNullifierSecretKey, - deriveKeys, - deriveMasterNullifierSecretKey, + type AccountWallet, + AztecAddress, + type DebugLogger, + GrumpkinScalar, + type PXE, + type Wallet, + computeAppNullifierSecretKey, + deriveKeys, + deriveMasterNullifierSecretKey, } from '@aztec/aztec.js'; import { toBufferLE } from '@aztec/foundation/bigint-buffer'; import { sha256 } from '@aztec/foundation/crypto'; @@ -108,7 +108,7 @@ describe('e2e_card_game', () => { const preRegisteredAccounts = await pxe.getRegisteredAccounts(); const secretKeysToRegister = INITIAL_TEST_SECRET_KEYS.filter(key => { - const publicKey = deriveKeys(key).publicKeys.masterIncomingViewingPublicKey; + const publicKey = await deriveKeys(key).publicKeys.masterIncomingViewingPublicKey; return ( preRegisteredAccounts.find(preRegisteredAccount => { 
return preRegisteredAccount.publicKeys.masterIncomingViewingPublicKey.equals(publicKey); diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts index c05d6d0d1a7c..242128f05f16 100644 --- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts +++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts @@ -91,7 +91,7 @@ describe('e2e_crowdfunding_and_claim', () => { logger.info(`Reward Token deployed to ${rewardToken.address}`); crowdfundingSecretKey = Fr.random(); - crowdfundingPublicKeys = deriveKeys(crowdfundingSecretKey).publicKeys; + crowdfundingPublicKeys = await deriveKeys(crowdfundingSecretKey).publicKeys; const crowdfundingDeployment = CrowdfundingContract.deployWithPublicKeys( crowdfundingPublicKeys, diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index d436c38e0ac9..fa975be477d4 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -74,7 +74,7 @@ describe('e2e_deploy_contract contract class registration', () => { // TODO(#10007) Remove this test as well. 
it('starts archiver with pre-registered common contracts', async () => { - const classId = computeContractClassId(getContractClassFromArtifact(TokenContractArtifact)); + const classId = await computeContractClassId(await getContractClassFromArtifact(TokenContractArtifact)); expect(await aztecNode.getContractClass(classId)).not.toBeUndefined(); }); diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts index ca7dff265c2d..4e041629dc89 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts @@ -26,7 +26,7 @@ describe('e2e_deploy_contract private initialization', () => { const testWallet = kind === 'as entrypoint' ? new SignerlessWallet(pxe) : wallet; const contract = await t.registerContract(testWallet, TestContract); const receipt = await contract.methods.emit_nullifier(10).send().wait({ debug: true }); - const expected = siloNullifier(contract.address, new Fr(10)); + const expected = await siloNullifier(contract.address, new Fr(10)); expect(receipt.debugInfo?.nullifiers[1]).toEqual(expected); }, ); diff --git a/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts b/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts index 9a40a99563cb..361d1f26546a 100644 --- a/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts @@ -44,7 +44,7 @@ describe('e2e_escrow_contract', () => { // Generate private key for escrow contract, register key in pxe service, and deploy // Note that we need to register it first if we want to emit an encrypted note for it in the constructor escrowSecretKey = Fr.random(); - escrowPublicKeys = deriveKeys(escrowSecretKey).publicKeys; + escrowPublicKeys = await deriveKeys(escrowSecretKey).publicKeys; const escrowDeployment = 
EscrowContract.deployWithPublicKeys(escrowPublicKeys, wallet, owner); const escrowInstance = escrowDeployment.getInstance(); await pxe.registerAccount(escrowSecretKey, computePartialAddress(escrowInstance)); diff --git a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts index 7b2abe306b01..d9d12d1c0460 100644 --- a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts @@ -171,7 +171,7 @@ describe('e2e_fees account_init', () => { const [alicesInitialGas] = await t.getGasBalanceFn(aliceAddress); // bob generates the private keys for his account on his own - const bobsPublicKeys = deriveKeys(bobsSecretKey).publicKeys; + const bobsPublicKeys = await deriveKeys(bobsSecretKey).publicKeys; const bobsSigningPubKey = new Schnorr().computePublicKey(bobsPrivateSigningKey); const bobsInstance = bobsAccountManager.getInstance(); diff --git a/yarn-project/end-to-end/src/e2e_keys.test.ts b/yarn-project/end-to-end/src/e2e_keys.test.ts index b8962a0d2032..7826e750beda 100644 --- a/yarn-project/end-to-end/src/e2e_keys.test.ts +++ b/yarn-project/end-to-end/src/e2e_keys.test.ts @@ -1,21 +1,21 @@ import { createAccounts } from '@aztec/accounts/testing'; import { - type AccountWallet, - type AztecAddress, - type AztecNode, - Fr, - type L2Block, - type PXE, - type Wallet, + type AccountWallet, + type AztecAddress, + type AztecNode, + Fr, + type L2Block, + type PXE, + type Wallet, } from '@aztec/aztec.js'; import { - GeneratorIndex, - INITIAL_L2_BLOCK_NUM, - computeAppNullifierSecretKey, - computeAppSecretKey, - deriveMasterNullifierSecretKey, - deriveMasterOutgoingViewingSecretKey, - derivePublicKeyFromSecretKey, + GeneratorIndex, + INITIAL_L2_BLOCK_NUM, + computeAppNullifierSecretKey, + computeAppSecretKey, + deriveMasterNullifierSecretKey, + deriveMasterOutgoingViewingSecretKey, + derivePublicKeyFromSecretKey, } from '@aztec/circuits.js'; import { 
siloNullifier } from '@aztec/circuits.js/hash'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; @@ -95,7 +95,7 @@ describe('Keys', () => { ); // 3. Derive all the possible nullifiers using nskApp const derivedNullifiers = noteHashes.map(noteHash => { - const innerNullifier = poseidon2HashWithSeparator([noteHash, nskApp], GeneratorIndex.NOTE_NULLIFIER); + const innerNullifier = await poseidon2HashWithSeparator([noteHash, nskApp], GeneratorIndex.NOTE_NULLIFIER); return siloNullifier(contractAddress, innerNullifier); }); // 4. Count the number of derived nullifiers that are in the nullifiers array diff --git a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts index ff22b574df3a..fb6852148f45 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts @@ -42,7 +42,7 @@ describe('e2e_multiple_accounts_1_enc_key', () => { logger.info('Account contracts deployed'); // Verify that all accounts use the same encryption key - const encryptionPublicKey = deriveKeys(encryptionPrivateKey).publicKeys.masterIncomingViewingPublicKey; + const encryptionPublicKey = await deriveKeys(encryptionPrivateKey).publicKeys.masterIncomingViewingPublicKey; for (const account of accounts) { expect(account.publicKeys.masterIncomingViewingPublicKey).toEqual(encryptionPublicKey); diff --git a/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts b/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts index 54e377ad4e93..3db1b81a4322 100644 --- a/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts +++ b/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts @@ -35,7 +35,7 @@ describe('e2e_non_contract_account', () => { .send() .wait({ interval: 0.1, debug: true }); - const expectedSiloedNullifier = siloNullifier(contract.address, nullifier); + const 
expectedSiloedNullifier = await siloNullifier(contract.address, nullifier); const siloedNullifier = debugInfo!.nullifiers[1]; expect(siloedNullifier.equals(expectedSiloedNullifier)).toBeTruthy(); diff --git a/yarn-project/end-to-end/src/e2e_ordering.test.ts b/yarn-project/end-to-end/src/e2e_ordering.test.ts index f87a10f6e9a2..671a4e0dbb1c 100644 --- a/yarn-project/end-to-end/src/e2e_ordering.test.ts +++ b/yarn-project/end-to-end/src/e2e_ordering.test.ts @@ -69,9 +69,11 @@ describe('e2e_ordering', () => { expect(enqueuedPublicCalls.length).toEqual(2); // The call stack items in the output of the kernel proof match the tx enqueuedPublicFunctionCalls - enqueuedPublicCalls.forEach((c, i) => { - expect(c.isForCallRequest(tx.data.forPublic!.revertibleAccumulatedData.publicCallRequests[i])).toBe(true); - }); + for (const [i, c] of enqueuedPublicCalls.entries()) { + expect(await c.isForCallRequest(tx.data.forPublic!.revertibleAccumulatedData.publicCallRequests[i])).toBe( + true, + ); + } // The enqueued public calls are in the expected order based on the argument they set (stack is reversed!) 
// args[1] is used instead of args[0] because public functions are routed through the public dispatch diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index d1c35dfdb662..7a08090fe26f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -42,7 +42,7 @@ export const createPXEServiceAndSubmitTransactions = async ( const pxeService = await createPXEService(node, rpcConfig, true); const secretKey = Fr.random(); - const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); + const completeAddress = await CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); await pxeService.registerAccount(secretKey, completeAddress.partialAddress); const txs = await submitTxsTo(logger, pxeService, numTxs); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts index 0b7b10306bfb..abd85ba2adb3 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts @@ -189,7 +189,7 @@ describe('e2e_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[1]).methods.burn_private(accounts[0].address, amount, nonce); - const messageHash = computeAuthWitMessageHash( + const messageHash = await computeAuthWitMessageHash( { caller: accounts[1].address, action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); @@ -210,7 +210,7 @@ describe('e2e_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[2]).methods.burn_private(accounts[0].address, amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const 
expectedMessageHash = await computeAuthWitMessageHash( { caller: accounts[2].address, action: action.request() }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer.test.ts index 619f24c3a7d0..38022396c7c1 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer.test.ts @@ -47,7 +47,7 @@ describe('e2e_token_contract transfer private', () => { const amount = balance0 / 2n; expect(amount).toBeGreaterThan(0n); - const nonDeployed = CompleteAddress.random(); + const nonDeployed = await CompleteAddress.random(); await asset.methods.transfer(nonDeployed.address, amount).send().wait(); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts index d2cf242aff0d..2e5366195f67 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts @@ -108,7 +108,7 @@ describe('e2e_token_contract transfer private', () => { const action = asset .withWallet(wallets[1]) .methods.transfer_in_private(accounts[0].address, accounts[1].address, amount, nonce); - const messageHash = computeAuthWitMessageHash( + const messageHash = await computeAuthWitMessageHash( { caller: accounts[1].address, action: action.request() }, { chainId: wallets[0].getChainId(), @@ -131,7 +131,7 @@ describe('e2e_token_contract transfer private', () => { const action = asset .withWallet(wallets[2]) .methods.transfer_in_private(accounts[0].address, accounts[1].address, amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: accounts[2].address, action: action.request() }, { chainId: 
wallets[0].getChainId(), diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts index dbecfab9212c..4525d5025340 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts @@ -113,7 +113,7 @@ describe('e2e_token_contract transfer_to_public', () => { const action = asset .withWallet(wallets[2]) .methods.transfer_to_public(accounts[0].address, accounts[1].address, amount, nonce); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: accounts[2].address, action }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); diff --git a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts index b3fdb26403e9..e1ec9a7f6047 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts @@ -16,7 +16,7 @@ export const setupL1Contracts = async ( ) => { const l1Data = await deployL1Contracts(l1RpcUrl, account, foundry, logger, { l2FeeJuiceAddress: ProtocolContractAddress.FeeJuice, - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, salt: undefined, ...args, diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 482dfc157756..6666a3243941 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -101,7 +101,7 @@ export const setupL1Contracts = async ( ) => { const l1Data = await deployL1Contracts(l1RpcUrl, account, chain, logger, { l2FeeJuiceAddress: ProtocolContractAddress.FeeJuice, - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, salt: 
args.salt, initialValidators: args.initialValidators, @@ -508,7 +508,7 @@ export async function ensureAccountsPubliclyDeployed(sender: Wallet, accountsToD const instances = await Promise.all( accountsAndAddresses.filter(({ deployed }) => !deployed).map(({ address }) => sender.getContractInstance(address)), ); - const contractClass = getContractClassFromArtifact(SchnorrAccountContractArtifact); + const contractClass = await getContractClassFromArtifact(SchnorrAccountContractArtifact); if (!(await sender.isContractClassPubliclyRegistered(contractClass.id))) { await (await registerContractClass(sender, SchnorrAccountContractArtifact)).send().wait(); } diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index ef6b20c00186..64c7716a3478 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -608,7 +608,7 @@ export const uniswapL1L2TestSuite = ( // swap should fail since no withdraw approval to uniswap: const nonceForWETHTransferToPublicApproval = new Fr(2n); - const expectedMessageHash = computeAuthWitMessageHash( + const expectedMessageHash = await computeAuthWitMessageHash( { caller: uniswapL2Contract.address, action: wethCrossChainHarness.l2Token.methods diff --git a/yarn-project/entrypoints/src/account_entrypoint.ts b/yarn-project/entrypoints/src/account_entrypoint.ts index 15002e020a26..83a830e4aad9 100644 --- a/yarn-project/entrypoints/src/account_entrypoint.ts +++ b/yarn-project/entrypoints/src/account_entrypoint.ts @@ -25,14 +25,14 @@ export class DefaultAccountEntrypoint implements EntrypointInterface { async createTxExecutionRequest(exec: ExecutionRequestInit): Promise { const { calls, fee, nonce, cancellable } = exec; - const appPayload = EntrypointPayload.fromAppExecution(calls, nonce); + const appPayload = await EntrypointPayload.fromAppExecution(calls, nonce); const feePayload = await 
EntrypointPayload.fromFeeOptions(this.address, fee); const abi = this.getEntrypointAbi(); const entrypointPackedArgs = PackedValues.fromValues(encodeArguments(abi, [appPayload, feePayload, !!cancellable])); const combinedPayloadAuthWitness = await this.auth.createAuthWit( - computeCombinedPayloadHash(appPayload, feePayload), + await computeCombinedPayloadHash(appPayload, feePayload), ); const txRequest = TxExecutionRequest.from({ diff --git a/yarn-project/entrypoints/src/dapp_entrypoint.ts b/yarn-project/entrypoints/src/dapp_entrypoint.ts index 569715d36301..6d39dcce5ca0 100644 --- a/yarn-project/entrypoints/src/dapp_entrypoint.ts +++ b/yarn-project/entrypoints/src/dapp_entrypoint.ts @@ -26,18 +26,18 @@ export class DefaultDappEntrypoint implements EntrypointInterface { throw new Error(`Expected exactly 1 function call, got ${calls.length}`); } - const payload = EntrypointPayload.fromFunctionCalls(calls); + const payload = await EntrypointPayload.fromFunctionCalls(calls); const abi = this.getEntrypointAbi(); const entrypointPackedArgs = PackedValues.fromValues(encodeArguments(abi, [payload, this.userAddress])); const functionSelector = FunctionSelector.fromNameAndParameters(abi.name, abi.parameters); // Default msg_sender for entrypoints is now Fr.max_value rather than 0 addr (see #7190 & #7404) - const innerHash = computeInnerAuthWitHash([ + const innerHash = await computeInnerAuthWitHash([ Fr.MAX_FIELD_VALUE, functionSelector.toField(), entrypointPackedArgs.hash, ]); - const outerHash = computeAuthWitMessageHash( + const outerHash = await computeAuthWitMessageHash( { consumer: this.dappEntrypointAddress, innerHash }, { chainId: new Fr(this.chainId), version: new Fr(this.version) }, ); diff --git a/yarn-project/foundation/src/abi/abi.ts b/yarn-project/foundation/src/abi/abi.ts index 4ba09fb11a9e..2dfe85c2a01b 100644 --- a/yarn-project/foundation/src/abi/abi.ts +++ b/yarn-project/foundation/src/abi/abi.ts @@ -5,7 +5,7 @@ import { z } from 'zod'; import { type Fr } 
from '../fields/fields.js'; import { schemas } from '../schemas/schemas.js'; import { type ZodFor } from '../schemas/types.js'; -import { type FunctionSelector } from './function_selector.js'; +import { FunctionSelector } from './function_selector.js'; import { type NoteSelector } from './note_selector.js'; /** A basic value. */ @@ -378,15 +378,23 @@ export const ContractArtifactSchema: ZodFor = z.object({ }); /** Gets a function artifact including debug metadata given its name or selector. */ -export function getFunctionArtifact( +export async function getFunctionArtifact( artifact: ContractArtifact, functionNameOrSelector: string | FunctionSelector, -): FunctionArtifact { - const functionArtifact = artifact.functions.find(f => - typeof functionNameOrSelector === 'string' - ? f.name === functionNameOrSelector - : functionNameOrSelector.equals(f.name, f.parameters), +): Promise { + const foundArtifacts = await Promise.all( + artifact.functions.map(async f => { + const equal: boolean = + typeof functionNameOrSelector === 'string' + ? f.name === functionNameOrSelector + : functionNameOrSelector.equals(await FunctionSelector.fromNameAndParameters(f.name, f.parameters)); + if (!equal) { + return undefined; + } + return f; + }), ); + const functionArtifact = foundArtifacts.find(f => f !== undefined); if (!functionArtifact) { throw new Error(`Unknown function ${functionNameOrSelector}`); } diff --git a/yarn-project/foundation/src/abi/event_selector.ts b/yarn-project/foundation/src/abi/event_selector.ts index 0203d562380c..6338a0ff37e7 100644 --- a/yarn-project/foundation/src/abi/event_selector.ts +++ b/yarn-project/foundation/src/abi/event_selector.ts @@ -40,12 +40,12 @@ export class EventSelector extends Selector { * @param signature - Signature to generate the selector for (e.g. "transfer(field,field)"). * @returns selector. 
*/ - static fromSignature(signature: string) { + static async fromSignature(signature: string) { // throw if signature contains whitespace if (/\s/.test(signature)) { throw new Error('Signature cannot contain whitespace'); } - const hash = poseidon2HashBytes(Buffer.from(signature)); + const hash = await poseidon2HashBytes(Buffer.from(signature)); // We take the last Selector.SIZE big endian bytes const bytes = hash.toBuffer().slice(-Selector.SIZE); return EventSelector.fromBuffer(bytes); diff --git a/yarn-project/foundation/src/abi/function_selector.test.ts b/yarn-project/foundation/src/abi/function_selector.test.ts index a3f6d055354f..6ebdf1ff6f73 100644 --- a/yarn-project/foundation/src/abi/function_selector.test.ts +++ b/yarn-project/foundation/src/abi/function_selector.test.ts @@ -22,13 +22,13 @@ describe('FunctionSelector', () => { expect(res).toEqual(selector); }); - it('computes a function selector from signature', () => { - const res = FunctionSelector.fromSignature('IS_VALID()'); + it('computes a function selector from signature', async () => { + const res = await FunctionSelector.fromSignature('IS_VALID()'); expect(res.toBuffer().toString('hex')).toMatchSnapshot(); }); - it('computes a function selector from a long string', () => { - const res = FunctionSelector.fromSignature('foo_and_bar_and_baz_and_foo_bar_baz_and_bar_foo'); + it('computes a function selector from a long string', async () => { + const res = await FunctionSelector.fromSignature('foo_and_bar_and_baz_and_foo_bar_baz_and_bar_foo'); expect(res.toBuffer().toString('hex')).toMatchSnapshot(); }); }); diff --git a/yarn-project/foundation/src/abi/function_selector.ts b/yarn-project/foundation/src/abi/function_selector.ts index 00b75d1b5002..de20ac217d50 100644 --- a/yarn-project/foundation/src/abi/function_selector.ts +++ b/yarn-project/foundation/src/abi/function_selector.ts @@ -23,20 +23,9 @@ export class FunctionSelector extends Selector { * Checks if this function selector is equal to 
another. * @returns True if the function selectors are equal. */ - override equals(fn: { name: string; parameters: ABIParameter[] }): boolean; - override equals(otherName: string, otherParams: ABIParameter[]): boolean; override equals(other: FunctionSelector): boolean; - override equals( - other: FunctionSelector | string | { name: string; parameters: ABIParameter[] }, - otherParams?: ABIParameter[], - ): boolean { - if (typeof other === 'string') { - return this.equals(FunctionSelector.fromNameAndParameters(other, otherParams!)); - } else if (typeof other === 'object' && 'name' in other) { - return this.equals(FunctionSelector.fromNameAndParameters(other.name, other.parameters)); - } else { - return this.value === other.value; - } + override equals(other: FunctionSelector): boolean { + return this.value === other.value; } /** @@ -69,12 +58,12 @@ export class FunctionSelector extends Selector { * @param signature - Signature to generate the selector for (e.g. "transfer(field,field)"). * @returns selector. */ - static fromSignature(signature: string) { + static async fromSignature(signature: string) { // throw if signature contains whitespace if (/\s/.test(signature)) { throw new Error('Signature cannot contain whitespace'); } - const hash = poseidon2HashBytes(Buffer.from(signature)); + const hash = await poseidon2HashBytes(Buffer.from(signature)); // We take the last Selector.SIZE big endian bytes const bytes = hash.toBuffer().slice(-Selector.SIZE); return FunctionSelector.fromBuffer(bytes); @@ -110,16 +99,16 @@ export class FunctionSelector extends Selector { * @param parameters - An array of ABIParameter objects, each containing the type information of a function parameter. * @returns A Buffer containing the 4-byte selector. 
*/ - static fromNameAndParameters(args: { name: string; parameters: ABIParameter[] }): FunctionSelector; - static fromNameAndParameters(name: string, parameters: ABIParameter[]): FunctionSelector; - static fromNameAndParameters( + static fromNameAndParameters(args: { name: string; parameters: ABIParameter[] }): Promise; + static fromNameAndParameters(name: string, parameters: ABIParameter[]): Promise; + static async fromNameAndParameters( nameOrArgs: string | { name: string; parameters: ABIParameter[] }, maybeParameters?: ABIParameter[], - ): FunctionSelector { + ): Promise { const { name, parameters } = typeof nameOrArgs === 'string' ? { name: nameOrArgs, parameters: maybeParameters! } : nameOrArgs; const signature = decodeFunctionSignature(name, parameters); - const selector = this.fromSignature(signature); + const selector = await this.fromSignature(signature); // If using the debug logger here it kill the typing in the `server_world_state_synchronizer` and jest tests. // console.log(`selector for ${signature} is ${selector}`); return selector; diff --git a/yarn-project/foundation/src/crypto/keys/index.ts b/yarn-project/foundation/src/crypto/keys/index.ts index 7b2066717a2c..8fe45e4e94f0 100644 --- a/yarn-project/foundation/src/crypto/keys/index.ts +++ b/yarn-project/foundation/src/crypto/keys/index.ts @@ -2,8 +2,9 @@ import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; import { Fr } from '../../fields/fields.js'; -export function vkAsFieldsMegaHonk(input: Buffer): Fr[] { - return BarretenbergSync.getSingleton() +export async function vkAsFieldsMegaHonk(input: Buffer): Promise { + const api = await BarretenbergSync.getSingleton(); + return api .acirVkAsFieldsMegaHonk(new RawBuffer(input)) .map(bbFr => Fr.fromBuffer(Buffer.from(bbFr.toBuffer()))); // TODO(#4189): remove this conversion } diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts index fcbceafbfb68..6b14a8cdb72f 
100644 --- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts +++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts @@ -7,12 +7,13 @@ import { type Fieldable, serializeToFields } from '../../serialize/serialize.js' * Create a pedersen commitment (point) from an array of input fields. * Left pads any inputs less than 32 bytes. */ -export function pedersenCommit(input: Buffer[], offset = 0) { +export async function pedersenCommit(input: Buffer[], offset = 0) { if (!input.every(i => i.length <= 32)) { throw new Error('All Pedersen Commit input buffers must be <= 32 bytes.'); } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); - const point = BarretenbergSync.getSingleton().pedersenCommit( + const api = await BarretenbergSync.getSingleton(); + const point = api.pedersenCommit( input.map(i => new FrBarretenberg(i)), offset, ); @@ -27,11 +28,12 @@ export function pedersenCommit(input: Buffer[], offset = 0) { * @param index - The separator index to use for the hash. * @returns The pedersen hash. */ -export function pedersenHash(input: Fieldable[], index = 0): Fr { +export async function pedersenHash(input: Fieldable[], index = 0): Promise { const inputFields = serializeToFields(input); + const api = await BarretenbergSync.getSingleton(); return Fr.fromBuffer( Buffer.from( - BarretenbergSync.getSingleton() + api .pedersenHash( inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion index, @@ -44,6 +46,7 @@ export function pedersenHash(input: Fieldable[], index = 0): Fr { /** * Create a pedersen hash from an arbitrary length buffer. 
*/ -export function pedersenHashBuffer(input: Buffer, index = 0) { - return Buffer.from(BarretenbergSync.getSingleton().pedersenHashBuffer(input, index).toBuffer()); +export async function pedersenHashBuffer(input: Buffer, index = 0) { + const api = await BarretenbergSync.getSingleton(); + return Buffer.from(api.pedersenHashBuffer(input, index).toBuffer()); } diff --git a/yarn-project/foundation/src/crypto/poseidon/index.ts b/yarn-project/foundation/src/crypto/poseidon/index.ts index aad83209f2f6..e62173c86cc9 100644 --- a/yarn-project/foundation/src/crypto/poseidon/index.ts +++ b/yarn-project/foundation/src/crypto/poseidon/index.ts @@ -8,11 +8,12 @@ import { type Fieldable, serializeToFields } from '../../serialize/serialize.js' * @param input - The input fields to hash. * @returns The poseidon hash. */ -export function poseidon2Hash(input: Fieldable[]): Fr { +export async function poseidon2Hash(input: Fieldable[]): Promise { const inputFields = serializeToFields(input); + const api = await BarretenbergSync.getSingleton(); return Fr.fromBuffer( Buffer.from( - BarretenbergSync.getSingleton() + api .poseidon2Hash( inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion ) @@ -27,12 +28,13 @@ export function poseidon2Hash(input: Fieldable[]): Fr { * @param separator - The domain separator. * @returns The poseidon hash. 
*/ -export function poseidon2HashWithSeparator(input: Fieldable[], separator: number): Fr { +export async function poseidon2HashWithSeparator(input: Fieldable[], separator: number): Promise { + const api = await BarretenbergSync.getSingleton(); const inputFields = serializeToFields(input); inputFields.unshift(new Fr(separator)); return Fr.fromBuffer( Buffer.from( - BarretenbergSync.getSingleton() + api .poseidon2Hash( inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion ) @@ -57,11 +59,12 @@ export function poseidon2HashAccumulate(input: Fieldable[]): Fr { * @param input the input state. Expected to be of size 4. * @returns the output state, size 4. */ -export function poseidon2Permutation(input: Fieldable[]): Fr[] { +export async function poseidon2Permutation(input: Fieldable[]): Promise { const inputFields = serializeToFields(input); + const api = await BarretenbergSync.getSingleton(); // We'd like this assertion but it's not possible to use it in the browser. // assert(input.length === 4, 'Input state must be of size 4'); - const res = BarretenbergSync.getSingleton().poseidon2Permutation( + const res = api.poseidon2Permutation( inputFields.map(i => new FrBarretenberg(i.toBuffer())), ); // We'd like this assertion but it's not possible to use it in the browser. 
@@ -69,7 +72,7 @@ export function poseidon2Permutation(input: Fieldable[]): Fr[] { return res.map(o => Fr.fromBuffer(Buffer.from(o.toBuffer()))); } -export function poseidon2HashBytes(input: Buffer): Fr { +export async function poseidon2HashBytes(input: Buffer): Promise { const inputFields = []; for (let i = 0; i < input.length; i += 31) { const fieldBytes = Buffer.alloc(32, 0); @@ -80,9 +83,10 @@ export function poseidon2HashBytes(input: Buffer): Fr { inputFields.push(Fr.fromBuffer(fieldBytes)); } + const api = await BarretenbergSync.getSingleton(); return Fr.fromBuffer( Buffer.from( - BarretenbergSync.getSingleton() + api .poseidon2Hash( inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion ) diff --git a/yarn-project/foundation/src/fields/fields.test.ts b/yarn-project/foundation/src/fields/fields.test.ts index 491b0d797567..40c86a802823 100644 --- a/yarn-project/foundation/src/fields/fields.test.ts +++ b/yarn-project/foundation/src/fields/fields.test.ts @@ -190,8 +190,8 @@ describe('Bn254 arithmetic', () => { [new Fr(4), 2n], [new Fr(9), 3n], [new Fr(16), 4n], - ])('Should return the correct square root for %p', (input, expected) => { - const actual = input.sqrt()!.toBigInt(); + ])('Should return the correct square root for %p', async (input, expected) => { + const actual = (await input.sqrt())!.toBigInt(); // The square root can be either the expected value or the modulus - expected value const isValid = actual == expected || actual == Fr.MODULUS - expected; @@ -199,11 +199,11 @@ describe('Bn254 arithmetic', () => { expect(isValid).toBeTruthy(); }); - it('Should return the correct square root for random value', () => { + it('Should return the correct square root for random value', async () => { const a = Fr.random(); const squared = a.mul(a); - const actual = squared.sqrt(); + const actual = await squared.sqrt(); expect(actual!.mul(actual!)).toEqual(squared); }); }); diff --git 
a/yarn-project/foundation/src/fields/fields.ts b/yarn-project/foundation/src/fields/fields.ts index 84b559c4f3c9..258600a13e9d 100644 --- a/yarn-project/foundation/src/fields/fields.ts +++ b/yarn-project/foundation/src/fields/fields.ts @@ -286,8 +286,9 @@ export class Fr extends BaseField { * Computes a square root of the field element. * @returns A square root of the field element (null if it does not exist). */ - sqrt(): Fr | null { - const wasm = BarretenbergSync.getSingleton().getWasm(); + async sqrt(): Promise { + const api = await BarretenbergSync.getSingleton(); + const wasm = api.getWasm(); wasm.writeMemory(0, this.toBuffer()); wasm.call('bn254_fr_sqrt', 0, Fr.SIZE_IN_BYTES); const isSqrtBuf = Buffer.from(wasm.getMemorySlice(Fr.SIZE_IN_BYTES, Fr.SIZE_IN_BYTES + 1)); diff --git a/yarn-project/foundation/src/fields/point.test.ts b/yarn-project/foundation/src/fields/point.test.ts index f98771fb5c0e..67cd3ac15d20 100644 --- a/yarn-project/foundation/src/fields/point.test.ts +++ b/yarn-project/foundation/src/fields/point.test.ts @@ -31,7 +31,7 @@ describe('Point', () => { ); const [x, sign] = p.toXAndSign(); - const p2 = Point.fromXAndSign(x, sign); + const p2 = await Point.fromXAndSign(x, sign); expect(p.equals(p2)).toBeTruthy(); }); @@ -43,9 +43,9 @@ describe('Point', () => { expect(p.equals(p2)).toBeTruthy(); }); - it('converts to and from compressed buffer', () => { - const p = Point.random(); - const p2 = Point.fromCompressedBuffer(p.toCompressedBuffer()); + it('converts to and from compressed buffer', async () => { + const p = await Point.random(); + const p2 = await Point.fromCompressedBuffer(p.toCompressedBuffer()); expect(p.equals(p2)).toBeTruthy(); }); diff --git a/yarn-project/key-store/src/key_store.test.ts b/yarn-project/key-store/src/key_store.test.ts index a816660a9a79..3b9b983a34a5 100644 --- a/yarn-project/key-store/src/key_store.test.ts +++ b/yarn-project/key-store/src/key_store.test.ts @@ -9,9 +9,9 @@ describe('KeyStore', () => { // Arbitrary 
fixed values const sk = new Fr(8923n); - const keys = deriveKeys(sk); - const derivedMasterNullifierPublicKey = derivePublicKeyFromSecretKey(keys.masterNullifierSecretKey); - const computedMasterNullifierPublicKeyHash = derivedMasterNullifierPublicKey.hash(); + const keys = await deriveKeys(sk); + const derivedMasterNullifierPublicKey = await derivePublicKeyFromSecretKey(keys.masterNullifierSecretKey); + const computedMasterNullifierPublicKeyHash = await derivedMasterNullifierPublicKey.hash(); const partialAddress = new Fr(243523n); diff --git a/yarn-project/key-store/src/key_store.ts b/yarn-project/key-store/src/key_store.ts index c0d41bd9318d..b0e7e222c1e6 100644 --- a/yarn-project/key-store/src/key_store.ts +++ b/yarn-project/key-store/src/key_store.ts @@ -51,9 +51,9 @@ export class KeyStore { masterOutgoingViewingSecretKey, masterTaggingSecretKey, publicKeys, - } = deriveKeys(sk); + } = await deriveKeys(sk); - const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(sk, partialAddress); + const completeAddress = await CompleteAddress.fromSecretKeyAndPartialAddress(sk, partialAddress); const { address: account } = completeAddress; // Naming of keys is as follows ${account}-${n/iv/ov/t}${sk/pk}_m @@ -69,16 +69,22 @@ export class KeyStore { // We store pk_m_hash under `account-{n/iv/ov/t}pk_m_hash` key to be able to obtain address and key prefix // using the #getKeyPrefixAndAccount function later on - await this.#keys.set(`${account.toString()}-npk_m_hash`, publicKeys.masterNullifierPublicKey.hash().toBuffer()); + await this.#keys.set( + `${account.toString()}-npk_m_hash`, + (await publicKeys.masterNullifierPublicKey.hash()).toBuffer(), + ); await this.#keys.set( `${account.toString()}-ivpk_m_hash`, - publicKeys.masterIncomingViewingPublicKey.hash().toBuffer(), + (await publicKeys.masterIncomingViewingPublicKey.hash()).toBuffer(), ); await this.#keys.set( `${account.toString()}-ovpk_m_hash`, - 
publicKeys.masterOutgoingViewingPublicKey.hash().toBuffer(), + (await publicKeys.masterOutgoingViewingPublicKey.hash()).toBuffer(), + ); + await this.#keys.set( + `${account.toString()}-tpk_m_hash`, + (await publicKeys.masterTaggingPublicKey.hash()).toBuffer(), ); - await this.#keys.set(`${account.toString()}-tpk_m_hash`, publicKeys.masterTaggingPublicKey.hash().toBuffer()); // At last, we return the newly derived account address return Promise.resolve(completeAddress); @@ -102,7 +108,7 @@ export class KeyStore { * @param contractAddress - The contract address to silo the secret key in the key validation request with. * @returns The key validation request. */ - public getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { + public async getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { const [keyPrefix, account] = this.getKeyPrefixAndAccount(pkMHash); // Now we find the master public key for the account @@ -115,7 +121,7 @@ export class KeyStore { const pkM = Point.fromBuffer(pkMBuffer); - if (!pkM.hash().equals(pkMHash)) { + if (!(await pkM.hash()).equals(pkMHash)) { throw new Error(`Could not find ${keyPrefix}pkM for ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`); } @@ -130,12 +136,12 @@ export class KeyStore { const skM = GrumpkinScalar.fromBuffer(skMBuffer); // We sanity check that it's possible to derive the public key from the secret key - if (!derivePublicKeyFromSecretKey(skM).equals(pkM)) { + if (!(await derivePublicKeyFromSecretKey(skM)).equals(pkM)) { throw new Error(`Could not derive ${keyPrefix}pkM from ${keyPrefix}skM.`); } // At last we silo the secret key and return the key validation request - const skApp = computeAppSecretKey(skM, contractAddress, keyPrefix!); + const skApp = await computeAppSecretKey(skM, contractAddress, keyPrefix!); return Promise.resolve(new KeyValidationRequest(pkM, skApp)); } @@ -239,7 +245,7 @@ export class KeyStore { const masterOutgoingViewingSecretKey = 
GrumpkinScalar.fromBuffer(masterOutgoingViewingSecretKeyBuffer); return Promise.resolve( - poseidon2HashWithSeparator( + await poseidon2HashWithSeparator( [masterOutgoingViewingSecretKey.hi, masterOutgoingViewingSecretKey.lo, app], GeneratorIndex.OVSK_M, ), @@ -253,7 +259,7 @@ export class KeyStore { * @returns A Promise that resolves to sk_m. * @dev Used when feeding the sk_m to the kernel circuit for keys verification. */ - public getMasterSecretKey(pkM: PublicKey): Promise { + public async getMasterSecretKey(pkM: PublicKey): Promise { const [keyPrefix, account] = this.getKeyPrefixAndAccount(pkM); const secretKeyBuffer = this.#keys.get(`${account.toString()}-${keyPrefix}sk_m`); @@ -264,7 +270,7 @@ export class KeyStore { } const skM = GrumpkinScalar.fromBuffer(secretKeyBuffer); - if (!derivePublicKeyFromSecretKey(skM).equals(pkM)) { + if (!(await derivePublicKeyFromSecretKey(skM)).equals(pkM)) { throw new Error(`Could not find ${keyPrefix}skM for ${keyPrefix}pkM ${pkM.toString()} in secret keys buffer.`); } diff --git a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts index 54f30c2a61fd..6f8f5fc83613 100644 --- a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts @@ -71,7 +71,7 @@ export interface IndexedTree * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found. */ - getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): IndexedTreeLeafPreimage | undefined; + getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): Promise; /** * Batch insert multiple leaves into the tree. 
diff --git a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts index 44fd8c4d4d71..92cf744cad5a 100644 --- a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts @@ -21,7 +21,7 @@ export interface MerkleTree extends SiblingPathSo * Returns the current root of the tree. * @param includeUncommitted - Set to true to include uncommitted updates in the calculated root. */ - getRoot(includeUncommitted: boolean): Buffer; + getRoot(includeUncommitted: boolean): Promise; /** * Returns the number of leaves in the tree. @@ -49,7 +49,7 @@ export interface MerkleTree extends SiblingPathSo * @param index - The index of the leaf value to be returned. * @param includeUncommitted - Set to true to include uncommitted updates in the data set. */ - getLeafValue(index: bigint, includeUncommitted: boolean): T | undefined; + getLeafValue(index: bigint, includeUncommitted: boolean): Promise; /** * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. @@ -57,7 +57,7 @@ export interface MerkleTree extends SiblingPathSo * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The index of the first leaf found with a given value (undefined if not found). */ - findLeafIndex(leaf: T, includeUncommitted: boolean): bigint | undefined; + findLeafIndex(leaf: T, includeUncommitted: boolean): Promise; /** * Returns the first index containing a leaf value after `startIndex`. @@ -66,5 +66,5 @@ export interface MerkleTree extends SiblingPathSo * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The index of the first leaf found with a given value (undefined if not found). 
*/ - findLeafIndexAfter(leaf: T, startIndex: bigint, includeUncommitted: boolean): bigint | undefined; + findLeafIndexAfter(leaf: T, startIndex: bigint, includeUncommitted: boolean): Promise; } diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts index 8ddfa6365492..32ee6160d3bb 100644 --- a/yarn-project/merkle-tree/src/pedersen.ts +++ b/yarn-project/merkle-tree/src/pedersen.ts @@ -12,16 +12,16 @@ export class Pedersen implements Hasher { * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { - return pedersenHash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))]).toBuffer(); + public async hash(lhs: Uint8Array, rhs: Uint8Array): Promise { + return (await pedersenHash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))])).toBuffer(); } /* * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hashInputs(inputs: Buffer[]): Buffer { + public async hashInputs(inputs: Buffer[]): Promise { const inputFields = inputs.map(i => Fr.fromBuffer(i)); - return pedersenHash(inputFields).toBuffer(); + return (await pedersenHash(inputFields)).toBuffer(); } } diff --git a/yarn-project/merkle-tree/src/poseidon.ts b/yarn-project/merkle-tree/src/poseidon.ts index a7e81bf97f27..189b730d18f4 100644 --- a/yarn-project/merkle-tree/src/poseidon.ts +++ b/yarn-project/merkle-tree/src/poseidon.ts @@ -12,16 +12,16 @@ export class Poseidon implements Hasher { * @deprecated Don't call poseidon2 directly in production code. Instead, create suitably-named functions for specific * purposes. 
*/ - public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { - return poseidon2Hash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))]).toBuffer(); + public async hash(lhs: Uint8Array, rhs: Uint8Array): Promise { + return (await poseidon2Hash([Fr.fromBuffer(Buffer.from(lhs)), Fr.fromBuffer(Buffer.from(rhs))])).toBuffer(); } /* * @deprecated Don't call poseidon2 directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hashInputs(inputs: Buffer[]): Buffer { + public async hashInputs(inputs: Buffer[]): Promise { const inputFields = inputs.map(i => Fr.fromBuffer(i)); - return poseidon2Hash(inputFields).toBuffer(); + return (await poseidon2Hash(inputFields)).toBuffer(); } } diff --git a/yarn-project/merkle-tree/src/sha_256.ts b/yarn-project/merkle-tree/src/sha_256.ts index a1cba65c5ae2..6effa572930e 100644 --- a/yarn-project/merkle-tree/src/sha_256.ts +++ b/yarn-project/merkle-tree/src/sha_256.ts @@ -12,7 +12,7 @@ export class SHA256 implements Hasher { * @deprecated Don't call SHA256 directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { + public async hash(lhs: Uint8Array, rhs: Uint8Array): Promise { return sha256(Buffer.concat([Buffer.from(lhs), Buffer.from(rhs)])); } @@ -20,7 +20,7 @@ export class SHA256 implements Hasher { * @deprecated Don't call SHA256 directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hashInputs(inputs: Buffer[]): Buffer { + public async hashInputs(inputs: Buffer[]): Promise { return sha256(Buffer.concat(inputs)); } } @@ -35,7 +35,7 @@ export class SHA256Trunc implements Hasher { * @deprecated Don't call SHA256 directly in production code. Instead, create suitably-named functions for specific * purposes. 
*/ - public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { + public async hash(lhs: Uint8Array, rhs: Uint8Array): Promise { return truncateAndPad(sha256(Buffer.concat([Buffer.from(lhs), Buffer.from(rhs)]))); } @@ -43,7 +43,7 @@ export class SHA256Trunc implements Hasher { * @deprecated Don't call SHA256 directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public hashInputs(inputs: Buffer[]): Buffer { + public async hashInputs(inputs: Buffer[]): Promise { return truncateAndPad(sha256(Buffer.concat(inputs))); } } diff --git a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts index eca6d258f360..e08ff72968cd 100644 --- a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts @@ -76,8 +76,8 @@ export class AppendOnlySnapshotBuilder implements TreeSnap ); } - snapshot(block: number): Promise> { - return this.db.transaction(() => { + async snapshot(block: number): Promise> { + return await this.db.transaction(async () => { const meta = this.#getSnapshotMeta(block); if (typeof meta !== 'undefined') { // no-op, we already have a snapshot @@ -93,7 +93,7 @@ export class AppendOnlySnapshotBuilder implements TreeSnap ); } - const root = this.tree.getRoot(false); + const root = await this.tree.getRoot(false); const depth = this.tree.getDepth(); const queue: [Buffer, number, bigint][] = [[root, 0, 0n]]; @@ -169,7 +169,7 @@ class AppendOnlySnapshot implements TreeSnapshot { private deserializer: FromBuffer, ) {} - public getSiblingPath(index: bigint): SiblingPath { + public async getSiblingPath(index: bigint): Promise> { const path: Buffer[] = []; const depth = this.tree.getDepth(); let level = depth; @@ -178,7 +178,7 @@ class AppendOnlySnapshot implements TreeSnapshot { const isRight = index & 0x01n; const siblingIndex = isRight ? 
index - 1n : index + 1n; - const sibling = this.#getHistoricalNodeValue(level, siblingIndex); + const sibling = await this.#getHistoricalNodeValue(level, siblingIndex); path.push(sibling); level -= 1; @@ -201,7 +201,7 @@ class AppendOnlySnapshot implements TreeSnapshot { return this.historicalRoot; } - getLeafValue(index: bigint): T | undefined { + async getLeafValue(index: bigint): Promise { const leafLevel = this.getDepth(); const blockNumber = this.#getBlockNumberThatModifiedNode(leafLevel, index); @@ -220,7 +220,7 @@ class AppendOnlySnapshot implements TreeSnapshot { return undefined; } - #getHistoricalNodeValue(level: number, index: bigint): Buffer { + async #getHistoricalNodeValue(level: number, index: bigint): Promise { const blockNumber = this.#getBlockNumberThatModifiedNode(level, index); // node has never been set @@ -248,27 +248,27 @@ class AppendOnlySnapshot implements TreeSnapshot { return this.tree.getZeroHash(level); } - const [lhs, rhs] = [ + const [lhs, rhs] = await Promise.all([ this.#getHistoricalNodeValue(level + 1, 2n * index), this.#getHistoricalNodeValue(level + 1, 2n * index + 1n), - ]; + ]); - return this.hasher.hash(lhs, rhs); + return await this.hasher.hash(lhs, rhs); } #getBlockNumberThatModifiedNode(level: number, index: bigint): number | undefined { return this.nodeHistory.get(nodeModifiedAtBlockKey(level, index)); } - findLeafIndex(value: T): bigint | undefined { + async findLeafIndex(value: T): Promise { return this.findLeafIndexAfter(value, 0n); } - findLeafIndexAfter(value: T, startIndex: bigint): bigint | undefined { + async findLeafIndexAfter(value: T, startIndex: bigint): Promise { const valueBuffer = serializeToBuffer(value); const numLeaves = this.getNumLeaves(); for (let i = startIndex; i < numLeaves; i++) { - const currentValue = this.getLeafValue(i); + const currentValue = await this.getLeafValue(i); if (currentValue && serializeToBuffer(currentValue).equals(valueBuffer)) { return i; } diff --git 
a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts index 63e416416220..fac7ebfe731a 100644 --- a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts @@ -42,15 +42,15 @@ export abstract class BaseFullTreeSnapshotBuilder this.snapshotMetadata = db.openMap(`full_snapshot:${tree.getName()}:metadata`); } - snapshot(block: number): Promise { - return this.db.transaction(() => { + async snapshot(block: number): Promise { + return await this.db.transaction(async () => { const snapshotMetadata = this.#getSnapshotMeta(block); if (snapshotMetadata) { return this.openSnapshot(snapshotMetadata.root, snapshotMetadata.numLeaves); } - const root = this.tree.getRoot(false); + const root = await this.tree.getRoot(false); const numLeaves = this.tree.getNumLeaves(false); const depth = this.tree.getDepth(); const queue: [Buffer, number, bigint][] = [[root, 0, 0n]]; @@ -80,7 +80,7 @@ export abstract class BaseFullTreeSnapshotBuilder const [lhs, rhs] = [this.tree.getNode(level + 1, 2n * i), this.tree.getNode(level + 1, 2n * i + 1n)]; // we want the zero hash at the children's level, not the node's level - const zeroHash = this.tree.getZeroHash(level + 1); + const zeroHash = await this.tree.getZeroHash(level + 1); void this.nodes.set(nodeKey, [lhs ?? zeroHash, rhs ?? 
zeroHash]); // enqueue the children only if they're not zero hashes @@ -129,10 +129,10 @@ export class BaseFullTreeSnapshot implements TreeSnapshot< protected deserializer: FromBuffer, ) {} - getSiblingPath(index: bigint): SiblingPath { + async getSiblingPath(index: bigint): Promise> { const siblings: Buffer[] = []; - for (const [_node, sibling] of this.pathFromRootToLeaf(index)) { + for await (const [_node, sibling] of this.pathFromRootToLeaf(index)) { siblings.push(sibling); } @@ -143,9 +143,9 @@ export class BaseFullTreeSnapshot implements TreeSnapshot< return new SiblingPath(this.tree.getDepth() as N, siblings); } - getLeafValue(index: bigint): T | undefined { + async getLeafValue(index: bigint): Promise { let leafNode: Buffer | undefined = undefined; - for (const [node, _sibling] of this.pathFromRootToLeaf(index)) { + for await (const [node, _sibling] of this.pathFromRootToLeaf(index)) { leafNode = node; } @@ -164,7 +164,7 @@ export class BaseFullTreeSnapshot implements TreeSnapshot< return this.numLeaves; } - protected *pathFromRootToLeaf(leafIndex: bigint) { + protected async *pathFromRootToLeaf(leafIndex: bigint) { const root = this.historicRoot; const pathFromRoot = this.#getPathFromRoot(leafIndex); @@ -172,8 +172,8 @@ export class BaseFullTreeSnapshot implements TreeSnapshot< for (let i = 0; i < pathFromRoot.length; i++) { // get both children. We'll need both anyway (one to keep track of, the other to walk down to) const children: [Buffer, Buffer] = this.db.get(node.toString('hex')) ?? 
[ - this.tree.getZeroHash(i + 1), - this.tree.getZeroHash(i + 1), + await this.tree.getZeroHash(i + 1), + await this.tree.getZeroHash(i + 1), ]; const next = children[pathFromRoot[i]]; const sibling = children[(pathFromRoot[i] + 1) % 2]; @@ -204,15 +204,15 @@ export class BaseFullTreeSnapshot implements TreeSnapshot< return path; } - findLeafIndex(value: T): bigint | undefined { + async findLeafIndex(value: T): Promise { return this.findLeafIndexAfter(value, 0n); } - public findLeafIndexAfter(value: T, startIndex: bigint): bigint | undefined { + public async findLeafIndexAfter(value: T, startIndex: bigint): Promise { const numLeaves = this.getNumLeaves(); const buffer = serializeToBuffer(value); for (let i = startIndex; i < numLeaves; i++) { - const currentValue = this.getLeafValue(i); + const currentValue = await this.getLeafValue(i); if (currentValue && serializeToBuffer(currentValue).equals(buffer)) { return i; } diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts index 1d70549450da..3ca7ff6c0fed 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts @@ -23,8 +23,8 @@ export class IndexedTreeSnapshotBuilder return new IndexedTreeSnapshotImpl(this.nodes, this.leaves, root, numLeaves, this.tree, this.leafPreimageBuilder); } - protected override handleLeaf(index: bigint, node: Buffer) { - const leafPreimage = this.tree.getLatestLeafPreimageCopy(index, false); + protected override async handleLeaf(index: bigint, node: Buffer) { + const leafPreimage = await this.tree.getLatestLeafPreimageCopy(index, false); if (leafPreimage) { void this.leaves.set(snapshotLeafValue(node, index), leafPreimage.toBuffer()); } @@ -44,13 +44,13 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements In super(db, historicRoot, numLeaves, tree, { fromBuffer: buf => buf }); } - override 
getLeafValue(index: bigint): Buffer | undefined { - const leafPreimage = this.getLatestLeafPreimageCopy(index); + override async getLeafValue(index: bigint): Promise { + const leafPreimage = await this.getLatestLeafPreimageCopy(index); return leafPreimage?.toBuffer(); } - getLatestLeafPreimageCopy(index: bigint): IndexedTreeLeafPreimage | undefined { - const leafNode = super.getLeafValue(index); + async getLatestLeafPreimageCopy(index: bigint): Promise { + const leafNode = await super.getLeafValue(index); const leafValue = this.leaves.get(snapshotLeafValue(leafNode!, index)); if (leafValue) { return this.leafPreimageBuilder.fromBuffer(leafValue); @@ -59,7 +59,7 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements In } } - findIndexOfPreviousKey(newValue: bigint): { + async findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ @@ -68,13 +68,13 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements In * A flag indicating if the corresponding leaf's value is equal to `newValue`. 
*/ alreadyPresent: boolean; - } { + }> { const numLeaves = this.getNumLeaves(); const diff: bigint[] = []; for (let i = 0; i < numLeaves; i++) { // this is very inefficient - const storedLeaf = this.getLatestLeafPreimageCopy(BigInt(i))!; + const storedLeaf = await this.getLatestLeafPreimageCopy(BigInt(i))!; // The stored leaf can be undefined if it addresses an empty leaf // If the leaf is empty we do the same as if the leaf was larger @@ -99,8 +99,8 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements In return { index: BigInt(minIndex), alreadyPresent: false }; } - override findLeafIndex(value: Buffer): bigint | undefined { - const index = this.tree.findLeafIndex(value, false); + override async findLeafIndex(value: Buffer): Promise { + const index = await this.tree.findLeafIndex(value, false); if (index !== undefined && index < this.getNumLeaves()) { return index; } diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts index 5821ae951046..66c92a1d6e59 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts @@ -42,13 +42,13 @@ export interface TreeSnapshot { * Returns the value of a leaf at the specified index. * @param index - The index of the leaf value to be returned. */ - getLeafValue(index: bigint): T | undefined; + getLeafValue(index: bigint): Promise; /** * Returns the sibling path for a requested leaf index. * @param index - The index of the leaf for which a sibling path is required. */ - getSiblingPath(index: bigint): SiblingPath; + getSiblingPath(index: bigint): Promise>; /** * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. @@ -56,7 +56,7 @@ export interface TreeSnapshot { * @param value - The leaf value to look for. * @returns The index of the first leaf found with a given value (undefined if not found). 
*/ - findLeafIndex(value: T): bigint | undefined; + findLeafIndex(value: T): Promise; /** * Returns the first index containing a leaf value after `startIndex`. @@ -64,7 +64,7 @@ export interface TreeSnapshot { * @param startIndex - The index to start searching from (used when skipping nullified messages) * @returns The index of the first leaf found with a given value (undefined if not found). */ - findLeafIndexAfter(leaf: T, startIndex: bigint): bigint | undefined; + findLeafIndexAfter(leaf: T, startIndex: bigint): Promise; } /** A snapshot of an indexed tree */ @@ -73,14 +73,14 @@ export interface IndexedTreeSnapshot extends TreeSnapshot { * Gets the historical data for a leaf * @param index - The index of the leaf to get the data for */ - getLatestLeafPreimageCopy(index: bigint): IndexedTreeLeafPreimage | undefined; + getLatestLeafPreimageCopy(index: bigint): Promise; /** * Finds the index of the largest leaf whose value is less than or equal to the provided value. * @param newValue - The new value to be inserted into the tree. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousKey(newValue: bigint): { + findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ @@ -89,5 +89,5 @@ export interface IndexedTreeSnapshot extends TreeSnapshot { * A flag indicating if the corresponding leaf's value is equal to `newValue`. 
*/ alreadyPresent: boolean; - }; + }>; } diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts index b1bcd9a8fa1c..2416399cf10e 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts @@ -155,7 +155,7 @@ export function describeSnapshotBuilderTestSuite< await modifyTree(tree); await tree.commit(); const snapshot = await snapshotBuilder.snapshot(1); - const historicalRoot = tree.getRoot(false); + const historicalRoot = await tree.getRoot(false); await modifyTree(tree); await tree.commit(); @@ -210,14 +210,14 @@ export function describeSnapshotBuilderTestSuite< const snapshot = await snapshotBuilder.snapshot(1); const initialLastLeafIndex = tree.getNumLeaves(false) - 1n; - let lastLeaf = tree.getLeafValue(initialLastLeafIndex, false); + let lastLeaf = await tree.getLeafValue(initialLastLeafIndex, false); expect(snapshot.findLeafIndex(lastLeaf!)).toBe(initialLastLeafIndex); await modifyTree(tree); await tree.commit(); const newLastLeafIndex = tree.getNumLeaves(false) - 1n; - lastLeaf = tree.getLeafValue(newLastLeafIndex, false); + lastLeaf = await tree.getLeafValue(newLastLeafIndex, false); expect(snapshot.findLeafIndex(lastLeaf!)).toBe(undefined); }); diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts index 56cbaee0a58c..a04e95e347d1 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts @@ -58,7 +58,7 @@ describe('SparseTreeSpecific', () => { const tree = await createDb(db, pedersen, 'test', depth); const index = 2n ** BigInt(depth); - expect(() => tree.updateLeaf(Buffer.alloc(32), index)).toThrow(); + await expect(() => tree.updateLeaf(Buffer.alloc(32), index)).rejects.toThrow(); }); it('updating 
non-empty leaf does not change tree size', async () => { @@ -103,8 +103,8 @@ describe('SparseTreeSpecific', () => { const db = openTmpStore(); const tree = await createDb(db, pedersen, 'test', 3); - const level2ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); - const level1ZeroHash = pedersen.hash(level2ZeroHash, level2ZeroHash); + const level2ZeroHash = await pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level1ZeroHash = await pedersen.hash(level2ZeroHash, level2ZeroHash); expect(tree.getNumLeaves(false)).toEqual(0n); expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); @@ -115,9 +115,9 @@ describe('SparseTreeSpecific', () => { { await tree.updateLeaf(leafAtIndex3, 3n); expect(tree.getNumLeaves(true)).toEqual(1n); - const level2Hash = pedersen.hash(INITIAL_LEAF, leafAtIndex3); - level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash); - const root = pedersen.hash(level1LeftHash, level1ZeroHash); + const level2Hash = await pedersen.hash(INITIAL_LEAF, leafAtIndex3); + level1LeftHash = await pedersen.hash(level2ZeroHash, level2Hash); + const root = await pedersen.hash(level1LeftHash, level1ZeroHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(3n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1ZeroHash]), @@ -130,9 +130,9 @@ describe('SparseTreeSpecific', () => { const leafAtIndex6 = Fr.random().toBuffer(); await tree.updateLeaf(leafAtIndex6, 6n); expect(tree.getNumLeaves(true)).toEqual(2n); - const level2Hash = pedersen.hash(leafAtIndex6, INITIAL_LEAF); - level1RightHash = pedersen.hash(level2ZeroHash, level2Hash); - const root = pedersen.hash(level1LeftHash, level1RightHash); + const level2Hash = await pedersen.hash(leafAtIndex6, INITIAL_LEAF); + level1RightHash = await pedersen.hash(level2ZeroHash, level2Hash); + const root = await pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await 
tree.getSiblingPath(6n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1LeftHash]), @@ -144,9 +144,9 @@ describe('SparseTreeSpecific', () => { { await tree.updateLeaf(leafAtIndex2, 2n); expect(tree.getNumLeaves(true)).toEqual(3n); - const level2Hash = pedersen.hash(leafAtIndex2, leafAtIndex3); - level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash); - const root = pedersen.hash(level1LeftHash, level1RightHash); + const level2Hash = await pedersen.hash(leafAtIndex2, leafAtIndex3); + level1LeftHash = await pedersen.hash(level2ZeroHash, level2Hash); + const root = await pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(2n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex3, level2ZeroHash, level1RightHash]), @@ -158,9 +158,9 @@ describe('SparseTreeSpecific', () => { const updatedLeafAtIndex3 = Fr.random().toBuffer(); await tree.updateLeaf(updatedLeafAtIndex3, 3n); expect(tree.getNumLeaves(true)).toEqual(3n); - const level2Hash = pedersen.hash(leafAtIndex2, updatedLeafAtIndex3); - level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash); - const root = pedersen.hash(level1LeftHash, level1RightHash); + const level2Hash = await pedersen.hash(leafAtIndex2, updatedLeafAtIndex3); + level1LeftHash = await pedersen.hash(level2ZeroHash, level2Hash); + const root = await pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(3n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex2, level2ZeroHash, level1RightHash]), diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts index 3ea1a8d9eb64..31aa1b3103e9 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts @@ -15,14 +15,14 @@ export class SparseTree extends TreeBase implements 
Upd * @param leaf - New contents of the leaf. * @param index - Index of the leaf to be updated. */ - public updateLeaf(value: T, index: bigint): Promise { + public async updateLeaf(value: T, index: bigint): Promise { if (index > this.maxIndex) { throw Error(`Index out of bounds. Index ${index}, max index: ${this.maxIndex}.`); } const leaf = serializeToBuffer(value); const insertingZeroElement = leaf.equals(INITIAL_LEAF); - const originallyZeroElement = this.getLeafBuffer(index, true)?.equals(INITIAL_LEAF); + const originallyZeroElement = (await this.getLeafBuffer(index, true))?.equals(INITIAL_LEAF); if (insertingZeroElement && originallyZeroElement) { return Promise.resolve(); } @@ -46,11 +46,15 @@ export class SparseTree extends TreeBase implements Upd return this.#snapshotBuilder.getSnapshot(block); } - public findLeafIndex(_value: T, _includeUncommitted: boolean): bigint | undefined { + public async findLeafIndex(_value: T, _includeUncommitted: boolean): Promise { throw new Error('Finding leaf index is not supported for sparse trees'); } - public findLeafIndexAfter(_value: T, _startIndex: bigint, _includeUncommitted: boolean): bigint | undefined { + public async findLeafIndexAfter( + _value: T, + _startIndex: bigint, + _includeUncommitted: boolean, + ): Promise { throw new Error('Finding leaf index is not supported for sparse trees'); } } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index 63823f35620c..b30442fe4bf5 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -115,8 +115,8 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * @param includeUncommitted - Indicates whether to include uncommitted leaves in the computation. 
* @returns The value of the leaf at the given index or undefined if the leaf is empty. */ - public override getLeafValue(index: bigint, includeUncommitted: boolean): Buffer | undefined { - const preimage = this.getLatestLeafPreimageCopy(index, includeUncommitted); + public override async getLeafValue(index: bigint, includeUncommitted: boolean): Promise { + const preimage = await this.getLatestLeafPreimageCopy(index, includeUncommitted); return preimage && preimage.toBuffer(); } @@ -214,7 +214,10 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found. */ - public getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): IndexedTreeLeafPreimage | undefined { + public async getLatestLeafPreimageCopy( + index: bigint, + includeUncommitted: boolean, + ): Promise { const preimage = !includeUncommitted ? this.getDbPreimage(index) : this.getCachedPreimage(index) ?? this.getDbPreimage(index); @@ -227,7 +230,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The index of the first leaf found with a given value (undefined if not found). 
*/ - public findLeafIndex(value: Buffer, includeUncommitted: boolean): bigint | undefined { + public async findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise { const leaf = this.leafFactory.fromBuffer(value); let index = this.leafIndex.get(buildDbKeyForLeafIndex(this.getName(), leaf.getKey())); @@ -239,7 +242,11 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree return index; } - public findLeafIndexAfter(_leaf: Buffer, _startIndex: bigint, _includeUncommitted: boolean): bigint | undefined { + public async findLeafIndexAfter( + _leaf: Buffer, + _startIndex: bigint, + _includeUncommitted: boolean, + ): Promise { throw new Error('Method not implemented for indexed trees'); } @@ -304,13 +311,13 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * @param preimage - New contents of the leaf. * @param index - Index of the leaf to be updated. */ - protected updateLeaf(preimage: IndexedTreeLeafPreimage, index: bigint) { + protected async updateLeaf(preimage: IndexedTreeLeafPreimage, index: bigint) { if (index > this.maxIndex) { throw Error(`Index out of bounds. 
Index ${index}, max index: ${this.maxIndex}.`); } this.cachedLeafPreimages[index.toString()] = preimage; - const encodedLeaf = this.encodeLeaf(preimage, true); + const encodedLeaf = await this.encodeLeaf(preimage, true); this.addLeafToCacheAndHashToRoot(encodedLeaf, index); const numLeaves = this.getNumLeaves(true); if (index >= numLeaves) { @@ -513,7 +520,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree const isUpdate = indexOfPrevious.alreadyPresent; // get the low leaf (existence checked in getting index) - const lowLeafPreimage = this.getLatestLeafPreimageCopy(indexOfPrevious.index, true)!; + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(indexOfPrevious.index, true))!; const siblingPath = await this.getSiblingPath(BigInt(indexOfPrevious.index), true); const witness: LeafUpdateWitnessData = { @@ -534,7 +541,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree lowLeafPreimage.getNextIndex(), ); - this.updateLeaf(newLowLeafPreimage, indexOfPrevious.index); + await this.updateLeaf(newLowLeafPreimage, indexOfPrevious.index); pendingInsertionSubtree[originalIndex] = this.leafPreimageFactory.empty(); } else { @@ -544,7 +551,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree startInsertionIndex + BigInt(originalIndex), ); - this.updateLeaf(newLowLeafPreimage, indexOfPrevious.index); + await this.updateLeaf(newLowLeafPreimage, indexOfPrevious.index); const currentPendingPreimageLeaf = this.leafPreimageFactory.fromLeaf( newLeaf, @@ -609,15 +616,17 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * @param hash0Leaf - Indicates whether 0 value leaf should be hashed. See {@link encodeLeaf}. 
* @returns Empty promise */ - private encodeAndAppendLeaves(preimages: IndexedTreeLeafPreimage[], hash0Leaf: boolean): void { + private async encodeAndAppendLeaves(preimages: IndexedTreeLeafPreimage[], hash0Leaf: boolean): Promise { const startInsertionIndex = this.getNumLeaves(true); - const hashedLeaves = preimages.map((preimage, i) => { - this.cachedLeafPreimages[(startInsertionIndex + BigInt(i)).toString()] = preimage; - return this.encodeLeaf(preimage, hash0Leaf); - }); + const hashedLeaves = await Promise.all( + preimages.map(async (preimage, i) => { + this.cachedLeafPreimages[(startInsertionIndex + BigInt(i)).toString()] = preimage; + return await this.encodeLeaf(preimage, hash0Leaf); + }), + ); - super.appendLeaves(hashedLeaves); + await super.appendLeaves(hashedLeaves); } /** @@ -628,12 +637,12 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree * nullifier it is improbable that a valid nullifier would be 0. * @returns Leaf encoded in a buffer. */ - private encodeLeaf(leaf: IndexedTreeLeafPreimage, hash0Leaf: boolean): Buffer { + private async encodeLeaf(leaf: IndexedTreeLeafPreimage, hash0Leaf: boolean): Promise { let encodedLeaf; if (!hash0Leaf && leaf.getKey() == 0n) { encodedLeaf = toBufferBE(0n, 32); } else { - encodedLeaf = this.hasher.hashInputs(leaf.toHashInputs()); + encodedLeaf = await this.hasher.hashInputs(leaf.toHashInputs()); } return encodedLeaf; } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts index d15aa61326ec..11e4d431a5be 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts @@ -110,19 +110,19 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 0 0 0 0 0 0 0 0. 
*/ - const initialLeafHash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 0, 0)); - const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); - const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); + const initialLeafHash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 0, 0)); + const level1ZeroHash = await pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = await pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; // Each element is named by the level followed by the index on that level. E.g. e10 -> level 1, index 0, e21 -> level 2, index 1 - let e10 = pedersen.hash(index0Hash, INITIAL_LEAF); - let e20 = pedersen.hash(e10, level1ZeroHash); + let e10 = await pedersen.hash(index0Hash, INITIAL_LEAF); + let e20 = await pedersen.hash(e10, level1ZeroHash); const initialE20 = e20; // Kept for calculating committed state later const initialE10 = e10; - let root = pedersen.hash(e20, level2ZeroHash); + let root = await pedersen.hash(e20, level2ZeroHash); const initialRoot = root; const emptySiblingPath = new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash, level2ZeroHash]); @@ -144,11 +144,11 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. 
*/ - index0Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, level1ZeroHash); - root = pedersen.hash(e20, level2ZeroHash); + index0Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 1, 30)); + let index1Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, level1ZeroHash); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(30n, 32)]); @@ -171,12 +171,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 1, 30)); - e10 = pedersen.hash(index0Hash, index1Hash); - let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); - e20 = pedersen.hash(e10, e11); - root = pedersen.hash(e20, level2ZeroHash); + index0Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 2, 10)); + let index2Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 1, 30)); + e10 = await pedersen.hash(index0Hash, index1Hash); + let e11 = await pedersen.hash(index2Hash, INITIAL_LEAF); + e20 = await pedersen.hash(e10, e11); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(10n, 32)]); @@ -203,12 +203,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 3 1 0 0 0 0 * nextVal 10 0 20 30 0 0 0 0. 
*/ - e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(20, 1, 30)); - e11 = pedersen.hash(index2Hash, index3Hash); - e20 = pedersen.hash(e10, e11); - root = pedersen.hash(e20, level2ZeroHash); + e10 = await pedersen.hash(index0Hash, index1Hash); + index2Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 3, 20)); + const index3Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(20, 1, 30)); + e11 = await pedersen.hash(index2Hash, index3Hash); + e20 = await pedersen.hash(e10, e11); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(20n, 32)]); @@ -235,13 +235,13 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 4 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 4, 50)); - const index4Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(50, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, e11); - const e12 = pedersen.hash(index4Hash, INITIAL_LEAF); - const e21 = pedersen.hash(e12, level1ZeroHash); - root = pedersen.hash(e20, e21); + index1Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 4, 50)); + const index4Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(50, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, e11); + const e12 = await pedersen.hash(index4Hash, INITIAL_LEAF); + const e21 = await pedersen.hash(e12, level1ZeroHash); + root = await pedersen.hash(e20, e21); await tree.appendLeaves([toBufferBE(50n, 32)]); @@ -307,18 +307,18 @@ describe('StandardIndexedTreeSpecific', () => { */ const INITIAL_LEAF = toBufferBE(0n, 32); - const initialLeafHash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 0, 0)); - const level1ZeroHash = 
pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); - const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); + const initialLeafHash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 0, 0)); + const level1ZeroHash = await pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = await pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; - let e10 = pedersen.hash(index0Hash, INITIAL_LEAF); - let e20 = pedersen.hash(e10, level1ZeroHash); + let e10 = await pedersen.hash(index0Hash, INITIAL_LEAF); + let e20 = await pedersen.hash(e10, level1ZeroHash); const inite10 = e10; const inite20 = e20; - let root = pedersen.hash(e20, level2ZeroHash); + let root = await pedersen.hash(e20, level2ZeroHash); const initialRoot = root; const emptySiblingPath = new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash, level2ZeroHash]); @@ -341,11 +341,11 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, level1ZeroHash); - root = pedersen.hash(e20, level2ZeroHash); + index0Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 1, 30)); + let index1Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, level1ZeroHash); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(30n, 32)]); @@ -367,12 +367,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. 
*/ - index0Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 1, 30)); - e10 = pedersen.hash(index0Hash, index1Hash); - let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); - e20 = pedersen.hash(e10, e11); - root = pedersen.hash(e20, level2ZeroHash); + index0Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(0, 2, 10)); + let index2Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 1, 30)); + e10 = await pedersen.hash(index0Hash, index1Hash); + let e11 = await pedersen.hash(index2Hash, INITIAL_LEAF); + e20 = await pedersen.hash(e10, e11); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(10n, 32)]); @@ -399,12 +399,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 3 1 0 0 0 0 * nextVal 10 0 20 30 0 0 0 0. */ - e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(20, 1, 30)); - e11 = pedersen.hash(index2Hash, index3Hash); - e20 = pedersen.hash(e10, e11); - root = pedersen.hash(e20, level2ZeroHash); + e10 = await pedersen.hash(index0Hash, index1Hash); + index2Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(10, 3, 20)); + const index3Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(20, 1, 30)); + e11 = await pedersen.hash(index2Hash, index3Hash); + e20 = await pedersen.hash(e10, e11); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(20n, 32)]); @@ -439,13 +439,13 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 6 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. 
*/ - index1Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 6, 50)); - const index6Hash = pedersen.hashInputs(createNullifierTreeLeafHashInputs(50, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, e11); - const e13 = pedersen.hash(index6Hash, INITIAL_LEAF); - const e21 = pedersen.hash(level1ZeroHash, e13); - root = pedersen.hash(e20, e21); + index1Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(30, 6, 50)); + const index6Hash = await pedersen.hashInputs(createNullifierTreeLeafHashInputs(50, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, e11); + const e13 = await pedersen.hash(index6Hash, INITIAL_LEAF); + const e21 = await pedersen.hash(level1ZeroHash, e13); + root = await pedersen.hash(e20, e21); await tree.appendLeaves([toBufferBE(50n, 32)]); @@ -556,17 +556,17 @@ describe('StandardIndexedTreeSpecific', () => { */ const EMPTY_LEAF = toBufferBE(0n, 32); - const initialLeafHash = pedersen.hashInputs(createPublicDataTreeLeafHashInputs(0, 0, 0, 0)); - const level1ZeroHash = pedersen.hash(EMPTY_LEAF, EMPTY_LEAF); - const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); + const initialLeafHash = await pedersen.hashInputs(createPublicDataTreeLeafHashInputs(0, 0, 0, 0)); + const level1ZeroHash = await pedersen.hash(EMPTY_LEAF, EMPTY_LEAF); + const level2ZeroHash = await pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; - let e10 = pedersen.hash(index0Hash, EMPTY_LEAF); - let e20 = pedersen.hash(e10, level1ZeroHash); + let e10 = await pedersen.hash(index0Hash, EMPTY_LEAF); + let e20 = await pedersen.hash(e10, level1ZeroHash); const inite10 = e10; - let root = pedersen.hash(e20, level2ZeroHash); + let root = await pedersen.hash(e20, level2ZeroHash); const initialRoot = root; const emptySiblingPath = new SiblingPath(TEST_TREE_DEPTH, [EMPTY_LEAF, level1ZeroHash, level2ZeroHash]); @@ -590,11 +590,11 @@ 
describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextSlot 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createPublicDataTreeLeafHashInputs(0, 0, 1, 30)); - let index1Hash = pedersen.hashInputs(createPublicDataTreeLeafHashInputs(30, 5, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, level1ZeroHash); - root = pedersen.hash(e20, level2ZeroHash); + index0Hash = await pedersen.hashInputs(createPublicDataTreeLeafHashInputs(0, 0, 1, 30)); + let index1Hash = await pedersen.hashInputs(createPublicDataTreeLeafHashInputs(30, 5, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, level1ZeroHash); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([createPublicDataTreeLeaf(30, 5).toBuffer()]); @@ -617,10 +617,10 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextSlot 30 0 0 0 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createPublicDataTreeLeafHashInputs(30, 10, 0, 0)); - e10 = pedersen.hash(index0Hash, index1Hash); - e20 = pedersen.hash(e10, level1ZeroHash); - root = pedersen.hash(e20, level2ZeroHash); + index1Hash = await pedersen.hashInputs(createPublicDataTreeLeafHashInputs(30, 10, 0, 0)); + e10 = await pedersen.hash(index0Hash, index1Hash); + e20 = await pedersen.hash(e10, level1ZeroHash); + root = await pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([createPublicDataTreeLeaf(30, 10).toBuffer()]); diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts index 0816f59edc5f..ce55164f8814 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts @@ -12,9 +12,9 @@ export class StandardIndexedTreeWithAppend 
extends StandardIndexedTree { * @returns Empty promise. * @remarks This method is inefficient and is here mostly for testing. Use batchInsert instead. */ - public override appendLeaves(leaves: Buffer[]): Promise { + public override async appendLeaves(leaves: Buffer[]): Promise { for (const leaf of leaves) { - this.appendLeaf(leaf); + await this.appendLeaf(leaf); } return Promise.resolve(); @@ -33,7 +33,7 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { * @param leaf - The leaf to append. * @returns Empty promise. */ - private appendLeaf(leaf: Buffer): void { + private async appendLeaf(leaf: Buffer): Promise { const newLeaf = this.leafFactory.fromBuffer(leaf); // Special case when appending zero @@ -48,7 +48,7 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { } const isUpdate = lowLeafIndex.alreadyPresent; - const lowLeafPreimage = this.getLatestLeafPreimageCopy(lowLeafIndex.index, true)!; + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(lowLeafIndex.index, true))!; const currentSize = this.getNumLeaves(true); if (isUpdate) { @@ -59,7 +59,7 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { lowLeafPreimage.getNextIndex(), ); - this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); + await this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); this.appendEmptyLeaf(); } else { const newLeafPreimage = this.leafPreimageFactory.fromLeaf( @@ -74,8 +74,8 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { newLeaf.getKey(), BigInt(currentSize), ); - this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); - this.updateLeaf(newLeafPreimage, currentSize); + await this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); + await this.updateLeaf(newLeafPreimage, currentSize); } } } diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts 
index b01409eb14c5..c20572d616a7 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts @@ -60,16 +60,16 @@ describe('StandardTree_batchAppend', () => { expect(pedersen.hashCounter).toEqual(expectedNumHashing); - const level2Node0 = pedersen.hash(leaves[0], leaves[1]); - const level2Node1 = pedersen.hash(leaves[2], leaves[3]); - const level2Node2 = pedersen.hash(leaves[4], INITIAL_LEAF); + const level2Node0 = await pedersen.hash(leaves[0], leaves[1]); + const level2Node1 = await pedersen.hash(leaves[2], leaves[3]); + const level2Node2 = await pedersen.hash(leaves[4], INITIAL_LEAF); - const level2ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = await pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); - const level1Node0 = pedersen.hash(level2Node0, level2Node1); - const level1Node1 = pedersen.hash(level2Node2, level2ZeroHash); + const level1Node0 = await pedersen.hash(level2Node0, level2Node1); + const level1Node1 = await pedersen.hash(level2Node2, level2ZeroHash); - const root = pedersen.hash(level1Node0, level1Node1); + const root = await pedersen.hash(level1Node0, level1Node1); expect(tree.getRoot(true)).toEqual(root); }); diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts index 27ce349a4844..b7c31b9af597 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts @@ -18,10 +18,10 @@ export class StandardTree extends TreeBase imp * @param leaves - The leaves to append. * @returns Empty promise. 
*/ - public override appendLeaves(leaves: T[]): Promise { + public override async appendLeaves(leaves: T[]): Promise { this.hasher.reset(); const timer = new Timer(); - super.appendLeaves(leaves); + await super.appendLeaves(leaves); this.log.debug(`Inserted ${leaves.length} leaves into ${this.getName()} tree`, { eventName: 'tree-insertion', duration: timer.ms(), @@ -43,14 +43,18 @@ export class StandardTree extends TreeBase imp return this.#snapshotBuilder.getSnapshot(blockNumber); } - public findLeafIndex(value: T, includeUncommitted: boolean): bigint | undefined { - return this.findLeafIndexAfter(value, 0n, includeUncommitted); + public async findLeafIndex(value: T, includeUncommitted: boolean): Promise { + return await this.findLeafIndexAfter(value, 0n, includeUncommitted); } - public findLeafIndexAfter(value: T, startIndex: bigint, includeUncommitted: boolean): bigint | undefined { + public async findLeafIndexAfter( + value: T, + startIndex: bigint, + includeUncommitted: boolean, + ): Promise { const buffer = serializeToBuffer(value); for (let i = startIndex; i < this.getNumLeaves(includeUncommitted); i++) { - const currentValue = this.getLeafValue(i, includeUncommitted); + const currentValue = await this.getLeafValue(i, includeUncommitted); if (currentValue && serializeToBuffer(currentValue).equals(buffer)) { return i; } diff --git a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts index 6f8fc6d09873..ddfa43caf604 100644 --- a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts +++ b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts @@ -37,7 +37,7 @@ export const standardBasedTreeTestSuite = ( it('should have correct empty tree root for depth 32', async () => { const db = openTmpStore(); const tree = await createDb(db, pedersen, 'test', 32); - const root = tree.getRoot(false); + const root = await tree.getRoot(false); 
expect(root.toString('hex')).toEqual('16642d9ccd8346c403aa4c3fa451178b22534a27035cdaa6ec34ae53b29c50cb'); }); @@ -52,9 +52,9 @@ export const standardBasedTreeTestSuite = ( const db = openTmpStore(); const tree = await createDb(db, pedersen, 'test', 2); - const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level1ZeroHash = await pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(await pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(0n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); @@ -62,23 +62,27 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[0]]); expect(tree.getNumLeaves(true)).toEqual(1n); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(true)).toEqual(pedersen.hash(pedersen.hash(values[0], INITIAL_LEAF), level1ZeroHash)); + expect(tree.getRoot(true)).toEqual( + await pedersen.hash(await pedersen.hash(values[0], INITIAL_LEAF), level1ZeroHash), + ); expect(await tree.getSiblingPath(0n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); - expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(await pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(0n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); await appendLeaves(tree, [values[1]]); expect(tree.getNumLeaves(true)).toEqual(2n); - expect(tree.getRoot(true)).toEqual(pedersen.hash(pedersen.hash(values[0], values[1]), level1ZeroHash)); + expect(tree.getRoot(true)).toEqual( + await pedersen.hash(await pedersen.hash(values[0], values[1]), level1ZeroHash), + ); expect(await tree.getSiblingPath(1n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, 
[values[0], level1ZeroHash]), ); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(await pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(1n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); @@ -86,13 +90,13 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[2]]); expect(tree.getNumLeaves(true)).toEqual(3n); expect(tree.getRoot(true)).toEqual( - pedersen.hash(pedersen.hash(values[0], values[1]), pedersen.hash(values[2], INITIAL_LEAF)), + pedersen.hash(await pedersen.hash(values[0], values[1]), await pedersen.hash(values[2], INITIAL_LEAF)), ); expect(await tree.getSiblingPath(2n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, pedersen.hash(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, await pedersen.hash(values[0], values[1])]), ); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(await pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(2n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); @@ -100,29 +104,29 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[3]]); expect(tree.getNumLeaves(true)).toEqual(4n); expect(tree.getRoot(true)).toEqual( - pedersen.hash(pedersen.hash(values[0], values[1]), pedersen.hash(values[2], values[3])), + pedersen.hash(await pedersen.hash(values[0], values[1]), await pedersen.hash(values[2], values[3])), ); expect(await tree.getSiblingPath(3n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.hash(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[2], await pedersen.hash(values[0], values[1])]), ); 
expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(await pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(3n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); // Lifted from memory_tree.test.cpp to ensure consistency. //expect(root.toString('hex')).toEqual('0bf2e78afd70f72b0e6eafb03c41faef167a82441b05e517cdf35d813302061f'); expect(await tree.getSiblingPath(0n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[1], pedersen.hash(values[2], values[3])]), + new SiblingPath(TEST_TREE_DEPTH, [values[1], await pedersen.hash(values[2], values[3])]), ); expect(await tree.getSiblingPath(1n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[0], pedersen.hash(values[2], values[3])]), + new SiblingPath(TEST_TREE_DEPTH, [values[0], await pedersen.hash(values[2], values[3])]), ); expect(await tree.getSiblingPath(2n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[3], pedersen.hash(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[3], await pedersen.hash(values[0], values[1])]), ); expect(await tree.getSiblingPath(3n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.hash(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[2], await pedersen.hash(values[0], values[1])]), ); await tree.commit(); diff --git a/yarn-project/merkle-tree/src/test/test_suite.ts b/yarn-project/merkle-tree/src/test/test_suite.ts index dba34f92d8fb..37bc40581197 100644 --- a/yarn-project/merkle-tree/src/test/test_suite.ts +++ b/yarn-project/merkle-tree/src/test/test_suite.ts @@ -15,7 +15,9 @@ const expectSameTrees = async ( ) => { const size = tree1.getNumLeaves(includeUncommitted); expect(size).toBe(tree2.getNumLeaves(includeUncommitted)); - 
expect(tree1.getRoot(includeUncommitted).toString('hex')).toBe(tree2.getRoot(includeUncommitted).toString('hex')); + expect((await tree1.getRoot(includeUncommitted)).toString('hex')).toBe( + (await tree2.getRoot(includeUncommitted)).toString('hex'), + ); for (let i = 0; i < size; ++i) { const siblingPath1 = await tree1.getSiblingPath(BigInt(i), includeUncommitted); @@ -58,7 +60,7 @@ export const treeTestSuite = ( const tree = await createDb(db, pedersen, 'test2', 10); await appendLeaves(tree, values.slice(0, 4)); - const firstRoot = tree.getRoot(true); + const firstRoot = await tree.getRoot(true); expect(firstRoot).not.toEqual(emptyTree.getRoot(true)); // committed root should still be the empty root expect(tree.getRoot(false)).toEqual(emptyTree.getRoot(false)); diff --git a/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts b/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts index f5ab9af885a2..7c494ae819fc 100644 --- a/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts +++ b/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts @@ -19,9 +19,9 @@ export class PedersenWithCounter extends Pedersen { * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. 
*/ - public override hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { + public override async hash(lhs: Uint8Array, rhs: Uint8Array): Promise { this.hashCounter++; - return super.hash(lhs, rhs); + return await super.hash(lhs, rhs); } /** diff --git a/yarn-project/merkle-tree/src/tree_base.ts b/yarn-project/merkle-tree/src/tree_base.ts index cd20e8e0f2d8..2c0c158ebfe1 100644 --- a/yarn-project/merkle-tree/src/tree_base.ts +++ b/yarn-project/merkle-tree/src/tree_base.ts @@ -48,8 +48,8 @@ export const INITIAL_LEAF = Buffer.from('000000000000000000000000000000000000000 export abstract class TreeBase implements MerkleTree { protected readonly maxIndex: bigint; protected cachedSize?: bigint; - private root!: Buffer; - private zeroHashes: Buffer[] = []; + private root!: Promise; + private zeroHashes: Promise; private cache: { [key: string]: Buffer } = {}; protected log: DebugLogger; protected hasher: HasherWithStats; @@ -75,13 +75,18 @@ export abstract class TreeBase implements MerkleTree { this.meta = openTreeMetaSingleton(store, name); // Compute the zero values at each layer. - let current = INITIAL_LEAF; - for (let i = depth - 1; i >= 0; --i) { - this.zeroHashes[i] = current; - current = hasher.hash(current, current); - } + const hashes = (async () => { + let current = INITIAL_LEAF; + let result = []; + for (let i = depth - 1; i >= 0; --i) { + result[i] = current; + current = await hasher.hash(current, current); + } + return { zeroHashes: result, current }; + })(); + this.zeroHashes = hashes.then(x => x.zeroHashes); - this.root = root ? root : current; + this.root = root ? Promise.resolve(root) : hashes.then(x => x.current); this.maxIndex = 2n ** BigInt(depth) - 1n; this.log = createDebugLogger(`aztec:merkle-tree:${name.toLowerCase()}`); @@ -92,8 +97,8 @@ export abstract class TreeBase implements MerkleTree { * @param includeUncommitted - If true, root incorporating uncommitted changes is returned. * @returns The root of the tree. 
*/ - public getRoot(includeUncommitted: boolean): Buffer { - return !includeUncommitted ? this.root : this.cache[indexToKeyHash(this.name, 0, 0n)] ?? this.root; + public async getRoot(includeUncommitted: boolean): Promise { + return !includeUncommitted ? await this.root : this.cache[indexToKeyHash(this.name, 0, 0n)] ?? this.root; } /** @@ -128,12 +133,12 @@ export abstract class TreeBase implements MerkleTree { * @returns A sibling path for the element at the given index. * Note: The sibling path is an array of sibling hashes, with the lowest hash (leaf hash) first, and the highest hash last. */ - public getSiblingPath(index: bigint, includeUncommitted: boolean): Promise> { + public async getSiblingPath(index: bigint, includeUncommitted: boolean): Promise> { const path: Buffer[] = []; let level = this.depth; while (level > 0) { const isRight = index & 0x01n; - const sibling = this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, includeUncommitted); + const sibling = await this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, includeUncommitted); path.push(sibling); level -= 1; index >>= 1n; @@ -175,8 +180,8 @@ export abstract class TreeBase implements MerkleTree { * @param includeUncommitted - Indicates whether to include uncommitted changes. * @returns Leaf value at the given index or undefined. */ - public getLeafValue(index: bigint, includeUncommitted: boolean): T | undefined { - const buf = this.getLatestValueAtIndex(this.depth, index, includeUncommitted); + public async getLeafValue(index: bigint, includeUncommitted: boolean): Promise { + const buf = await this.getLatestValueAtIndex(this.depth, index, includeUncommitted); if (buf) { return this.deserializer.fromBuffer(buf); } else { @@ -190,8 +195,8 @@ export abstract class TreeBase implements MerkleTree { * @param includeUncommitted - Indicates whether to include uncommitted changes. * @returns Leaf value at the given index or undefined. 
*/ - public getLeafBuffer(index: bigint, includeUncommitted: boolean): Buffer | undefined { - return this.getLatestValueAtIndex(this.depth, index, includeUncommitted); + public async getLeafBuffer(index: bigint, includeUncommitted: boolean): Promise { + return await this.getLatestValueAtIndex(this.depth, index, includeUncommitted); } public getNode(level: number, index: bigint): Buffer | undefined { @@ -206,12 +211,13 @@ export abstract class TreeBase implements MerkleTree { return this.dbGet(indexToKeyHash(this.name, level, index)); } - public getZeroHash(level: number): Buffer { + public async getZeroHash(level: number): Promise { if (level <= 0 || level > this.depth) { throw new Error('Invalid level'); } - return this.zeroHashes[level - 1]; + const zeroHashes = await this.zeroHashes; + return zeroHashes[level - 1]; } /** @@ -227,17 +233,17 @@ export abstract class TreeBase implements MerkleTree { * @param leaf - Leaf to add to cache. * @param index - Index of the leaf (used to derive the cache key). */ - protected addLeafToCacheAndHashToRoot(leaf: Buffer, index: bigint) { + protected async addLeafToCacheAndHashToRoot(leaf: Buffer, index: bigint) { const key = indexToKeyHash(this.name, this.depth, index); let current = leaf; this.cache[key] = current; let level = this.depth; while (level > 0) { const isRight = index & 0x01n; - const sibling = this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, true); + const sibling = await this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, true); const lhs = isRight ? sibling : current; const rhs = isRight ? current : sibling; - current = this.hasher.hash(lhs, rhs); + current = await this.hasher.hash(lhs, rhs); level -= 1; index >>= 1n; const cacheKey = indexToKeyHash(this.name, level, index); @@ -253,7 +259,7 @@ export abstract class TreeBase implements MerkleTree { * @returns The latest value at the given index. * Note: If the value is not in the cache, it will be fetched from the database. 
*/ - private getLatestValueAtIndex(level: number, index: bigint, includeUncommitted: boolean): Buffer { + private async getLatestValueAtIndex(level: number, index: bigint, includeUncommitted: boolean): Promise { const key = indexToKeyHash(this.name, level, index); if (includeUncommitted && this.cache[key] !== undefined) { return this.cache[key]; @@ -262,7 +268,8 @@ export abstract class TreeBase implements MerkleTree { if (committed !== undefined) { return committed; } - return this.zeroHashes[level - 1]; + const zeroHashes = await this.zeroHashes; + return zeroHashes[level - 1]; } /** @@ -289,8 +296,8 @@ export abstract class TreeBase implements MerkleTree { * Writes meta data to the provided batch. * @param batch - The batch to which to write the meta data. */ - protected writeMeta() { - const data = encodeMeta(this.getRoot(true), this.depth, this.getNumLeaves(true)); + protected async writeMeta() { + const data = encodeMeta(await this.getRoot(true), this.depth, this.getNumLeaves(true)); return this.meta.set(data); } @@ -309,7 +316,7 @@ export abstract class TreeBase implements MerkleTree { * `getLatestValueAtIndex` will return a value from cache (because at least one of the 2 children was * touched in previous iteration). */ - protected appendLeaves(leaves: T[]): void { + protected async appendLeaves(leaves: T[]): Promise { const numLeaves = this.getNumLeaves(true); if (numLeaves + BigInt(leaves.length) - 1n > this.maxIndex) { throw Error(`Can't append beyond max index. 
Max index: ${this.maxIndex}`); @@ -330,10 +337,10 @@ export abstract class TreeBase implements MerkleTree { lastIndex >>= 1n; // 3.Iterate over all the affected nodes at this level and update them for (let index = firstIndex; index <= lastIndex; index++) { - const lhs = this.getLatestValueAtIndex(level, index * 2n, true); - const rhs = this.getLatestValueAtIndex(level, index * 2n + 1n, true); + const lhs = await this.getLatestValueAtIndex(level, index * 2n, true); + const rhs = await this.getLatestValueAtIndex(level, index * 2n + 1n, true); const cacheKey = indexToKeyHash(this.name, level - 1, index); - this.cache[cacheKey] = this.hasher.hash(lhs, rhs); + this.cache[cacheKey] = await this.hasher.hash(lhs, rhs); } level -= 1; @@ -347,7 +354,7 @@ export abstract class TreeBase implements MerkleTree { * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The index of the first leaf found with a given value (undefined if not found). */ - abstract findLeafIndex(value: T, includeUncommitted: boolean): bigint | undefined; + abstract findLeafIndex(value: T, includeUncommitted: boolean): Promise; /** * Returns the first index containing a leaf value after `startIndex`. @@ -356,5 +363,5 @@ export abstract class TreeBase implements MerkleTree { * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The index of the first leaf found with a given value (undefined if not found). 
*/ - abstract findLeafIndexAfter(leaf: T, startIndex: bigint, includeUncommitted: boolean): bigint | undefined; + abstract findLeafIndexAfter(leaf: T, startIndex: bigint, includeUncommitted: boolean): Promise; } diff --git a/yarn-project/merkle-tree/src/unbalanced_tree.test.ts b/yarn-project/merkle-tree/src/unbalanced_tree.test.ts index 14ee0252ee44..f13a76136288 100644 --- a/yarn-project/merkle-tree/src/unbalanced_tree.test.ts +++ b/yarn-project/merkle-tree/src/unbalanced_tree.test.ts @@ -27,7 +27,7 @@ describe('Wonky tree', () => { // For the final test, we make the final (shifted up) leaf be H(1, 2), so we can calculate the root // with a standard tree easily. if (leaves[30]) { - leaves[30] = hasher.hash(new Fr(1).toBuffer(), new Fr(2).toBuffer()); + leaves[30] = await hasher.hash(new Fr(1).toBuffer(), new Fr(2).toBuffer()); } await tree.appendLeaves(leaves); return { tree, leaves }; @@ -49,8 +49,8 @@ describe('Wonky tree', () => { leaves = res.leaves; }); - it("Shouldn't accept more leaves", () => { - expect(() => tree.appendLeaves([Buffer.alloc(32)])).toThrow( + it("Shouldn't accept more leaves", async () => { + await expect(() => tree.appendLeaves([Buffer.alloc(32)])).rejects.toThrow( "Can't re-append to an unbalanced tree. 
Current has 2 leaves.", ); }); @@ -61,8 +61,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly computes root', async () => { + const root = await tree.getRoot(); const expectedRoot = sha256Trunc(Buffer.concat([leaves[0], leaves[1]])); expect(root).toEqual(expectedRoot); }); @@ -95,8 +95,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly computes root', async () => { + const root = await tree.getRoot(); const mergeNode = sha256Trunc(Buffer.concat([leaves[0], leaves[1]])); const expectedRoot = sha256Trunc(Buffer.concat([mergeNode, leaves[2]])); expect(root).toEqual(expectedRoot); @@ -132,8 +132,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly computes root', async () => { + const root = await tree.getRoot(); let leftMergeNode = sha256Trunc(Buffer.concat([leaves[0], leaves[1]])); const rightMergeNode = sha256Trunc(Buffer.concat([leaves[2], leaves[3]])); leftMergeNode = sha256Trunc(Buffer.concat([leftMergeNode, rightMergeNode])); @@ -171,8 +171,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly computes root', async () => { + const root = await tree.getRoot(); let leftMergeNode = sha256Trunc(Buffer.concat([leaves[0], leaves[1]])); let rightMergeNode = sha256Trunc(Buffer.concat([leaves[2], leaves[3]])); leftMergeNode = sha256Trunc(Buffer.concat([leftMergeNode, rightMergeNode])); @@ -217,8 +217,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly 
computes root', async () => { + const root = await tree.getRoot(); const firstMergeNode = sha256Trunc(Buffer.concat([leaves[0], leaves[1]])); secondMergeNode = sha256Trunc(Buffer.concat([leaves[2], leaves[3]])); const thirdMergeNode = sha256Trunc(Buffer.concat([firstMergeNode, secondMergeNode])); @@ -255,8 +255,8 @@ describe('Wonky tree', () => { expect(tree.findLeafIndex(leaves[0])).toEqual(0n); }); - it('Correctly computes root', () => { - const root = tree.getRoot(); + it('Correctly computes root', async () => { + const root = await tree.getRoot(); const expectedRoot = stdTree.getRoot(true); expect(root).toEqual(expectedRoot); }); diff --git a/yarn-project/merkle-tree/src/unbalanced_tree.ts b/yarn-project/merkle-tree/src/unbalanced_tree.ts index 6920b5d12781..a8f3dcc2fb96 100644 --- a/yarn-project/merkle-tree/src/unbalanced_tree.ts +++ b/yarn-project/merkle-tree/src/unbalanced_tree.ts @@ -37,7 +37,7 @@ export class UnbalancedTree implements MerkleTree * Returns the root of the tree. * @returns The root of the tree. */ - public getRoot(): Buffer { + public async getRoot(): Promise { return this.root; } @@ -89,7 +89,7 @@ export class UnbalancedTree implements MerkleTree * So this function cannot reliably give the expected leaf value. * We cannot add level as an input as its based on the MerkleTree class's function. */ - public getLeafValue(_index: bigint): undefined { + public async getLeafValue(_index: bigint): Promise { throw new Error('Unsupported function - cannot get leaf value from an index in an unbalanced tree.'); } @@ -99,7 +99,7 @@ export class UnbalancedTree implements MerkleTree * @returns The index of the first leaf found with a given value (undefined if not found). * @remark This is NOT the index as inserted, but the index which will be used to calculate path structure. 
*/ - public findLeafIndex(value: T): bigint | undefined { + public async findLeafIndex(value: T): Promise { const key = this.valueCache[serializeToBuffer(value).toString('hex')]; const [, , index] = key.split(':'); return BigInt(index); @@ -112,8 +112,8 @@ export class UnbalancedTree implements MerkleTree * @returns The index of the first leaf found with a given value (undefined if not found). * @remark This is not really used for a wonky tree, but required to implement MerkleTree. */ - public findLeafIndexAfter(value: T, startIndex: bigint): bigint | undefined { - const index = this.findLeafIndex(value); + public async findLeafIndexAfter(value: T, startIndex: bigint): Promise { + const index = await this.findLeafIndex(value); if (!index || index < startIndex) { return undefined; } @@ -165,7 +165,7 @@ export class UnbalancedTree implements MerkleTree * @param leaves - The leaves to append. * @returns Empty promise. */ - public appendLeaves(leaves: T[]): Promise { + public async appendLeaves(leaves: T[]): Promise { this.hasher.reset(); if (this.size != BigInt(0)) { throw Error(`Can't re-append to an unbalanced tree. Current has ${this.size} leaves.`); @@ -173,7 +173,7 @@ export class UnbalancedTree implements MerkleTree if (this.size + BigInt(leaves.length) - 1n > this.maxIndex) { throw Error(`Can't append beyond max index. Max index: ${this.maxIndex}`); } - const root = this.batchInsert(leaves); + const root = await this.batchInsert(leaves); this.root = root; return Promise.resolve(); @@ -184,7 +184,7 @@ export class UnbalancedTree implements MerkleTree * @param leaves - The leaves to append. * @returns Resulting root of the tree. 
*/ - private batchInsert(_leaves: T[]): Buffer { + private async batchInsert(_leaves: T[]): Promise { // If we have an even number of leaves, hash them all in pairs // Otherwise, store the final leaf to be shifted up to the next odd sized level let [layerWidth, nodeToShift] = @@ -199,7 +199,7 @@ export class UnbalancedTree implements MerkleTree for (let i = 0; i < this.maxDepth; i++) { for (let j = 0; j < layerWidth; j += 2) { // Store the hash of each pair one layer up - nextLayer[j / 2] = this.hasher.hash(serializeToBuffer(thisLayer[j]), serializeToBuffer(thisLayer[j + 1])); + nextLayer[j / 2] = await this.hasher.hash(serializeToBuffer(thisLayer[j]), serializeToBuffer(thisLayer[j + 1])); this.storeNode(nextLayer[j / 2], this.maxDepth - i - 1, BigInt(j >> 1)); } layerWidth /= 2; diff --git a/yarn-project/noir-protocol-circuits-types/src/noir_test_gen.test.ts b/yarn-project/noir-protocol-circuits-types/src/noir_test_gen.test.ts index dd191ced7448..8a7ae36b8211 100644 --- a/yarn-project/noir-protocol-circuits-types/src/noir_test_gen.test.ts +++ b/yarn-project/noir-protocol-circuits-types/src/noir_test_gen.test.ts @@ -49,15 +49,16 @@ describe('Data generation for noir tests', () => { const format = (obj: object) => JSON.stringify(obj, null, 2).replaceAll('"', ''); - test.each(contracts)('Computes contract info for %s', contract => { + test.each(contracts)('Computes contract info for %s', async contract => { const contractClass: ContractClass = { ...contract, publicFunctions: [], version: 1 }; - const contractClassId = computeContractClassId(contractClass); - const initializationHash = computeInitializationHashFromEncodedArgs(constructorSelector, []); - const { artifactHash, privateFunctionsRoot, publicBytecodeCommitment } = - computeContractClassIdPreimage(contractClass); + const contractClassId = await computeContractClassId(contractClass); + const initializationHash = await computeInitializationHashFromEncodedArgs(constructorSelector, []); + const { artifactHash, 
privateFunctionsRoot, publicBytecodeCommitment } = await computeContractClassIdPreimage( + contractClass, + ); const deployer = AztecAddress.ZERO; const instance: ContractInstance = { ...contract, version: 1, initializationHash, contractClassId, deployer }; - const address = computeContractAddressFromInstance(instance); + const address = await computeContractAddressFromInstance(instance); const saltedInitializationHash = computeSaltedInitializationHash(instance); const partialAddress = computePartialAddress(instance); @@ -79,8 +80,8 @@ describe('Data generation for noir tests', () => { /* eslint-enable camelcase */ }); - test.each(contracts)('Computes function tree for %s', contract => { - const tree = computePrivateFunctionsTree(contract.privateFunctions); + test.each(contracts)('Computes function tree for %s', async contract => { + const tree = await computePrivateFunctionsTree(contract.privateFunctions); expect( tree.leaves.map((leaf, index) => ({ index, diff --git a/yarn-project/noir-protocol-circuits-types/src/vks.ts b/yarn-project/noir-protocol-circuits-types/src/vks.ts index ca1675d32ad8..4ec82619afd6 100644 --- a/yarn-project/noir-protocol-circuits-types/src/vks.ts +++ b/yarn-project/noir-protocol-circuits-types/src/vks.ts @@ -95,8 +95,8 @@ export const ProtocolCircuitVkIndexes: Record = { }; function buildVKTree() { - const calculator = new MerkleTreeCalculator(VK_TREE_HEIGHT, Buffer.alloc(32), (a, b) => - poseidon2Hash([a, b]).toBuffer(), + const calculator = new MerkleTreeCalculator(VK_TREE_HEIGHT, Buffer.alloc(32), async (a, b) => + (await poseidon2Hash([a, b])).toBuffer(), ); const vkHashes = new Array(2 ** VK_TREE_HEIGHT).fill(Buffer.alloc(32)); @@ -112,18 +112,18 @@ function buildVKTree() { let vkTree: MerkleTree | undefined; -export function getVKTree() { +export async function getVKTree() { if (!vkTree) { - vkTree = buildVKTree(); + vkTree = await buildVKTree(); } return vkTree; } -export function getVKTreeRoot() { - return 
Fr.fromBuffer(getVKTree().root); +export async function getVKTreeRoot() { + return Fr.fromBuffer((await getVKTree()).root); } -export function getVKIndex(vk: VerificationKeyData | VerificationKeyAsFields | Fr) { +export async function getVKIndex(vk: VerificationKeyData | VerificationKeyAsFields | Fr) { let hash; if (vk instanceof VerificationKeyData) { hash = vk.keyAsFields.hash; @@ -133,18 +133,16 @@ export function getVKIndex(vk: VerificationKeyData | VerificationKeyAsFields | F hash = vk; } - const index = getVKTree().getIndex(hash.toBuffer()); + const index = (await getVKTree()).getIndex(hash.toBuffer()); if (index < 0) { throw new Error(`VK index for ${hash.toString()} not found in VK tree`); } return index; } -export function getVKSiblingPath(vkIndex: number) { +export async function getVKSiblingPath(vkIndex: number) { return assertLength( - getVKTree() - .getSiblingPath(vkIndex) - .map(buf => new Fr(buf)), + (await getVKTree()).getSiblingPath(vkIndex).map(buf => new Fr(buf)), VK_TREE_HEIGHT, ); } diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 219d2caeded7..39591bb07ff9 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -31,7 +31,7 @@ describe('In-Memory P2P Client', () => { let kvStore: AztecKVStore; let client: P2PClient; - beforeEach(() => { + beforeEach(async () => { txPool = { addTxs: jest.fn(), getTxByHash: jest.fn().mockReturnValue(undefined), @@ -69,7 +69,7 @@ describe('In-Memory P2P Client', () => { }; blockSource = new MockL2BlockSource(); - blockSource.createBlocks(100); + await blockSource.createBlocks(100); mempools = { txPool, @@ -110,8 +110,8 @@ describe('In-Memory P2P Client', () => { it('adds txs to pool', async () => { await client.start(); - const tx1 = mockTx(); - const tx2 = mockTx(); + const tx1 = await mockTx(); + const tx2 = await mockTx(); await client.sendTx(tx1); await client.sendTx(tx2); @@ -121,21 
+121,21 @@ describe('In-Memory P2P Client', () => { it('rejects txs after being stopped', async () => { await client.start(); - const tx1 = mockTx(); - const tx2 = mockTx(); + const tx1 = await mockTx(); + const tx2 = await mockTx(); await client.sendTx(tx1); await client.sendTx(tx2); expect(txPool.addTxs).toHaveBeenCalledTimes(2); await client.stop(); - const tx3 = mockTx(); + const tx3 = await mockTx(); await expect(client.sendTx(tx3)).rejects.toThrow(); expect(txPool.addTxs).toHaveBeenCalledTimes(2); }); it('republishes previously stored txs on start', async () => { - const tx1 = mockTx(); - const tx2 = mockTx(); + const tx1 = await mockTx(); + const tx2 = await mockTx(); txPool.getAllTxs.mockReturnValue([tx1, tx2]); await client.start(); @@ -261,7 +261,7 @@ describe('In-Memory P2P Client', () => { finalized: { number: 90, hash: expect.any(String) }, }); - blockSource.addBlocks([L2Block.random(91), L2Block.random(92)]); + blockSource.addBlocks([await L2Block.random(91), await L2Block.random(92)]); // give the client a chance to react to the new blocks await sleep(100); @@ -281,10 +281,10 @@ describe('In-Memory P2P Client', () => { // add two txs to the pool. 
One build against block 90, one against block 95 // then prune the chain back to block 90 // only one tx should be deleted - const goodTx = mockTx(); + const goodTx = await mockTx(); goodTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(90); - const badTx = mockTx(); + const badTx = await mockTx(); badTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(95); txPool.getAllTxs.mockReturnValue([goodTx, badTx]); @@ -303,13 +303,13 @@ describe('In-Memory P2P Client', () => { // add three txs to the pool built against different blocks // then prune the chain back to block 90 // only one tx should be deleted - const goodButOldTx = mockTx(); + const goodButOldTx = await mockTx(); goodButOldTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(89); - const goodTx = mockTx(); + const goodTx = await mockTx(); goodTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(90); - const badTx = mockTx(); + const badTx = await mockTx(); badTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(95); txPool.getAllTxs.mockReturnValue([goodButOldTx, goodTx, badTx]); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts index 35af12fbd68b..6689ba3353fc 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts @@ -14,7 +14,7 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Adds txs to the pool as pending', async () => { - const tx1 = mockTx(); + const tx1 = await mockTx(); await pool.addTxs([tx1]); const poolTx = pool.getTxByHash(tx1.getTxHash()); @@ -24,7 +24,7 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Removes txs from the pool', async () => { - const tx1 = mockTx(); + const tx1 = await mockTx(); await pool.addTxs([tx1]); await pool.deleteTxs([tx1.getTxHash()]); @@ -34,8 +34,8 @@ export 
function describeTxPool(getTxPool: () => TxPool) { }); it('Marks txs as mined', async () => { - const tx1 = mockTx(1); - const tx2 = mockTx(2); + const tx1 = await mockTx(1); + const tx2 = await mockTx(2); await pool.addTxs([tx1, tx2]); await pool.markAsMined([tx1.getTxHash()], 1); @@ -47,8 +47,8 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Marks txs as pending after being mined', async () => { - const tx1 = mockTx(1); - const tx2 = mockTx(2); + const tx1 = await mockTx(1); + const tx2 = await mockTx(2); await pool.addTxs([tx1, tx2]); await pool.markAsMined([tx1.getTxHash()], 1); @@ -61,9 +61,9 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Only marks txs as pending if they are known', async () => { - const tx1 = mockTx(1); + const tx1 = await mockTx(1); // simulate a situation where not all peers have all the txs - const someTxHashThatThisPeerDidNotSee = mockTx(2).getTxHash(); + const someTxHashThatThisPeerDidNotSee = await (await mockTx(2)).getTxHash(); await pool.addTxs([tx1]); // this peer knows that tx2 was mined, but it does not have the tx object await pool.markAsMined([tx1.getTxHash(), someTxHashThatThisPeerDidNotSee], 1); @@ -79,9 +79,9 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Returns all transactions in the pool', async () => { - const tx1 = mockTx(1); - const tx2 = mockTx(2); - const tx3 = mockTx(3); + const tx1 = await mockTx(1); + const tx2 = await mockTx(2); + const tx3 = await mockTx(3); await pool.addTxs([tx1, tx2, tx3]); @@ -91,9 +91,9 @@ export function describeTxPool(getTxPool: () => TxPool) { }); it('Returns all txHashes in the pool', async () => { - const tx1 = mockTx(1); - const tx2 = mockTx(2); - const tx3 = mockTx(3); + const tx1 = await mockTx(1); + const tx2 = await mockTx(2); + const tx3 = await mockTx(3); await pool.addTxs([tx1, tx2, tx3]); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts 
b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts index c6545c5b4930..0bba98badb5a 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts @@ -138,7 +138,7 @@ describe('Req Resp p2p client integration', () => { } as P2PConfig & DataStoreConfig; l2BlockSource = new MockL2BlockSource(); - l2BlockSource.createBlocks(100); + await l2BlockSource.createBlocks(100); proofVerifier = alwaysTrueVerifier ? new AlwaysTrueCircuitVerifier() : new AlwaysFalseCircuitVerifier(); kvStore = openTmpStore(); @@ -179,7 +179,7 @@ describe('Req Resp p2p client integration', () => { await sleep(2000); // Perform a get tx request from client 1 - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); const requestedTx = await client1.requestTxByHash(txHash); @@ -202,7 +202,7 @@ describe('Req Resp p2p client integration', () => { await sleep(6000); // Perform a get tx request from client 1 - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); // Mock the tx pool to return the tx we are looking for txPool.getTxByHash.mockImplementationOnce(() => tx); @@ -231,7 +231,7 @@ describe('Req Resp p2p client integration', () => { const penalizePeerSpy = jest.spyOn((client1 as any).p2pService.peerManager, 'penalizePeer'); // Perform a get tx request from client 1 - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); // Return the correct tx with an invalid proof -> active attack @@ -263,9 +263,9 @@ describe('Req Resp p2p client integration', () => { const penalizePeerSpy = jest.spyOn((client1 as any).p2pService.peerManager, 'penalizePeer'); // Perform a get tx request from client 1 - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); - const tx2 = mockTx(420); + const tx2 = await mockTx(420); // Return an invalid tx txPool.getTxByHash.mockImplementationOnce(() => tx2); diff --git 
a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index 349b3a8f6b50..a140e80e036f 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -116,7 +116,7 @@ describe('ReqResp', () => { describe('TX REQ PROTOCOL', () => { it('Can request a Tx from TxHash', async () => { - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; @@ -140,7 +140,7 @@ describe('ReqResp', () => { }); it('Does not crash if tx hash returns undefined', async () => { - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; @@ -223,7 +223,7 @@ describe('ReqResp', () => { }); it('Should penalize peer if transaction validation fails', async () => { - const tx = mockTx(); + const tx = await mockTx(); const txHash = tx.getTxHash(); // Mock that the node will respond with the tx diff --git a/yarn-project/p2p/src/tx_validator/aggregate_tx_validator.test.ts b/yarn-project/p2p/src/tx_validator/aggregate_tx_validator.test.ts index c74a0fc5e169..133d71da0283 100644 --- a/yarn-project/p2p/src/tx_validator/aggregate_tx_validator.test.ts +++ b/yarn-project/p2p/src/tx_validator/aggregate_tx_validator.test.ts @@ -4,7 +4,7 @@ import { AggregateTxValidator } from './aggregate_tx_validator.js'; describe('AggregateTxValidator', () => { it('allows txs that pass all validation', async () => { - const txs = [mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]; + const txs = await Promise.all([mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]); const agg = new AggregateTxValidator( new TxDenyList(txs[0].getTxHash(), txs[1].getTxHash()), new TxDenyList(txs[2].getTxHash(), txs[3].getTxHash()), diff --git a/yarn-project/p2p/src/tx_validator/data_validator.test.ts b/yarn-project/p2p/src/tx_validator/data_validator.test.ts index 
6b7f42859f62..c00c54275841 100644 --- a/yarn-project/p2p/src/tx_validator/data_validator.test.ts +++ b/yarn-project/p2p/src/tx_validator/data_validator.test.ts @@ -4,15 +4,17 @@ import { AztecAddress, Fr, FunctionSelector } from '@aztec/circuits.js'; import { DataTxValidator } from './data_validator.js'; const mockTxs = (numTxs: number) => - Array(numTxs) - .fill(0) - .map((_, i) => - mockTx(i, { - numberOfNonRevertiblePublicCallRequests: 2, - numberOfRevertiblePublicCallRequests: 2, - hasPublicTeardownCallRequest: true, - }), - ); + Promise.all( + Array(numTxs) + .fill(0) + .map((_, i) => + mockTx(i, { + numberOfNonRevertiblePublicCallRequests: 2, + numberOfRevertiblePublicCallRequests: 2, + hasPublicTeardownCallRequest: true, + }), + ), + ); describe('TxDataValidator', () => { let validator: DataTxValidator; @@ -22,13 +24,13 @@ describe('TxDataValidator', () => { }); it('allows transactions with the correct data', async () => { - const txs = mockTxs(3); + const txs = await mockTxs(3); await expect(validator.validateTxs(txs)).resolves.toEqual([txs, []]); }); it('rejects txs with mismatch non revertible execution requests', async () => { - const goodTxs = mockTxs(3); - const badTxs = mockTxs(2); + const goodTxs = await mockTxs(3); + const badTxs = await mockTxs(2); badTxs[0].data.forPublic!.nonRevertibleAccumulatedData.publicCallRequests[0].argsHash = Fr.random(); badTxs[1].data.forPublic!.nonRevertibleAccumulatedData.publicCallRequests[1].contractAddress = AztecAddress.random(); @@ -37,8 +39,8 @@ describe('TxDataValidator', () => { }); it('rejects txs with mismatch revertible execution requests', async () => { - const goodTxs = mockTxs(3); - const badTxs = mockTxs(4); + const goodTxs = await mockTxs(3); + const badTxs = await mockTxs(4); badTxs[0].data.forPublic!.revertibleAccumulatedData.publicCallRequests[0].msgSender = AztecAddress.random(); badTxs[1].data.forPublic!.revertibleAccumulatedData.publicCallRequests[1].contractAddress = AztecAddress.random(); 
badTxs[2].data.forPublic!.revertibleAccumulatedData.publicCallRequests[0].functionSelector = @@ -50,8 +52,8 @@ describe('TxDataValidator', () => { }); it('rejects txs with mismatch teardown execution requests', async () => { - const goodTxs = mockTxs(3); - const badTxs = mockTxs(2); + const goodTxs = await mockTxs(3); + const badTxs = await mockTxs(2); badTxs[0].data.forPublic!.publicTeardownCallRequest.contractAddress = AztecAddress.random(); badTxs[1].data.forPublic!.publicTeardownCallRequest.msgSender = AztecAddress.random(); @@ -59,8 +61,8 @@ describe('TxDataValidator', () => { }); it('rejects txs with mismatch number of execution requests', async () => { - const goodTxs = mockTxs(3); - const badTxs = mockTxs(2); + const goodTxs = await mockTxs(3); + const badTxs = await mockTxs(2); // Missing an enqueuedPublicFunctionCall. const execRequest = badTxs[0].enqueuedPublicFunctionCalls.pop()!; // Having an extra enqueuedPublicFunctionCall. diff --git a/yarn-project/p2p/src/tx_validator/data_validator.ts b/yarn-project/p2p/src/tx_validator/data_validator.ts index f284f4638cea..603315cca328 100644 --- a/yarn-project/p2p/src/tx_validator/data_validator.ts +++ b/yarn-project/p2p/src/tx_validator/data_validator.ts @@ -23,7 +23,7 @@ export class DataTxValidator implements TxValidator { return Promise.resolve(this.#hasCorrectExecutionRequests(tx)); } - #hasCorrectExecutionRequests(tx: Tx): boolean { + async #hasCorrectExecutionRequests(tx: Tx): Promise { const callRequests = [ ...tx.data.getRevertiblePublicCallRequests(), ...tx.data.getNonRevertiblePublicCallRequests(), @@ -37,9 +37,13 @@ export class DataTxValidator implements TxValidator { return false; } - const invalidExecutionRequestIndex = tx.enqueuedPublicFunctionCalls.findIndex( - (execRequest, i) => !execRequest.isForCallRequest(callRequests[i]), - ); + const invalidExecutionRequestIndex = ( + await Promise.all( + tx.enqueuedPublicFunctionCalls.map(async (execRequest, i) => { + return !(await 
execRequest.isForCallRequest(callRequests[i])); + }), + ) + ).findIndex(cond => !!cond); if (invalidExecutionRequestIndex !== -1) { this.#log.warn( `Rejecting tx ${Tx.getHash( @@ -52,7 +56,7 @@ export class DataTxValidator implements TxValidator { const teardownCallRequest = tx.data.getTeardownPublicCallRequest(); const isInvalidTeardownExecutionRequest = (!teardownCallRequest && !tx.publicTeardownFunctionCall.isEmpty()) || - (teardownCallRequest && !tx.publicTeardownFunctionCall.isForCallRequest(teardownCallRequest)); + (teardownCallRequest && !(await tx.publicTeardownFunctionCall.isForCallRequest(teardownCallRequest))); if (isInvalidTeardownExecutionRequest) { this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} because of incorrect teardown execution requests.`); return false; diff --git a/yarn-project/p2p/src/tx_validator/double_spend_validator.test.ts b/yarn-project/p2p/src/tx_validator/double_spend_validator.test.ts index 1c123319f33e..3034be7a08ca 100644 --- a/yarn-project/p2p/src/tx_validator/double_spend_validator.test.ts +++ b/yarn-project/p2p/src/tx_validator/double_spend_validator.test.ts @@ -18,19 +18,19 @@ describe('DoubleSpendTxValidator', () => { }); it('rejects duplicates in non revertible data', async () => { - const badTx = mockTxForRollup(); + const badTx = await mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); }); it('rejects duplicates in revertible data', async () => { - const badTx = mockTxForRollup(); + const badTx = await mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); }); it('rejects duplicates across phases', async () => { - const badTx = mockTx(1, { + const badTx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, }); @@ -40,14 
+40,14 @@ describe('DoubleSpendTxValidator', () => { }); it('rejects duplicates across txs', async () => { - const firstTx = mockTxForRollup(1); - const secondTx = mockTxForRollup(2); + const firstTx = await mockTxForRollup(1); + const secondTx = await mockTxForRollup(2); secondTx.data.forRollup!.end.nullifiers[0] = firstTx.data.forRollup!.end.nullifiers[0]; await expect(txValidator.validateTxs([firstTx, secondTx])).resolves.toEqual([[firstTx], [secondTx]]); }); it('rejects duplicates against history', async () => { - const badTx = mockTx(); + const badTx = await mockTx(); nullifierSource.getNullifierIndex.mockReturnValueOnce(Promise.resolve(1n)); await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); }); diff --git a/yarn-project/p2p/src/tx_validator/metadata_validator.test.ts b/yarn-project/p2p/src/tx_validator/metadata_validator.test.ts index 211d4ad0e669..cff1ee045fbc 100644 --- a/yarn-project/p2p/src/tx_validator/metadata_validator.test.ts +++ b/yarn-project/p2p/src/tx_validator/metadata_validator.test.ts @@ -15,8 +15,8 @@ describe('MetadataTxValidator', () => { }); it('allows only transactions for the right chain', async () => { - const goodTxs = [mockTx(1), mockTxForRollup(2)]; - const badTxs = [mockTx(3), mockTxForRollup(4)]; + const goodTxs = [await mockTx(1), await mockTxForRollup(2)]; + const badTxs = [await mockTx(3), await mockTxForRollup(4)]; goodTxs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; @@ -30,7 +30,7 @@ describe('MetadataTxValidator', () => { }); it.each([42, 43])('allows txs with valid max block number', async maxBlockNumber => { - const goodTx = mockTxForRollup(1); + const goodTx = await mockTxForRollup(1); goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, new Fr(maxBlockNumber)); @@ -38,7 +38,7 @@ describe('MetadataTxValidator', () => { }); it('allows txs with unset max block number', async () => { - const goodTx = 
mockTxForRollup(1); + const goodTx = await mockTxForRollup(1); goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(false, Fr.ZERO); @@ -46,7 +46,7 @@ describe('MetadataTxValidator', () => { }); it('rejects txs with lower max block number', async () => { - const badTx = mockTxForRollup(1); + const badTx = await mockTxForRollup(1); badTx.data.constants.txContext.chainId = chainId; badTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, blockNumber.sub(new Fr(1))); await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); diff --git a/yarn-project/protocol-contracts/src/auth-registry/index.ts b/yarn-project/protocol-contracts/src/auth-registry/index.ts index a5d5c399e0ed..cbba19f487d3 100644 --- a/yarn-project/protocol-contracts/src/auth-registry/index.ts +++ b/yarn-project/protocol-contracts/src/auth-registry/index.ts @@ -1,6 +1,6 @@ import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; /** Returns the canonical deployment of the auth registry. 
*/ -export function getCanonicalAuthRegistry(): ProtocolContract { - return getCanonicalProtocolContract('AuthRegistry'); +export async function getCanonicalAuthRegistry(): Promise { + return await getCanonicalProtocolContract('AuthRegistry'); } diff --git a/yarn-project/protocol-contracts/src/build_protocol_contract_tree.ts b/yarn-project/protocol-contracts/src/build_protocol_contract_tree.ts index 43ae9c3aaf96..d6bf304ab391 100644 --- a/yarn-project/protocol-contracts/src/build_protocol_contract_tree.ts +++ b/yarn-project/protocol-contracts/src/build_protocol_contract_tree.ts @@ -7,9 +7,9 @@ import { } from '@aztec/circuits.js'; import { poseidon2Hash } from '@aztec/foundation/crypto'; -export function buildProtocolContractTree(contracts: { address: AztecAddress; leaf: Fr }[]): MerkleTree { - const calculator = new MerkleTreeCalculator(PROTOCOL_CONTRACT_TREE_HEIGHT, Buffer.alloc(32), (a, b) => - poseidon2Hash([a, b]).toBuffer(), +export async function buildProtocolContractTree(contracts: { address: AztecAddress; leaf: Fr }[]): Promise { + const calculator = new MerkleTreeCalculator(PROTOCOL_CONTRACT_TREE_HEIGHT, Buffer.alloc(32), async (a, b) => + (await poseidon2Hash([a, b])).toBuffer(), ); const leaves = new Array(2 ** PROTOCOL_CONTRACT_TREE_HEIGHT).fill(Buffer.alloc(32)); @@ -19,5 +19,5 @@ export function buildProtocolContractTree(contracts: { address: AztecAddress; le leaves[index] = contract.leaf; } - return calculator.computeTree(leaves); + return await calculator.computeTree(leaves); } diff --git a/yarn-project/protocol-contracts/src/class-registerer/contract_class_registered_event.ts b/yarn-project/protocol-contracts/src/class-registerer/contract_class_registered_event.ts index b8c935dd948d..9f9a9caeb17c 100644 --- a/yarn-project/protocol-contracts/src/class-registerer/contract_class_registered_event.ts +++ b/yarn-project/protocol-contracts/src/class-registerer/contract_class_registered_event.ts @@ -46,11 +46,11 @@ export class 
ContractClassRegisteredEvent { ); } - toContractClassPublic(): ContractClassPublic { - const computedClassId = computeContractClassId({ + async toContractClassPublic(): Promise { + const computedClassId = await computeContractClassId({ artifactHash: this.artifactHash, privateFunctionsRoot: this.privateFunctionsRoot, - publicBytecodeCommitment: computePublicBytecodeCommitment(this.packedPublicBytecode), + publicBytecodeCommitment: await computePublicBytecodeCommitment(this.packedPublicBytecode), }); if (!computedClassId.equals(this.contractClassId)) { diff --git a/yarn-project/protocol-contracts/src/class-registerer/index.ts b/yarn-project/protocol-contracts/src/class-registerer/index.ts index b30844b28fab..f9786e5b7f96 100644 --- a/yarn-project/protocol-contracts/src/class-registerer/index.ts +++ b/yarn-project/protocol-contracts/src/class-registerer/index.ts @@ -5,6 +5,6 @@ export * from './private_function_broadcasted_event.js'; export * from './unconstrained_function_broadcasted_event.js'; /** Returns the canonical deployment of the class registerer contract. */ -export function getCanonicalClassRegisterer(): ProtocolContract { - return getCanonicalProtocolContract('ContractClassRegisterer'); +export async function getCanonicalClassRegisterer(): Promise { + return await getCanonicalProtocolContract('ContractClassRegisterer'); } diff --git a/yarn-project/protocol-contracts/src/fee-juice/index.ts b/yarn-project/protocol-contracts/src/fee-juice/index.ts index b92451bdb74d..d4b904c57ca9 100644 --- a/yarn-project/protocol-contracts/src/fee-juice/index.ts +++ b/yarn-project/protocol-contracts/src/fee-juice/index.ts @@ -1,6 +1,6 @@ import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; /** Returns the canonical deployment of the Fee Juice. 
*/ -export function getCanonicalFeeJuice(): ProtocolContract { - return getCanonicalProtocolContract('FeeJuice'); +export async function getCanonicalFeeJuice(): Promise { + return await getCanonicalProtocolContract('FeeJuice'); } diff --git a/yarn-project/protocol-contracts/src/instance-deployer/index.ts b/yarn-project/protocol-contracts/src/instance-deployer/index.ts index 1253aeb915d6..b98c87979919 100644 --- a/yarn-project/protocol-contracts/src/instance-deployer/index.ts +++ b/yarn-project/protocol-contracts/src/instance-deployer/index.ts @@ -3,6 +3,6 @@ import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol export * from './contract_instance_deployed_event.js'; /** Returns the canonical deployment of the instance deployer contract. */ -export function getCanonicalInstanceDeployer(): ProtocolContract { - return getCanonicalProtocolContract('ContractInstanceDeployer'); +export async function getCanonicalInstanceDeployer(): Promise { + return await getCanonicalProtocolContract('ContractInstanceDeployer'); } diff --git a/yarn-project/protocol-contracts/src/multi-call-entrypoint/index.ts b/yarn-project/protocol-contracts/src/multi-call-entrypoint/index.ts index 7766894654cc..7fd338395c77 100644 --- a/yarn-project/protocol-contracts/src/multi-call-entrypoint/index.ts +++ b/yarn-project/protocol-contracts/src/multi-call-entrypoint/index.ts @@ -1,5 +1,5 @@ import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; -export function getCanonicalMultiCallEntrypointContract(): ProtocolContract { - return getCanonicalProtocolContract('MultiCallEntrypoint'); +export async function getCanonicalMultiCallEntrypointContract(): Promise { + return await getCanonicalProtocolContract('MultiCallEntrypoint'); } diff --git a/yarn-project/protocol-contracts/src/protocol_contract.ts b/yarn-project/protocol-contracts/src/protocol_contract.ts index bf3f2dbd805c..b3410ff186ff 100644 --- 
a/yarn-project/protocol-contracts/src/protocol_contract.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract.ts @@ -28,13 +28,13 @@ export interface ProtocolContract { } /** Returns the canonical deployment a given artifact. */ -export function getCanonicalProtocolContract(name: ProtocolContractName): ProtocolContract { +export async function getCanonicalProtocolContract(name: ProtocolContractName): Promise { const artifact = ProtocolContractArtifact[name]; const address = ProtocolContractAddress[name]; const salt = ProtocolContractSalt[name]; // TODO(@spalladino): This computes the contract class from the artifact twice. - const contractClass = getContractClassFromArtifact(artifact); - const instance = getContractInstanceFromDeployParams(artifact, { salt }); + const contractClass = await getContractClassFromArtifact(artifact); + const instance = await getContractInstanceFromDeployParams(artifact, { salt }); return { instance: { ...instance, address }, contractClass, diff --git a/yarn-project/protocol-contracts/src/protocol_contract_tree.ts b/yarn-project/protocol-contracts/src/protocol_contract_tree.ts index 6a90c9950630..8edc67845440 100644 --- a/yarn-project/protocol-contracts/src/protocol_contract_tree.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract_tree.ts @@ -6,19 +6,19 @@ import { ProtocolContractAddress, ProtocolContractLeaf, protocolContractNames } let protocolContractTree: MerkleTree | undefined; -function getTree() { +async function getTree() { if (!protocolContractTree) { const leaves = protocolContractNames.map(name => ({ address: ProtocolContractAddress[name], leaf: ProtocolContractLeaf[name], })); - protocolContractTree = buildProtocolContractTree(leaves); + protocolContractTree = await buildProtocolContractTree(leaves); } return protocolContractTree; } -export function getProtocolContractSiblingPath(address: AztecAddress) { - const tree = getTree(); +export async function getProtocolContractSiblingPath(address: AztecAddress) 
{ + const tree = await getTree(); const index = address.toField().toNumber(); return assertLength( tree.getSiblingPath(index).map(buf => new Fr(buf)), diff --git a/yarn-project/protocol-contracts/src/router/index.ts b/yarn-project/protocol-contracts/src/router/index.ts index 8ea8e67e12a6..e01014aa095c 100644 --- a/yarn-project/protocol-contracts/src/router/index.ts +++ b/yarn-project/protocol-contracts/src/router/index.ts @@ -1,6 +1,6 @@ import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; /** Returns the canonical deployment of the router. */ -export function getCanonicalRouter(): ProtocolContract { - return getCanonicalProtocolContract('Router'); +export async function getCanonicalRouter(): Promise { + return await getCanonicalProtocolContract('Router'); } diff --git a/yarn-project/protocol-contracts/src/scripts/generate_data.ts b/yarn-project/protocol-contracts/src/scripts/generate_data.ts index eebd16860fe6..2f85bc805f93 100644 --- a/yarn-project/protocol-contracts/src/scripts/generate_data.ts +++ b/yarn-project/protocol-contracts/src/scripts/generate_data.ts @@ -64,17 +64,17 @@ async function copyArtifact(srcName: string, destName: string) { return artifact; } -function computeContractLeaf(artifact: NoirCompiledContract) { - const instance = getContractInstanceFromDeployParams(loadContractArtifact(artifact), { salt }); +async function computeContractLeaf(artifact: NoirCompiledContract) { + const instance = await getContractInstanceFromDeployParams(loadContractArtifact(artifact), { salt }); return instance.address; } -function computeRoot(names: string[], leaves: Fr[]) { +async function computeRoot(names: string[], leaves: Fr[]) { const data = names.map((name, i) => ({ address: new AztecAddress(new Fr(contractAddressMapping[name])), leaf: leaves[i], })); - const tree = buildProtocolContractTree(data); + const tree = await buildProtocolContractTree(data); return Fr.fromBuffer(tree.root); } @@ -200,7 +200,7 @@ async 
function main() { const destName = destNames[i]; const artifact = await copyArtifact(srcName, destName); await generateDeclarationFile(destName); - leaves.push(computeContractLeaf(artifact).toField()); + leaves.push((await computeContractLeaf(artifact)).toField()); } await generateOutputFile(destNames, leaves); diff --git a/yarn-project/prover-client/src/block_builder/light.test.ts b/yarn-project/prover-client/src/block_builder/light.test.ts index de35c68e72f3..2b706aa6b8b7 100644 --- a/yarn-project/prover-client/src/block_builder/light.test.ts +++ b/yarn-project/prover-client/src/block_builder/light.test.ts @@ -80,7 +80,7 @@ describe('LightBlockBuilder', () => { beforeAll(async () => { logger = createDebugLogger('aztec:sequencer-client:test:block-builder'); simulator = new TestCircuitProver(new NoopTelemetryClient()); - vkTreeRoot = getVKTreeRoot(); + vkTreeRoot = await getVKTreeRoot(); emptyProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); db = await NativeWorldStateService.tmp(); }); @@ -103,7 +103,7 @@ describe('LightBlockBuilder', () => { }); it('builds a 2 tx header', async () => { - const txs = times(2, makeTx); + const txs = await Promise.all(times(2, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); @@ -112,7 +112,7 @@ describe('LightBlockBuilder', () => { }); it('builds a 3 tx header', async () => { - const txs = times(3, makeTx); + const txs = await Promise.all(times(3, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { @@ -124,7 +124,7 @@ describe('LightBlockBuilder', () => { }); it('builds a 4 tx header', async () => { - const txs = times(4, makeTx); + const txs = await Promise.all(times(4, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { 
@@ -138,7 +138,7 @@ describe('LightBlockBuilder', () => { it('builds a 4 tx header with no l1 to l2 messages', async () => { const l1ToL2Messages: Fr[] = []; - const txs = times(4, makeTx); + const txs = await Promise.all(times(4, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { @@ -151,7 +151,7 @@ describe('LightBlockBuilder', () => { }); it('builds a 5 tx header', async () => { - const txs = times(5, makeTx); + const txs = await Promise.all(times(5, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { @@ -165,7 +165,7 @@ describe('LightBlockBuilder', () => { }); it('builds a single tx header', async () => { - const txs = times(1, makeTx); + const txs = await Promise.all(times(1, makeTx)); const header = await buildHeader(txs, l1ToL2Messages); const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); @@ -236,7 +236,7 @@ describe('LightBlockBuilder', () => { const parityOutput = await getParityOutput(l1ToL2Messages); const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); const rootOutput = await getBlockRootOutput(mergeLeft, mergeRight, parityOutput, l1ToL2Snapshot); - const expectedHeader = buildHeaderFromCircuitOutputs( + const expectedHeader = await buildHeaderFromCircuitOutputs( [mergeLeft, mergeRight], parityOutput, rootOutput, @@ -269,7 +269,7 @@ describe('LightBlockBuilder', () => { const rollupOutputs = []; for (const tx of txs) { const vkIndex = TUBE_VK_INDEX; - const vkPath = getVKSiblingPath(vkIndex); + const vkPath = await getVKSiblingPath(vkIndex); const vkData = new VkWitnessData(TubeVk, vkIndex, vkPath); const tubeData = new PrivateTubeData(tx.data.toKernelCircuitPublicInputs(), emptyProof, vkData); const hints = await buildBaseRollupHints(tx, globalVariables, expectsFork); @@ 
-282,7 +282,7 @@ describe('LightBlockBuilder', () => { const getMergeOutput = async (left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) => { const baseRollupVk = ProtocolCircuitVks['PrivateBaseRollupArtifact'].keyAsFields; - const baseRollupVkWitness = getVkMembershipWitness(baseRollupVk); + const baseRollupVkWitness = await getVkMembershipWitness(baseRollupVk); const leftInput = new PreviousRollupData(left, emptyProof, baseRollupVk, baseRollupVkWitness); const rightInput = new PreviousRollupData(right, emptyProof, baseRollupVk, baseRollupVkWitness); const inputs = new MergeRollupInputs([leftInput, rightInput]); @@ -296,7 +296,7 @@ describe('LightBlockBuilder', () => { const rootParityInputs: RootParityInput[] = []; const baseParityVk = ProtocolCircuitVks['BaseParityArtifact'].keyAsFields; - const baseParityVkWitness = getVkMembershipWitness(baseParityVk); + const baseParityVkWitness = await getVkMembershipWitness(baseParityVk); for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) { const input = BaseParityInputs.fromSlice(l1ToL2Messages, i, vkTreeRoot); const { inputs } = await simulator.getBaseParityProof(input); @@ -320,7 +320,7 @@ describe('LightBlockBuilder', () => { }, ) => { const mergeRollupVk = ProtocolCircuitVks['MergeRollupArtifact'].keyAsFields; - const mergeRollupVkWitness = getVkMembershipWitness(mergeRollupVk); + const mergeRollupVkWitness = await getVkMembershipWitness(mergeRollupVk); const rollupLeft = new PreviousRollupData(left, emptyProof, mergeRollupVk, mergeRollupVkWitness); const rollupRight = new PreviousRollupData(right, emptyProof, mergeRollupVk, mergeRollupVkWitness); @@ -330,7 +330,7 @@ describe('LightBlockBuilder', () => { const previousBlockHash = (await expectsFork.getLeafValue(MerkleTreeId.ARCHIVE, previousBlockHashLeafIndex))!; const rootParityVk = ProtocolCircuitVks['RootParityArtifact'].keyAsFields; - const rootParityVkWitness = getVkMembershipWitness(rootParityVk); + const rootParityVkWitness = await 
getVkMembershipWitness(rootParityVk); const rootParityInput = new RootParityInput( emptyProof, @@ -355,8 +355,8 @@ describe('LightBlockBuilder', () => { return result.inputs; }; - function getVkMembershipWitness(vk: VerificationKeyAsFields) { - const leafIndex = getVKIndex(vk); - return new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)); + async function getVkMembershipWitness(vk: VerificationKeyAsFields) { + const leafIndex = await getVKIndex(vk); + return new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex)); } }); diff --git a/yarn-project/prover-client/src/block_builder/light.ts b/yarn-project/prover-client/src/block_builder/light.ts index 3bc5d4a299d9..d1ea18a18b40 100644 --- a/yarn-project/prover-client/src/block_builder/light.ts +++ b/yarn-project/prover-client/src/block_builder/light.ts @@ -61,7 +61,7 @@ export class LightweightBlockBuilder implements BlockBuilder { this.db.getInitialHeader(), this.globalVariables!.chainId, this.globalVariables!.version, - getVKTreeRoot(), + await getVKTreeRoot(), protocolContractTreeRoot, ), ); diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index ca78b4395152..7d37c8cc125e 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -97,7 +97,7 @@ export async function buildBaseRollupHints( // Create data hint for reading fee payer initial balance in Fee Juice // If no fee payer is set, read hint should be empty - const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer); + const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer); const feePayerFeeJuiceBalanceReadHint = tx.data.feePayer.isZero() ? 
PublicDataHint.empty() : await getPublicDataHint(db, leafSlot.toBigInt()); @@ -173,7 +173,7 @@ export async function buildBaseRollupHints( ), }); - const blockHash = tx.constants.historicalHeader.hash(); + const blockHash = await tx.constants.historicalHeader.hash(); const archiveRootMembershipWitness = await getMembershipWitnessFor( blockHash, MerkleTreeId.ARCHIVE, @@ -226,7 +226,7 @@ export async function buildBaseRollupHints( feeWriteSiblingPath, }); - const blockHash = tx.constants.historicalHeader.hash(); + const blockHash = await tx.constants.historicalHeader.hash(); const archiveRootMembershipWitness = await getMembershipWitnessFor( blockHash, MerkleTreeId.ARCHIVE, @@ -264,18 +264,18 @@ async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage); } -export function createMergeRollupInputs( +export async function createMergeRollupInputs( left: [BaseOrMergeRollupPublicInputs, RecursiveProof, VerificationKeyAsFields], right: [BaseOrMergeRollupPublicInputs, RecursiveProof, VerificationKeyAsFields], ) { const mergeInputs = new MergeRollupInputs([ - getPreviousRollupDataFromPublicInputs(left[0], left[1], left[2]), - getPreviousRollupDataFromPublicInputs(right[0], right[1], right[2]), + await getPreviousRollupDataFromPublicInputs(left[0], left[1], left[2]), + await getPreviousRollupDataFromPublicInputs(right[0], right[1], right[2]), ]); return mergeInputs; } -export function createBlockMergeRollupInputs( +export async function createBlockMergeRollupInputs( left: [ BlockRootOrBlockMergePublicInputs, RecursiveProof, @@ -288,13 +288,13 @@ export function createBlockMergeRollupInputs( ], ) { const mergeInputs = new BlockMergeRollupInputs([ - getPreviousRollupBlockDataFromPublicInputs(left[0], left[1], left[2]), - getPreviousRollupBlockDataFromPublicInputs(right[0], right[1], right[2]), + await getPreviousRollupBlockDataFromPublicInputs(left[0], left[1], left[2]), + 
await getPreviousRollupBlockDataFromPublicInputs(right[0], right[1], right[2]), ]); return mergeInputs; } -export function buildHeaderFromCircuitOutputs( +export async function buildHeaderFromCircuitOutputs( previousMergeData: [BaseOrMergeRollupPublicInputs, BaseOrMergeRollupPublicInputs], parityPublicInputs: ParityPublicInputs, rootRollupOutputs: BlockRootOrBlockMergePublicInputs, @@ -318,7 +318,7 @@ export function buildHeaderFromCircuitOutputs( previousMergeData[0].accumulatedFees.add(previousMergeData[1].accumulatedFees), previousMergeData[0].accumulatedManaUsed.add(previousMergeData[1].accumulatedManaUsed), ); - if (!header.hash().equals(rootRollupOutputs.endBlockHash)) { + if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) { logger?.error( `Block header mismatch when building header from circuit outputs.` + `\n\nHeader: ${inspect(header)}` + @@ -357,7 +357,7 @@ export async function buildHeaderAndBodyFromTxs( l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); const hasher = (left: Buffer, right: Buffer) => sha256Trunc(Buffer.concat([left, right])); const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP)); - const parityShaRoot = new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot( + const parityShaRoot = await new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot( l1ToL2Messages.map(msg => msg.toBuffer()), ); @@ -412,7 +412,7 @@ export async function getRootTreeSiblingPath(treeId: T } // Builds the inputs for the final root rollup circuit, without making any changes to trees -export function getRootRollupInput( +export async function getRootRollupInput( rollupOutputLeft: BlockRootOrBlockMergePublicInputs, rollupProofLeft: RecursiveProof, verificationKeyLeft: VerificationKeyAsFields, @@ -422,8 +422,8 @@ export function getRootRollupInput( proverId: Fr, ) { const previousRollupData: RootRollupInputs['previousRollupData'] = [ 
- getPreviousRollupBlockDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, verificationKeyLeft), - getPreviousRollupBlockDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), + await getPreviousRollupBlockDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, verificationKeyLeft), + await getPreviousRollupBlockDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), ]; return RootRollupInputs.from({ @@ -432,33 +432,33 @@ export function getRootRollupInput( }); } -export function getPreviousRollupDataFromPublicInputs( +export async function getPreviousRollupDataFromPublicInputs( rollupOutput: BaseOrMergeRollupPublicInputs, rollupProof: RecursiveProof, vk: VerificationKeyAsFields, ) { - const leafIndex = getVKIndex(vk); + const leafIndex = await getVKIndex(vk); return new PreviousRollupData( rollupOutput, rollupProof, vk, - new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)), + new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex)), ); } -export function getPreviousRollupBlockDataFromPublicInputs( +export async function getPreviousRollupBlockDataFromPublicInputs( rollupOutput: BlockRootOrBlockMergePublicInputs, rollupProof: RecursiveProof, vk: VerificationKeyAsFields, ) { - const leafIndex = getVKIndex(vk); + const leafIndex = await getVKIndex(vk); return new PreviousRollupBlockData( rollupOutput, rollupProof, vk, - new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)), + new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex)), ); } @@ -467,7 +467,7 @@ export async function getConstantRollupData( db: MerkleTreeReadOperations, ): Promise { return ConstantRollupData.from({ - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), globalVariables, diff --git 
a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 73a7a425b030..05d3196214c3 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -178,8 +178,10 @@ export class ProvingOrchestrator implements EpochProver { } catch (err) { throw new Error('Too many L1 to L2 messages'); } - baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) => - BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()), + baseParityInputs = await Promise.all( + Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, async (_, i) => + BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, await getVKTreeRoot()), + ), ); const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db); @@ -308,7 +310,7 @@ export class ProvingOrchestrator implements EpochProver { this.dbs.get(blockNumber)!.getInitialHeader(), provingState.globalVariables.chainId, provingState.globalVariables.version, - getVKTreeRoot(), + await getVKTreeRoot(), protocolContractTreeRoot, ); const txInputs: Array<{ hints: BaseRollupHints; snapshot: TreeSnapshots }> = []; @@ -322,7 +324,7 @@ export class ProvingOrchestrator implements EpochProver { } // Now enqueue the proving - this.enqueuePaddingTxs(provingState, txInputs, unprovenPaddingTx); + await this.enqueuePaddingTxs(provingState, txInputs, unprovenPaddingTx); } // And build the block header @@ -330,7 +332,7 @@ export class ProvingOrchestrator implements EpochProver { await this.buildBlock(provingState, expectedHeader); // If the proofs were faster than the block building, then we need to try the block root rollup again here - this.checkAndEnqueueBlockRootRollup(provingState); + await this.checkAndEnqueueBlockRootRollup(provingState); return provingState.block!; } @@ -352,7 +354,7 @@ export class ProvingOrchestrator implements EpochProver { 
[Attributes.EPOCH_SIZE]: this.provingState.totalNumBlocks, }; }) - private padEpoch(): Promise { + private async padEpoch(): Promise { const provingState = this.provingState!; const lastBlock = maxBy( provingState.blocks.filter(b => !!b), @@ -371,9 +373,9 @@ export class ProvingOrchestrator implements EpochProver { const inputs = EmptyBlockRootRollupInputs.from({ archive: lastBlock.archive, - blockHash: lastBlock.header.hash(), + blockHash: await lastBlock.header.hash(), globalVariables: lastBlock.header.globalVariables, - vkTreeRoot: getVKTreeRoot(), + vkTreeRoot: await getVKTreeRoot(), protocolContractTreeRoot, proverId: this.proverId, }); @@ -390,13 +392,13 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getEmptyBlockRootRollupProof(inputs, signal, provingState.epochNumber), ), - result => { + async result => { logger.debug(`Completed proof for padding block`); const currentLevel = provingState.numMergeLevels + 1n; for (let i = 0; i < paddingBlockCount; i++) { logger.debug(`Enqueuing padding block with index ${provingState.blocks.length + i}`); const index = BigInt(provingState.blocks.length + i); - this.storeAndExecuteNextBlockMergeLevel(provingState, currentLevel, index, [ + await this.storeAndExecuteNextBlockMergeLevel(provingState, currentLevel, index, [ result.inputs, result.proof, result.verificationKey.keyAsFields, @@ -463,7 +465,7 @@ export class ProvingOrchestrator implements EpochProver { // Enqueues the proving of the required padding transactions // If the fully proven padding transaction is not available, this will first be proven - private enqueuePaddingTxs( + private async enqueuePaddingTxs( provingState: BlockProvingState, txInputs: Array<{ hints: BaseRollupHints; snapshot: TreeSnapshots }>, paddingTx: ProcessedTx, @@ -471,7 +473,7 @@ export class ProvingOrchestrator implements EpochProver { if (this.paddingTxProof) { // We already have the padding transaction logger.debug(`Enqueuing ${txInputs.length} 
padding transactions using existing padding tx`); - this.provePaddingTransactions(txInputs, paddingTx, this.paddingTxProof, provingState); + await this.provePaddingTransactions(txInputs, paddingTx, this.paddingTxProof, provingState); return; } logger.debug(`Enqueuing deferred proving for padding txs to enqueue ${txInputs.length} paddings`); @@ -500,10 +502,10 @@ export class ProvingOrchestrator implements EpochProver { provingState.epochNumber, ), ), - result => { + async result => { logger.debug(`Completed proof for padding tx, now enqueuing ${txInputs.length} padding txs`); this.paddingTxProof = { proof: result.proof, verificationKey: result.verificationKey }; - this.provePaddingTransactions(txInputs, paddingTx, this.paddingTxProof, provingState); + await this.provePaddingTransactions(txInputs, paddingTx, this.paddingTxProof, provingState); }, ); } @@ -515,7 +517,7 @@ export class ProvingOrchestrator implements EpochProver { * @param proofAndVk - The proof and vk of the paddingTx. * @param provingState - The block proving state */ - private provePaddingTransactions( + private async provePaddingTransactions( txInputs: Array<{ hints: BaseRollupHints; snapshot: TreeSnapshots }>, paddingTx: ProcessedTx, proofAndVk: ProofAndVerificationKey, @@ -528,7 +530,7 @@ export class ProvingOrchestrator implements EpochProver { const txProvingState = new TxProvingState(paddingTx, hints, snapshot); txProvingState.assignTubeProof(proofAndVk); const txIndex = provingState.addNewTx(txProvingState); - this.enqueueBaseRollup(provingState, txIndex); + await this.enqueueBaseRollup(provingState, txIndex); } } @@ -723,7 +725,7 @@ export class ProvingOrchestrator implements EpochProver { // Executes the base rollup circuit and stored the output as intermediate state for the parent merge/root circuit // Executes the next level of merge if all inputs are available - private enqueueBaseRollup(provingState: BlockProvingState | undefined, txIndex: number) { + private async 
enqueueBaseRollup(provingState: BlockProvingState | undefined, txIndex: number) { if (!provingState?.verifyState()) { logger.debug('Not running base rollup, state invalid'); return; @@ -751,21 +753,21 @@ export class ProvingOrchestrator implements EpochProver { [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType satisfies CircuitName, }, - signal => { + async signal => { if (rollupType === 'private-base-rollup') { - const inputs = txProvingState.getPrivateBaseInputs(); + const inputs = await txProvingState.getPrivateBaseInputs(); return this.prover.getPrivateBaseRollupProof(inputs, signal, provingState.epochNumber); } else { - const inputs = txProvingState.getPublicBaseInputs(); + const inputs = await txProvingState.getPublicBaseInputs(); return this.prover.getPublicBaseRollupProof(inputs, signal, provingState.epochNumber); } }, ), - result => { + async result => { logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`); validatePartialState(result.inputs.end, txProvingState.treeSnapshots); const currentLevel = provingState.numMergeLevels + 1n; - this.storeAndExecuteNextMergeLevel(provingState, currentLevel, BigInt(txIndex), [ + await this.storeAndExecuteNextMergeLevel(provingState, currentLevel, BigInt(txIndex), [ result.inputs, result.proof, result.verificationKey.keyAsFields, @@ -800,23 +802,23 @@ export class ProvingOrchestrator implements EpochProver { return this.prover.getTubeProof(inputs, signal, provingState.epochNumber); }, ), - result => { + async result => { logger.debug(`Completed tube proof for tx index: ${txIndex}`); txProvingState.assignTubeProof(result); - this.checkAndEnqueueNextTxCircuit(provingState, txIndex); + await this.checkAndEnqueueNextTxCircuit(provingState, txIndex); }, ); } // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit // Enqueues the next level of merge if all inputs are available - private 
enqueueMergeRollup( + private async enqueueMergeRollup( provingState: BlockProvingState, level: bigint, index: bigint, mergeInputData: MergeRollupInputData, ) { - const inputs = createMergeRollupInputs( + const inputs = await createMergeRollupInputs( [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!, mergeInputData.verificationKeys[0]!], [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!, mergeInputData.verificationKeys[1]!], ); @@ -832,8 +834,8 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber), ), - result => { - this.storeAndExecuteNextMergeLevel(provingState, level, index, [ + async result => { + await this.storeAndExecuteNextMergeLevel(provingState, level, index, [ result.inputs, result.proof, result.verificationKey.keyAsFields, @@ -843,7 +845,7 @@ export class ProvingOrchestrator implements EpochProver { } // Executes the block root rollup circuit - private enqueueBlockRootRollup(provingState: BlockProvingState) { + private async enqueueBlockRootRollup(provingState: BlockProvingState) { if (!provingState.block) { throw new Error(`Invalid proving state for block root rollup, block not available`); } @@ -861,11 +863,13 @@ export class ProvingOrchestrator implements EpochProver { `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs`, ); - const previousRollupData: BlockRootRollupInputs['previousRollupData'] = makeTuple(2, i => - getPreviousRollupDataFromPublicInputs( - mergeInputData.inputs[i]!, - mergeInputData.proofs[i]!, - mergeInputData.verificationKeys[i]!, + const previousRollupData: BlockRootRollupInputs['previousRollupData'] = await Promise.all( + makeTuple(2, i => + getPreviousRollupDataFromPublicInputs( + mergeInputData.inputs[i]!, + mergeInputData.proofs[i]!, + mergeInputData.verificationKeys[i]!, + ), ), ); @@ -892,9 +896,9 @@ export class ProvingOrchestrator implements 
EpochProver { }, signal => this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber), ), - result => { - const header = this.extractBlockHeaderFromPublicInputs(provingState, result.inputs); - if (!header.hash().equals(provingState.block!.header.hash())) { + async result => { + const header = await this.extractBlockHeaderFromPublicInputs(provingState, result.inputs); + if (!(await header.hash()).equals(await provingState.block!.header.hash())) { logger.error( `Block header mismatch\nCircuit:${inspect(header)}\nComputed:${inspect(provingState.block!.header)}`, ); @@ -908,7 +912,7 @@ export class ProvingOrchestrator implements EpochProver { // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover) const currentLevel = this.provingState!.numMergeLevels + 1n; - this.storeAndExecuteNextBlockMergeLevel(this.provingState!, currentLevel, BigInt(provingState.index), [ + await this.storeAndExecuteNextBlockMergeLevel(this.provingState!, currentLevel, BigInt(provingState.index), [ result.inputs, result.proof, result.verificationKey.keyAsFields, @@ -919,7 +923,7 @@ export class ProvingOrchestrator implements EpochProver { // Executes the base parity circuit and stores the intermediate state for the root parity circuit // Enqueues the root parity circuit if all inputs are available - private enqueueBaseParityCircuit(provingState: BlockProvingState, inputs: BaseParityInputs, index: number) { + private async enqueueBaseParityCircuit(provingState: BlockProvingState, inputs: BaseParityInputs, index: number) { this.deferredProving( provingState, wrapCallbackInSpan( @@ -931,11 +935,11 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber), ), - provingOutput => { + async provingOutput => { const rootParityInput = new RootParityInput( provingOutput.proof, provingOutput.verificationKey.keyAsFields, - 
getVKSiblingPath(getVKIndex(provingOutput.verificationKey)), + await getVKSiblingPath(await getVKIndex(provingOutput.verificationKey)), provingOutput.inputs, ); provingState.setRootParityInputs(rootParityInput, index); @@ -966,28 +970,28 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getRootParityProof(inputs, signal, provingState.epochNumber), ), - provingOutput => { + async provingOutput => { const rootParityInput = new RootParityInput( provingOutput.proof, provingOutput.verificationKey.keyAsFields, - getVKSiblingPath(getVKIndex(provingOutput.verificationKey)), + await getVKSiblingPath(await getVKIndex(provingOutput.verificationKey)), provingOutput.inputs, ); provingState!.finalRootParityInput = rootParityInput; - this.checkAndEnqueueBlockRootRollup(provingState); + await this.checkAndEnqueueBlockRootRollup(provingState); }, ); } // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit // Enqueues the next level of merge if all inputs are available - private enqueueBlockMergeRollup( + private async enqueueBlockMergeRollup( provingState: EpochProvingState, level: bigint, index: bigint, mergeInputData: BlockMergeRollupInputData, ) { - const inputs = createBlockMergeRollupInputs( + const inputs = await createBlockMergeRollupInputs( [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!, mergeInputData.verificationKeys[0]!], [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!, mergeInputData.verificationKeys[1]!], ); @@ -1003,8 +1007,8 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber), ), - result => { - this.storeAndExecuteNextBlockMergeLevel(provingState, level, index, [ + async result => { + await this.storeAndExecuteNextBlockMergeLevel(provingState, level, index, [ result.inputs, result.proof, result.verificationKey.keyAsFields, @@ -1014,7 +1018,7 @@ 
export class ProvingOrchestrator implements EpochProver { } // Executes the root rollup circuit - private enqueueRootRollup(provingState: EpochProvingState | undefined) { + private async enqueueRootRollup(provingState: EpochProvingState | undefined) { if (!provingState?.verifyState()) { logger.debug('Not running root rollup, state no longer valid'); return; @@ -1023,7 +1027,7 @@ export class ProvingOrchestrator implements EpochProver { logger.debug(`Preparing root rollup`); const mergeInputData = provingState.getMergeInputs(0); - const inputs = getRootRollupInput( + const inputs = await getRootRollupInput( mergeInputData.inputs[0]!, mergeInputData.proofs[0]!, mergeInputData.verificationKeys[0]!, @@ -1053,7 +1057,7 @@ export class ProvingOrchestrator implements EpochProver { ); } - private checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) { + private async checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) { if (!provingState?.isReadyForBlockRootRollup()) { logger.debug('Not ready for root rollup'); return; @@ -1078,12 +1082,12 @@ export class ProvingOrchestrator implements EpochProver { this.enqueueBlockRootRollup(provingState); } - private checkAndEnqueueRootRollup(provingState: EpochProvingState | undefined) { + private async checkAndEnqueueRootRollup(provingState: EpochProvingState | undefined) { if (!provingState?.isReadyForRootRollup()) { logger.debug('Not ready for root rollup'); return; } - this.enqueueRootRollup(provingState); + await this.enqueueRootRollup(provingState); } /** @@ -1093,7 +1097,7 @@ export class ProvingOrchestrator implements EpochProver { * @param currentIndex - The index of the merge/root circuit * @param mergeInputData - The inputs to be stored */ - private storeAndExecuteNextMergeLevel( + private async storeAndExecuteNextMergeLevel( provingState: BlockProvingState, currentLevel: bigint, currentIndex: bigint, @@ -1117,10 +1121,10 @@ export class ProvingOrchestrator implements EpochProver { } if (mergeLevel === 
0n) { - this.checkAndEnqueueBlockRootRollup(provingState); + await this.checkAndEnqueueBlockRootRollup(provingState); } else { // onto the next merge level - this.enqueueMergeRollup(provingState, mergeLevel, indexWithinMergeLevel, nextMergeInputData); + await this.enqueueMergeRollup(provingState, mergeLevel, indexWithinMergeLevel, nextMergeInputData); } } @@ -1131,7 +1135,7 @@ export class ProvingOrchestrator implements EpochProver { * @param currentIndex - The index of the merge/root circuit * @param mergeInputData - The inputs to be stored */ - private storeAndExecuteNextBlockMergeLevel( + private async storeAndExecuteNextBlockMergeLevel( provingState: EpochProvingState, currentLevel: bigint, currentIndex: bigint, @@ -1161,10 +1165,10 @@ export class ProvingOrchestrator implements EpochProver { } if (mergeLevel === 0n) { - this.checkAndEnqueueRootRollup(provingState); + await this.checkAndEnqueueRootRollup(provingState); } else { // onto the next merge level - this.enqueueBlockMergeRollup(provingState, mergeLevel, indexWithinMergeLevel, nextMergeInputData); + await this.enqueueBlockMergeRollup(provingState, mergeLevel, indexWithinMergeLevel, nextMergeInputData); } } @@ -1210,14 +1214,14 @@ export class ProvingOrchestrator implements EpochProver { }, ); - this.deferredProving(provingState, doAvmProving, proofAndVk => { + this.deferredProving(provingState, doAvmProving, async proofAndVk => { logger.debug(`Proven VM for tx index: ${txIndex}`); txProvingState.assignAvmProof(proofAndVk); - this.checkAndEnqueueNextTxCircuit(provingState, txIndex); + await this.checkAndEnqueueNextTxCircuit(provingState, txIndex); }); } - private checkAndEnqueueNextTxCircuit(provingState: BlockProvingState, txIndex: number) { + private async checkAndEnqueueNextTxCircuit(provingState: BlockProvingState, txIndex: number) { const txProvingState = provingState.getTxProvingState(txIndex); if (!txProvingState.ready()) { return; @@ -1226,6 +1230,6 @@ export class ProvingOrchestrator implements 
EpochProver { // We must have completed all proving (tube proof and (if required) vm proof are generated), we now move to the base rollup. logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`); - this.enqueueBaseRollup(provingState, txIndex); + await this.enqueueBaseRollup(provingState, txIndex); } } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts index 5b4adf7d34d9..6813692b37b9 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts @@ -28,11 +28,13 @@ describe('prover/orchestrator/public-functions', () => { numberOfNonRevertiblePublicCallRequests: number, numberOfRevertiblePublicCallRequests: number, ) => { - const txs = times(numTransactions, (i: number) => - mockTx(100000 * testCount++ + 1000 * i, { - numberOfNonRevertiblePublicCallRequests, - numberOfRevertiblePublicCallRequests, - }), + const txs = await Promise.all( + times(numTransactions, (i: number) => + mockTx(100000 * testCount++ + 1000 * i, { + numberOfNonRevertiblePublicCallRequests, + numberOfRevertiblePublicCallRequests, + }), + ), ); for (const tx of txs) { tx.data.constants.historicalHeader = context.getBlockHeader(0); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts index 040ab5ad44d6..ff51d50e2af3 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts @@ -31,7 +31,7 @@ describe('prover/orchestrator/public-functions', () => { ] as const)( 'builds an L2 block with %i non-revertible and %i revertible calls', async 
(numberOfNonRevertiblePublicCallRequests: number, numberOfRevertiblePublicCallRequests: number) => { - const tx = mockTx(1000 * testCount++, { + const tx = await mockTx(1000 * testCount++, { numberOfNonRevertiblePublicCallRequests, numberOfRevertiblePublicCallRequests, }); diff --git a/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts b/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts index 311b4aa75a0a..d2ea6a39b6fd 100644 --- a/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts @@ -49,7 +49,7 @@ export class TxProvingState { return this.processedTx.avmProvingRequest!.inputs; } - public getPrivateBaseInputs() { + public async getPrivateBaseInputs() { if (this.requireAvmProof) { throw new Error('Should create public base rollup for a tx requiring avm proof.'); } @@ -57,7 +57,7 @@ export class TxProvingState { throw new Error('Tx not ready for proving base rollup.'); } - const vkData = this.getTubeVkData(); + const vkData = await this.getTubeVkData(); const tubeData = new PrivateTubeData(this.processedTx.data.toKernelCircuitPublicInputs(), this.tube.proof, vkData); if (!(this.baseRollupHints instanceof PrivateBaseRollupHints)) { @@ -66,7 +66,7 @@ export class TxProvingState { return new PrivateBaseRollupInputs(tubeData, this.baseRollupHints); } - public getPublicBaseInputs() { + public async getPublicBaseInputs() { if (!this.processedTx.avmProvingRequest) { throw new Error('Should create private base rollup for a tx not requiring avm proof.'); } @@ -80,13 +80,13 @@ export class TxProvingState { const tubeData = new PublicTubeData( this.processedTx.data.toPublicKernelCircuitPublicInputs(), this.tube.proof, - this.getTubeVkData(), + await this.getTubeVkData(), ); const avmProofData = new AvmProofData( this.processedTx.avmProvingRequest.inputs.output, this.avm.proof, - this.getAvmVkData(), + await this.getAvmVkData(), ); if (!(this.baseRollupHints instanceof 
PublicBaseRollupHints)) { @@ -107,18 +107,18 @@ export class TxProvingState { private getTubeVkData() { let vkIndex = TUBE_VK_INDEX; try { - vkIndex = getVKIndex(this.tube!.verificationKey); + vkIndex = await getVKIndex(this.tube!.verificationKey); } catch (_ignored) { // TODO(#7410) The VK for the tube won't be in the tree for now, so we manually set it to the tube vk index } - const vkPath = getVKSiblingPath(vkIndex); + const vkPath = await getVKSiblingPath(vkIndex); return new VkWitnessData(this.tube!.verificationKey, vkIndex, vkPath); } - private getAvmVkData() { + private async getAvmVkData() { const vkIndex = AVM_VK_INDEX; - const vkPath = getVKSiblingPath(vkIndex); + const vkPath = await getVKSiblingPath(vkIndex); return new VkWitnessData(this.avm!.verificationKey, AVM_VK_INDEX, vkPath); } } diff --git a/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts b/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts index fe22d7234957..580770c683e2 100644 --- a/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts +++ b/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts @@ -31,7 +31,9 @@ describe('Prover agent <-> queue integration', () => { let prover: ServerCircuitProver; type MakeInputs = { - [K in keyof ServerCircuitProver]: () => Parameters[0]; + [K in keyof ServerCircuitProver]: () => + | Promise[0]> + | Parameters[0]; }; const makeInputs: MakeInputs = { @@ -72,7 +74,7 @@ describe('Prover agent <-> queue integration', () => { // TODO: This test hangs instead of failing when the Inputs are not registered on the RPC wrapper it.each(Object.entries(makeInputs))('can call %s over JSON-RPC', async (fnName, makeInputs) => { - const resp = await queue[fnName as keyof ServerCircuitProver](makeInputs() as any); + const resp = await queue[fnName as keyof ServerCircuitProver]((await makeInputs()) as any); expect(resp).toBeDefined(); }); }); diff --git 
a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts index 09945d2010e1..bf7ed3bce1ac 100644 --- a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts @@ -38,7 +38,7 @@ describe('prover/bb_prover/base-rollup', () => { const header = context.getBlockHeader(0); const chainId = context.globalVariables.chainId; const version = context.globalVariables.version; - const vkTreeRoot = getVKTreeRoot(); + const vkTreeRoot = await getVKTreeRoot(); const tx = makeEmptyProcessedTx(header, chainId, version, vkTreeRoot, protocolContractTreeRoot); @@ -54,7 +54,7 @@ describe('prover/bb_prover/base-rollup', () => { expect(tubeProof.inputs).toEqual(tx.data.toKernelCircuitPublicInputs()); const vkIndex = PRIVATE_KERNEL_EMPTY_INDEX; - const vkPath = getVKSiblingPath(vkIndex); + const vkPath = await getVKSiblingPath(vkIndex); const vkData = new VkWitnessData(tubeProof.verificationKey, vkIndex, vkPath); const tubeData = new PrivateTubeData(tubeProof.inputs, tubeProof.proof, vkData); diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index 182742183e69..7a995e7cc17f 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -45,13 +45,15 @@ describe('prover/bb_prover/full-rollup', () => { for (let blockNum = 1; blockNum <= blockCount; blockNum++) { const globals = makeGlobals(blockNum); const l1ToL2Messages = makeTuple(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.random); - const txs = times(nonEmptyTxs, (i: number) => { - const txOpts = { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 0 }; - const tx = mockTx(blockNum * 100_000 + 1000 * (i + 1), txOpts); - tx.data.constants.historicalHeader = 
initialHeader; - tx.data.constants.vkTreeRoot = getVKTreeRoot(); - return tx; - }); + const txs = await Promise.all( + times(nonEmptyTxs, async (i: number) => { + const txOpts = { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 0 }; + const tx = await mockTx(blockNum * 100_000 + 1000 * (i + 1), txOpts); + tx.data.constants.historicalHeader = initialHeader; + tx.data.constants.vkTreeRoot = await getVKTreeRoot(); + return tx; + }), + ); log.info(`Starting new block #${blockNum}`); await context.orchestrator.startNewBlock(totalTxs, globals, l1ToL2Messages); @@ -87,11 +89,13 @@ describe('prover/bb_prover/full-rollup', () => { // TODO(@PhilWindle): Remove public functions and re-enable once we can handle empty tx slots it.skip('proves all circuits', async () => { const numTransactions = 4; - const txs = times(numTransactions, (i: number) => - mockTx(1000 * (i + 1), { - numberOfNonRevertiblePublicCallRequests: 2, - numberOfRevertiblePublicCallRequests: 1, - }), + const txs = await Promise.all( + times(numTransactions, (i: number) => + mockTx(1000 * (i + 1), { + numberOfNonRevertiblePublicCallRequests: 2, + numberOfRevertiblePublicCallRequests: 1, + }), + ), ); for (const tx of txs) { tx.data.constants.historicalHeader = context.getBlockHeader(0); diff --git a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts index 1763fd1b4000..8e15fa9efe35 100644 --- a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts @@ -48,8 +48,10 @@ describe('prover/bb_prover/parity', () => { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.random, ); - const baseParityInputs = makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, i => - BaseParityInputs.fromSlice(l1ToL2Messages, i, getVKTreeRoot()), + const baseParityInputs = await Promise.all( + makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, async i => + 
BaseParityInputs.fromSlice(l1ToL2Messages, i, await getVKTreeRoot()), + ), ); // Generate the base parity proofs @@ -57,15 +59,17 @@ describe('prover/bb_prover/parity', () => { baseParityInputs.map(baseInputs => context.prover.getBaseParityProof(baseInputs)), ); - const rootInputs = makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, i => { - const { proof, inputs, verificationKey } = baseParityProofsAndPublicInputs[i]; - return new RootParityInput( - proof, - verificationKey.keyAsFields, - getVKSiblingPath(ProtocolCircuitVkIndexes.BaseParityArtifact), - inputs, - ); - }); + const rootInputs = await Promise.all( + makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, async i => { + const { proof, inputs, verificationKey } = baseParityProofsAndPublicInputs[i]; + return new RootParityInput( + proof, + verificationKey.keyAsFields, + await getVKSiblingPath(ProtocolCircuitVkIndexes.BaseParityArtifact), + inputs, + ); + }), + ); // These could differ if artifacts generated by `generate_vk_json.js` are not consistent with what we do, // which would cause the root parity proof to fail, because the proof of VK root inclusion would not match the key in the proof. 
@@ -94,7 +98,7 @@ describe('prover/bb_prover/parity', () => { // In each case either the proof should fail to generate or verify const validVk = rootParityInputs.children[0].verificationKey; - const baseParityVkPath = getVKSiblingPath(ProtocolCircuitVkIndexes.BaseParityArtifact); + const baseParityVkPath = await getVKSiblingPath(ProtocolCircuitVkIndexes.BaseParityArtifact); const validPublicInputs = rootParityInputs.children[0].publicInputs; const validProof = rootParityInputs.children[0].proof; @@ -112,7 +116,7 @@ describe('prover/bb_prover/parity', () => { validProof, validVk, baseParityVkPath, - new ParityPublicInputs(Fr.fromBuffer(shaRoot), Fr.random(), getVKTreeRoot()), + new ParityPublicInputs(Fr.fromBuffer(shaRoot), Fr.random(), await getVKTreeRoot()), ); const defectiveVerificationKey = new RootParityInput( diff --git a/yarn-project/prover-node/src/quote-provider/http.test.ts b/yarn-project/prover-node/src/quote-provider/http.test.ts index 2f498f5312e1..f3dad9d2db80 100644 --- a/yarn-project/prover-node/src/quote-provider/http.test.ts +++ b/yarn-project/prover-node/src/quote-provider/http.test.ts @@ -39,9 +39,9 @@ describe('HttpQuoteProvider', () => { port = (server.address() as AddressInfo).port; }); - beforeEach(() => { + beforeEach(async () => { provider = new HttpQuoteProvider(`http://127.0.0.1:${port}`); - blocks = times(3, i => L2Block.random(i + 1, 4)); + blocks = await Promise.all(times(3, i => L2Block.random(i + 1, 4))); response = { basisPointFee: 100, bondAmount: '100000000000000000000', validUntilSlot: '100' }; }); diff --git a/yarn-project/pxe/src/contract_data_oracle/index.ts b/yarn-project/pxe/src/contract_data_oracle/index.ts index d5f6f9e5c0d2..7b2749673f56 100644 --- a/yarn-project/pxe/src/contract_data_oracle/index.ts +++ b/yarn-project/pxe/src/contract_data_oracle/index.ts @@ -97,7 +97,7 @@ export class ContractDataOracle { selector: FunctionSelector, ): Promise { const tree = await this.getTreeForAddress(contractAddress); - const 
artifact = tree.getFunctionArtifact(selector); + const artifact = await tree.getFunctionArtifact(selector); return getFunctionDebugMetadata(tree.getArtifact(), artifact); } @@ -138,7 +138,7 @@ export class ContractDataOracle { public async getDebugFunctionName(contractAddress: AztecAddress, selector: FunctionSelector) { const tree = await this.getTreeForAddress(contractAddress); const { name: contractName } = tree.getArtifact(); - const { name: functionName } = tree.getFunctionArtifact(selector); + const { name: functionName } = await tree.getFunctionArtifact(selector); return `${contractName}:${functionName}`; } diff --git a/yarn-project/pxe/src/contract_data_oracle/private_functions_tree.ts b/yarn-project/pxe/src/contract_data_oracle/private_functions_tree.ts index dabd6cc0b9e4..580ea60c8a6f 100644 --- a/yarn-project/pxe/src/contract_data_oracle/private_functions_tree.ts +++ b/yarn-project/pxe/src/contract_data_oracle/private_functions_tree.ts @@ -7,7 +7,7 @@ import { getContractClassFromArtifact, } from '@aztec/circuits.js'; import { type MerkleTree } from '@aztec/circuits.js/merkle'; -import { type ContractArtifact, type FunctionSelector } from '@aztec/foundation/abi'; +import { type ContractArtifact, FunctionSelector } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; import { assertLength } from '@aztec/foundation/serialize'; @@ -19,7 +19,7 @@ import { assertLength } from '@aztec/foundation/serialize'; */ export class PrivateFunctionsTree { private tree?: MerkleTree; - private contractClass: ContractClassWithId; + private contractClass: Promise; constructor(private readonly artifact: ContractArtifact) { this.contractClass = getContractClassFromArtifact(artifact); @@ -33,8 +33,15 @@ export class PrivateFunctionsTree { * @param selector - The function selector. * @returns The artifact object containing relevant information about the targeted function. 
*/ - public getFunctionArtifact(selector: FunctionSelector) { - const artifact = this.artifact.functions.find(f => selector.equals(f.name, f.parameters)); + public async getFunctionArtifact(selector: FunctionSelector) { + const artifact = ( + await Promise.all( + this.artifact.functions.map(async f => { + const fs = await FunctionSelector.fromNameAndParameters(f.name, f.parameters); + return fs.equals(selector) ? f : undefined; + }), + ) + ).find(f => !!f); if (!artifact) { throw new Error( `Unknown function. Selector ${selector.toString()} not found in the artifact ${ @@ -53,8 +60,8 @@ export class PrivateFunctionsTree { * @param selector - The selector of a function to get bytecode for. * @returns The bytecode of the function as a string. */ - public getBytecode(selector: FunctionSelector) { - return this.getFunctionArtifact(selector).bytecode; + public async getBytecode(selector: FunctionSelector) { + return (await this.getFunctionArtifact(selector)).bytecode; } /** @@ -81,8 +88,8 @@ export class PrivateFunctionsTree { /** * Returns the contract class identifier for the given artifact. */ - public getContractClassId() { - return this.getContractClass().id; + public async getContractClassId() { + return (await this.getContractClass()).id; } /** @@ -94,17 +101,17 @@ export class PrivateFunctionsTree { * @param selector - The function selector. * @returns A MembershipWitness instance representing the position and authentication path of the function in the function tree. 
*/ - public getFunctionMembershipWitness( + public async getFunctionMembershipWitness( selector: FunctionSelector, ): Promise> { - const fn = this.getContractClass().privateFunctions.find(f => f.selector.equals(selector)); + const fn = (await this.getContractClass()).privateFunctions.find(f => f.selector.equals(selector)); if (!fn) { throw new Error(`Private function with selector ${selector.toString()} not found in contract class.`); } - const leaf = computePrivateFunctionLeaf(fn); - const index = this.getTree().getIndex(leaf); - const path = this.getTree().getSiblingPath(index); + const leaf = await computePrivateFunctionLeaf(fn); + const index = (await this.getTree()).getIndex(leaf); + const path = (await this.getTree()).getSiblingPath(index); return Promise.resolve( new MembershipWitness( FUNCTION_TREE_HEIGHT, @@ -114,10 +121,10 @@ export class PrivateFunctionsTree { ); } - private getTree() { + private async getTree() { if (!this.tree) { - const fns = this.getContractClass().privateFunctions; - this.tree = computePrivateFunctionsTree(fns); + const fns = (await this.getContractClass()).privateFunctions; + this.tree = await computePrivateFunctionsTree(fns); } return this.tree; } diff --git a/yarn-project/pxe/src/database/incoming_note_dao.test.ts b/yarn-project/pxe/src/database/incoming_note_dao.test.ts index 1df9103e08fe..1afc0ca7d55f 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.test.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.test.ts @@ -1,8 +1,8 @@ import { IncomingNoteDao } from './incoming_note_dao.js'; describe('Incoming Note DAO', () => { - it('convert to and from buffer', () => { - const note = IncomingNoteDao.random(); + it('convert to and from buffer', async () => { + const note = await IncomingNoteDao.random(); const buf = note.toBuffer(); expect(IncomingNoteDao.fromBuffer(buf)).toEqual(note); }); diff --git a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/incoming_note_dao.ts index 
d2dc2d388153..4e734c12879f 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.ts @@ -138,7 +138,7 @@ export class IncomingNoteDao implements NoteData { return noteSize + AztecAddress.SIZE_IN_BYTES + Fr.SIZE_IN_BYTES * 4 + TxHash.SIZE + Point.SIZE_IN_BYTES + indexSize; } - static random({ + static async random({ note = Note.random(), contractAddress = AztecAddress.random(), txHash = randomTxHash(), @@ -150,7 +150,7 @@ export class IncomingNoteDao implements NoteData { noteHash = Fr.random(), siloedNullifier = Fr.random(), index = Fr.random().toBigInt(), - addressPoint = Point.random(), + addressPoint, }: Partial = {}) { return new IncomingNoteDao( note, @@ -164,7 +164,7 @@ export class IncomingNoteDao implements NoteData { noteHash, siloedNullifier, index, - addressPoint, + addressPoint ?? (await Point.random()), ); } } diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts b/yarn-project/pxe/src/database/outgoing_note_dao.test.ts index 0c293ba13ebf..f0fe174446af 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.test.ts @@ -1,9 +1,9 @@ import { OutgoingNoteDao } from './outgoing_note_dao.js'; describe('Outgoing Note DAO', () => { - it('convert to and from buffer', () => { + it('convert to and from buffer', async () => { const note = OutgoingNoteDao.random(); - const buf = note.toBuffer(); + const buf = (await note).toBuffer(); expect(OutgoingNoteDao.fromBuffer(buf)).toEqual(note); }); }); diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.ts b/yarn-project/pxe/src/database/outgoing_note_dao.ts index 386b23ecd573..ef6ef14a4e06 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.ts @@ -126,7 +126,7 @@ export class OutgoingNoteDao { return noteSize + AztecAddress.SIZE_IN_BYTES + Fr.SIZE_IN_BYTES * 2 + TxHash.SIZE + Point.SIZE_IN_BYTES; } - 
static random({ + static async random({ note = Note.random(), contractAddress = AztecAddress.random(), txHash = randomTxHash(), @@ -137,7 +137,7 @@ export class OutgoingNoteDao { l2BlockHash = Fr.random().toString(), noteHash = Fr.random(), index = Fr.random().toBigInt(), - ovpkM = Point.random(), + ovpkM, }: Partial = {}) { return new OutgoingNoteDao( note, @@ -150,7 +150,7 @@ export class OutgoingNoteDao { nonce, noteHash, index, - ovpkM, + ovpkM ?? (await Point.random()), ); } } diff --git a/yarn-project/pxe/src/database/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/pxe_database_test_suite.ts index 9947e952c514..a45afa0bcaf3 100644 --- a/yarn-project/pxe/src/database/pxe_database_test_suite.ts +++ b/yarn-project/pxe/src/database/pxe_database_test_suite.ts @@ -81,7 +81,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { let storageSlots: Fr[]; let notes: IncomingNoteDao[]; - const filteringTests: [() => IncomingNotesFilter, () => IncomingNoteDao[]][] = [ + const filteringTests: [() => IncomingNotesFilter, () => IncomingNoteDao[] | Promise][] = [ [() => ({}), () => notes], [ @@ -115,7 +115,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ]; beforeEach(async () => { - owners = Array.from({ length: 2 }).map(() => CompleteAddress.random()); + owners = await Promise.all(Array.from({ length: 2 }).map(() => CompleteAddress.random())); contractAddresses = Array.from({ length: 2 }).map(() => AztecAddress.random()); storageSlots = Array.from({ length: 2 }).map(() => Fr.random()); @@ -138,7 +138,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await database.addNotes(notes, []); const returnedNotes = await database.getIncomingNotes(getFilter()); - expect(returnedNotes.sort()).toEqual(getExpected().sort()); + expect(returnedNotes.sort()).toEqual((await getExpected()).sort()); }); it.each(filteringTests)('stores notes one by one and retrieves notes', async (getFilter, getExpected) => { @@ 
-148,7 +148,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { const returnedNotes = await database.getIncomingNotes(getFilter()); - expect(returnedNotes.sort()).toEqual(getExpected().sort()); + expect(returnedNotes.sort()).toEqual((await getExpected()).sort()); }); it.each(filteringTests)('retrieves nullified notes', async (getFilter, getExpected) => { @@ -169,7 +169,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await expect( database.getIncomingNotes({ ...getFilter(), status: NoteStatus.ACTIVE_OR_NULLIFIED }), - ).resolves.toEqual(getExpected()); + ).resolves.toEqual(await getExpected()); }); it('skips nullified notes by default or when requesting active', async () => { @@ -351,17 +351,19 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ]; beforeEach(async () => { - owners = Array.from({ length: 2 }).map(() => CompleteAddress.random()); + owners = await Promise.all(Array.from({ length: 2 }).map(() => CompleteAddress.random())); contractAddresses = Array.from({ length: 2 }).map(() => AztecAddress.random()); storageSlots = Array.from({ length: 2 }).map(() => Fr.random()); - notes = Array.from({ length: 10 }).map((_, i) => - OutgoingNoteDao.random({ - contractAddress: contractAddresses[i % contractAddresses.length], - storageSlot: storageSlots[i % storageSlots.length], - ovpkM: owners[i % owners.length].publicKeys.masterOutgoingViewingPublicKey, - index: BigInt(i), - }), + notes = await Promise.all( + Array.from({ length: 10 }).map((_, i) => + OutgoingNoteDao.random({ + contractAddress: contractAddresses[i % contractAddresses.length], + storageSlot: storageSlots[i % storageSlots.length], + ovpkM: owners[i % owners.length].publicKeys.masterOutgoingViewingPublicKey, + index: BigInt(i), + }), + ), ); for (const owner of owners) { @@ -390,22 +392,22 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { describe('addresses', () => { it('stores and retrieves addresses', async 
() => { - const address = CompleteAddress.random(); + const address = await CompleteAddress.random(); await expect(database.addCompleteAddress(address)).resolves.toBe(true); await expect(database.getCompleteAddress(address.address)).resolves.toEqual(address); }); it('silently ignores an address it already knows about', async () => { - const address = CompleteAddress.random(); + const address = await CompleteAddress.random(); await expect(database.addCompleteAddress(address)).resolves.toBe(true); await expect(database.addCompleteAddress(address)).resolves.toBe(false); }); it.skip('refuses to overwrite an address with a different public key', async () => { - const address = CompleteAddress.random(); + const address = await CompleteAddress.random(); const otherAddress = new CompleteAddress( address.address, - new PublicKeys(Point.random(), Point.random(), Point.random(), Point.random()), + new PublicKeys(await Point.random(), await Point.random(), await Point.random(), await Point.random()), address.partialAddress, ); @@ -414,7 +416,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { }); it('returns all addresses', async () => { - const addresses = Array.from({ length: 10 }).map(() => CompleteAddress.random()); + const addresses = await Promise.all(Array.from({ length: 10 }).map(() => CompleteAddress.random())); for (const address of addresses) { await database.addCompleteAddress(address); } @@ -428,7 +430,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { }); it("returns undefined if it doesn't have an address", async () => { - expect(await database.getCompleteAddress(CompleteAddress.random().address)).toBeUndefined(); + expect(await database.getCompleteAddress((await CompleteAddress.random()).address)).toBeUndefined(); }); }); @@ -455,7 +457,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it('stores a contract instance', async () => { const address = AztecAddress.random(); - const instance = 
SerializableContractInstance.random().withAddress(address); + const instance = (await SerializableContractInstance.random()).withAddress(address); await database.addContractInstance(instance); await expect(database.getContractInstance(address)).resolves.toEqual(instance); }); diff --git a/yarn-project/pxe/src/kernel_oracle/index.ts b/yarn-project/pxe/src/kernel_oracle/index.ts index a66ec8db465e..5f9a67a571b0 100644 --- a/yarn-project/pxe/src/kernel_oracle/index.ts +++ b/yarn-project/pxe/src/kernel_oracle/index.ts @@ -37,7 +37,7 @@ export class KernelOracle implements ProvingDataOracle { public async getContractAddressPreimage(address: AztecAddress) { const instance = await this.contractDataOracle.getContractInstance(address); return { - saltedInitializationHash: computeSaltedInitializationHash(instance), + saltedInitializationHash: await computeSaltedInitializationHash(instance), ...instance, }; } @@ -51,9 +51,9 @@ export class KernelOracle implements ProvingDataOracle { return await this.contractDataOracle.getFunctionMembershipWitness(contractAddress, selector); } - public getVkMembershipWitness(vk: VerificationKeyAsFields) { - const leafIndex = getVKIndex(vk); - return Promise.resolve(new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex))); + public async getVkMembershipWitness(vk: VerificationKeyAsFields) { + const leafIndex = await getVKIndex(vk); + return Promise.resolve(new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex))); } async getNoteHashMembershipWitness(leafIndex: bigint): Promise> { diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index d7ae9401a9b5..b60331d1feaf 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -134,7 +134,7 @@ describe('Kernel Prover', () => { const prove = (executionResult: PrivateExecutionResult) 
=> prover.prove(txRequest, executionResult); - beforeEach(() => { + beforeEach(async () => { txRequest = makeTxRequest(); oracle = mock(); @@ -143,7 +143,7 @@ describe('Kernel Prover', () => { oracle.getContractAddressPreimage.mockResolvedValue({ contractClassId: Fr.random(), - publicKeys: PublicKeys.random(), + publicKeys: await PublicKeys.random(), saltedInitializationHash: Fr.random(), }); oracle.getContractClassIdPreimage.mockResolvedValue({ diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index 0a9490264145..edafda2ee196 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -150,7 +150,7 @@ export class KernelProver { if (firstIteration) { const proofInput = new PrivateKernelInitCircuitPrivateInputs( txRequest, - getVKTreeRoot(), + await getVKTreeRoot(), protocolContractTreeRoot, privateCallData, ); @@ -245,8 +245,8 @@ export class KernelProver { private async createPrivateCallData({ publicInputs, vk: vkAsBuffer }: PrivateExecutionResult) { const { contractAddress, functionSelector } = publicInputs.callContext; - const vkAsFields = vkAsFieldsMegaHonk(vkAsBuffer); - const vk = new VerificationKeyAsFields(vkAsFields, hashVK(vkAsFields)); + const vkAsFields = await vkAsFieldsMegaHonk(vkAsBuffer); + const vk = new VerificationKeyAsFields(vkAsFields, await hashVK(vkAsFields)); const functionLeafMembershipWitness = await this.oracle.getFunctionMembershipWitness( contractAddress, @@ -263,7 +263,7 @@ export class KernelProver { const acirHash = Fr.fromBuffer(Buffer.alloc(32, 0)); const protocolContractSiblingPath = isProtocolContract(contractAddress) - ? getProtocolContractSiblingPath(contractAddress) + ? 
await getProtocolContractSiblingPath(contractAddress) : makeTuple(PROTOCOL_CONTRACT_TREE_HEIGHT, Fr.zero); return PrivateCallData.from({ diff --git a/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts b/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts index cde87260e719..15defe370a05 100644 --- a/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts +++ b/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts @@ -58,7 +58,7 @@ export async function bruteForceNoteInfo( break; } - const expectedNonce = computeNoteHashNonce(firstNullifier, noteHashIndex); + const expectedNonce = await computeNoteHashNonce(firstNullifier, noteHashIndex); ({ noteHash, siloedNoteHash, innerNullifier } = await simulator.computeNoteHashAndOptionallyANullifier( contractAddress, expectedNonce, @@ -84,7 +84,7 @@ export async function bruteForceNoteInfo( noteHashIndex, nonce, noteHash: noteHash!, - siloedNullifier: siloNullifier(contractAddress, innerNullifier!), + siloedNullifier: await siloNullifier(contractAddress, innerNullifier!), txHash, }; } diff --git a/yarn-project/pxe/src/pxe_service/error_enriching.ts b/yarn-project/pxe/src/pxe_service/error_enriching.ts index 938d391ada77..6b588a738cc2 100644 --- a/yarn-project/pxe/src/pxe_service/error_enriching.ts +++ b/yarn-project/pxe/src/pxe_service/error_enriching.ts @@ -29,12 +29,19 @@ export async function enrichSimulationError(err: SimulationError, db: PxeDatabas const contract = await db.getContract(parsedContractAddress); if (contract) { err.enrichWithContractName(parsedContractAddress, contract.name); - selectors.forEach(selector => { - const functionArtifact = contract.functions.find(f => FunctionSelector.fromString(selector).equals(f)); + for (const selector of selectors) { + const functionArtifact = ( + await Promise.all( + contract.functions.map(async f => { + const fs = await FunctionSelector.fromNameAndParameters(f.name, f.parameters); + return 
fs.equals(FunctionSelector.fromString(selector)) ? f : undefined; + }), + ) + ).find(f => !!f); if (functionArtifact) { err.enrichWithFunctionName( parsedContractAddress, - FunctionSelector.fromNameAndParameters(functionArtifact), + await FunctionSelector.fromNameAndParameters(functionArtifact), functionArtifact.name, ); } else { @@ -42,7 +49,7 @@ export async function enrichSimulationError(err: SimulationError, db: PxeDatabas `Could not function artifact in contract ${contract.name} for selector ${selector} when enriching error callstack`, ); } - }); + } } else { logger.warn( `Could not find contract in database for address: ${parsedContractAddress} when enriching error callstack`, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index eadab26de2d7..588ace1f8957 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -164,7 +164,7 @@ export class PXEService implements PXE { public async getContractClass(id: Fr): Promise { const artifact = await this.db.getContractArtifact(id); - return artifact && getContractClassFromArtifact(artifact); + return artifact && (await getContractClassFromArtifact(artifact)); } public getContractArtifact(id: Fr): Promise { @@ -239,7 +239,7 @@ export class PXEService implements PXE { } public async registerContractClass(artifact: ContractArtifact): Promise { - const contractClassId = computeContractClassId(getContractClassFromArtifact(artifact)); + const contractClassId = await computeContractClassId(await getContractClassFromArtifact(artifact)); await this.db.addContractArtifact(contractClassId, artifact); this.log.info(`Added contract class ${artifact.name} with id ${contractClassId}`); } @@ -250,8 +250,8 @@ export class PXEService implements PXE { if (artifact) { // If the user provides an artifact, validate it against the expected class id and register it - const contractClass = 
getContractClassFromArtifact(artifact); - const contractClassId = computeContractClassId(contractClass); + const contractClass = await getContractClassFromArtifact(artifact); + const contractClassId = await computeContractClassId(contractClass); if (!contractClassId.equals(instance.contractClassId)) { throw new Error( `Artifact does not match expected class id (computed ${contractClassId} but instance refers to ${instance.contractClassId})`, @@ -381,7 +381,7 @@ export class PXEService implements PXE { throw new Error('Note does not exist.'); } - const siloedNullifier = siloNullifier(note.contractAddress, innerNullifier!); + const siloedNullifier = await siloNullifier(note.contractAddress, innerNullifier!); const [nullifierIndex] = await this.node.findLeavesIndexes('latest', MerkleTreeId.NULLIFIER_TREE, [ siloedNullifier, ]); @@ -474,7 +474,7 @@ export class PXEService implements PXE { break; } - const nonce = computeNoteHashNonce(firstNullifier, i); + const nonce = await computeNoteHashNonce(firstNullifier, i); const { siloedNoteHash } = await this.simulator.computeNoteHashAndOptionallyANullifier( note.contractAddress, nonce, @@ -683,7 +683,7 @@ export class PXEService implements PXE { return { name: functionDao.name, args: encodeArguments(functionDao, args), - selector: FunctionSelector.fromNameAndParameters(functionDao.name, functionDao.parameters), + selector: await FunctionSelector.fromNameAndParameters(functionDao.name, functionDao.parameters), type: functionDao.functionType, to, isStatic: functionDao.isStatic, @@ -728,7 +728,7 @@ export class PXEService implements PXE { async #registerProtocolContracts() { for (const name of protocolContractNames) { - const { address, contractClass, instance, artifact } = getCanonicalProtocolContract(name); + const { address, contractClass, instance, artifact } = await getCanonicalProtocolContract(name); await this.db.addContractArtifact(contractClass.id, artifact); await this.db.addContractInstance(instance); 
this.log.info(`Added protocol contract ${name} at ${address.toString()}`); @@ -892,7 +892,7 @@ export class PXEService implements PXE { } public async isContractInitialized(address: AztecAddress): Promise { - const initNullifier = siloNullifier(address, address.toField()); + const initNullifier = await siloNullifier(address, address.toField()); return !!(await this.node.getNullifierMembershipWitness('latest', initNullifier)); } @@ -923,9 +923,9 @@ export class PXEService implements PXE { throw new Error('No registered account'); } - const preaddress = registeredAccount.getPreaddress(); + const preaddress = await registeredAccount.getPreaddress(); - secretKey = computeAddressSecret(preaddress, secretKey); + secretKey = await computeAddressSecret(preaddress, secretKey); } return secretKey; @@ -940,10 +940,10 @@ export class PXEService implements PXE { if (decryptedEvent !== undefined) { return [decryptedEvent]; } - } - return []; - }); + return []; + }), + ); const decodedEvents = visibleEvents .map(visibleEvent => { diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts index 5899e8af003d..a21e1a99b7b5 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts @@ -77,8 +77,8 @@ describe('PXEService', () => { }); it('throws when submitting a tx with a nullifier of already settled tx', async () => { - const settledTx = TxEffect.random(); - const duplicateTx = mockTx(); + const settledTx = await TxEffect.random(); + const duplicateTx = await mockTx(); node.getTxEffect.mockResolvedValue(randomInBlock(settledTx)); diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts index c5fc6219e010..02a496f57b1e 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts @@ -39,7 +39,7 @@ 
export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => }); it('successfully adds a contract', async () => { - const contracts = [randomDeployedContract(), randomDeployedContract()]; + const contracts = await Promise.all([randomDeployedContract(), randomDeployedContract()]); for (const contract of contracts) { await pxe.registerContract(contract); } @@ -51,9 +51,9 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => it('registers a class and adds a contract for it', async () => { const artifact = randomContractArtifact(); - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); const contractClassId = contractClass.id; - const instance = randomContractInstanceWithAddress({ contractClassId }); + const instance = await randomContractInstanceWithAddress({ contractClassId }); await pxe.registerContractClass(artifact); expect(await pxe.getContractClass(contractClassId)).toMatchObject( @@ -66,9 +66,9 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => it('refuses to register a class with a mismatched address', async () => { const artifact = randomContractArtifact(); - const contractClass = getContractClassFromArtifact(artifact); + const contractClass = await getContractClassFromArtifact(artifact); const contractClassId = contractClass.id; - const instance = randomContractInstanceWithAddress({ contractClassId }); + const instance = await randomContractInstanceWithAddress({ contractClassId }); await expect( pxe.registerContract({ instance: { @@ -81,13 +81,13 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => }); it('refuses to register a contract with a class that has not been registered', async () => { - const instance = randomContractInstanceWithAddress(); + const instance = await randomContractInstanceWithAddress(); await expect(pxe.registerContract({ instance })).rejects.toThrow(/Missing contract 
artifact/i); }); it('refuses to register a contract with an artifact with mismatching class id', async () => { const artifact = randomContractArtifact(); - const instance = randomContractInstanceWithAddress(); + const instance = await randomContractInstanceWithAddress(); await expect(pxe.registerContract({ instance, artifact })).rejects.toThrow(/Artifact does not match/i); }); diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 802fe6c42144..99fd3350d678 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -302,7 +302,7 @@ export class SimulatorOracle implements DBOracle { async #calculateTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); - const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); + const sharedSecret = await computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); return siloedSecret; @@ -328,10 +328,12 @@ export class SimulatorOracle implements DBOracle { const contacts = [...this.db.getContactAddresses(), ...(await this.keyStore.getAccounts())].filter( (address, index, self) => index === self.findIndex(otherAddress => otherAddress.equals(address)), ); - const appTaggingSecrets = contacts.map(contact => { - const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, contact); - return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); - }); + const appTaggingSecrets = await Promise.all( + contacts.map(async contact => { + const sharedSecret = await 
computeTaggingSecret(recipientCompleteAddress, recipientIvsk, contact); + return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); + }), + ); const indexes = await this.db.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets); return appTaggingSecrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); } @@ -537,7 +539,7 @@ export class SimulatorOracle implements DBOracle { const ivskM = await this.keyStore.getMasterSecretKey( recipientCompleteAddress.publicKeys.masterIncomingViewingPublicKey, ); - const addressSecret = computeAddressSecret(recipientCompleteAddress.getPreaddress(), ivskM); + const addressSecret = await computeAddressSecret(await recipientCompleteAddress.getPreaddress(), ivskM); const ovskM = await this.keyStore.getMasterSecretKey( recipientCompleteAddress.publicKeys.masterOutgoingViewingPublicKey, ); diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts index a9804de5eecd..1dd1103278a8 100644 --- a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -147,7 +147,7 @@ describe('Simulator oracle', () => { const SENDER_OFFSET_WINDOW_SIZE = 10; let senders: { completeAddress: CompleteAddress; ivsk: Fq; secretKey: Fr }[]; - function generateMockLogs(senderOffset: number) { + async function generateMockLogs(senderOffset: number) { const logs: { [k: string]: TxScopedL2Log[] } = {}; // Add a random note from every address in the address book for our account with index senderOffset @@ -200,7 +200,7 @@ describe('Simulator oracle', () => { // Add a random note from every address in the address book for a random recipient with index senderOffset // Compute the tag as sender (knowledge of preaddress and ivsk) for (const sender of senders) { - const keys = deriveKeys(Fr.random()); + const keys = await deriveKeys(Fr.random()); const partialAddress = Fr.random(); 
const randomRecipient = computeAddress(keys.publicKeys, partialAddress); const tag = computeSiloedTagForIndex(sender, randomRecipient, contractAddress, senderOffset); @@ -213,7 +213,7 @@ describe('Simulator oracle', () => { randomRecipient, new KeyValidationRequest( keys.publicKeys.masterOutgoingViewingPublicKey, - computeOvskApp(keys.masterOutgoingViewingSecretKey, contractAddress), + await computeOvskApp(keys.masterOutgoingViewingSecretKey, contractAddress), ), ); const log = new TxScopedL2Log(TxHash.random(), 0, blockNumber, false, randomNote.encrypt()); @@ -253,10 +253,12 @@ describe('Simulator oracle', () => { // Recompute the secrets (as recipient) to ensure indexes are updated const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const secrets = senders.map(sender => { - const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - }); + const secrets = await Promise.all( + senders.map(async sender => { + const firstSenderSharedSecret = await computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return await poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }), + ); // First sender should have 2 logs, but keep index 1 since they were built using the same tag // Next 4 senders hould also have index 1 = offset + 1 @@ -333,10 +335,12 @@ describe('Simulator oracle', () => { // Recompute the secrets (as recipient) to ensure indexes are updated const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const secrets = senders.map(sender => { - const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - }); + const secrets = await Promise.all( + senders.map(async sender => { + 
const firstSenderSharedSecret = await computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return await poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }), + ); // First sender should have 2 logs, but keep index 1 since they were built using the same tag // Next 4 senders hould also have index 6 = offset + 1 @@ -357,10 +361,12 @@ describe('Simulator oracle', () => { // Recompute the secrets (as recipient) to update indexes const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const secrets = senders.map(sender => { - const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - }); + const secrets = await Promise.all( + senders.map(async sender => { + const firstSenderSharedSecret = await computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return await poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }), + ); // Increase our indexes to 2 await database.setTaggingSecretsIndexesAsRecipient(secrets.map(secret => new IndexedTaggingSecret(secret, 2))); @@ -390,10 +396,12 @@ describe('Simulator oracle', () => { // Recompute the secrets (as recipient) to update indexes const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const secrets = senders.map(sender => { - const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - }); + const secrets = await Promise.all( + senders.map(async sender => { + const firstSenderSharedSecret = await computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return await poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }), + ); 
await database.setTaggingSecretsIndexesAsRecipient( secrets.map(secret => new IndexedTaggingSecret(secret, SENDER_OFFSET_WINDOW_SIZE + 1)), @@ -479,14 +487,12 @@ describe('Simulator oracle', () => { removeNullifiedNotesSpy = jest.spyOn(database, 'removeNullifiedNotes'); removeNullifiedNotesSpy.mockImplementation(() => Promise.resolve([])); simulator = mock(); - simulator.computeNoteHashAndOptionallyANullifier.mockImplementation((...args: any) => - Promise.resolve({ - noteHash: Fr.random(), - uniqueNoteHash: Fr.random(), - siloedNoteHash: pedersenHash(args[5].items), // args[5] is note - innerNullifier: Fr.random(), - }), - ); + simulator.computeNoteHashAndOptionallyANullifier.mockImplementation(async (...args: any) => ({ + noteHash: Fr.random(), + uniqueNoteHash: Fr.random(), + siloedNoteHash: await pedersenHash(args[5].items), // args[5] is note + innerNullifier: Fr.random(), + })); }); afterEach(() => { @@ -512,7 +518,7 @@ describe('Simulator oracle', () => { }, {}); Object.keys(groupedByTx).forEach(blockNumberKey => { const blockNumber = parseInt(blockNumberKey); - Object.keys(groupedByTx[blockNumber]).forEach(txIndexKey => { + Object.keys(groupedByTx[blockNumber]).forEach(async txIndexKey => { const txIndex = parseInt(txIndexKey); const requestsInTx = groupedByTx[blockNumber][txIndex]; const maxNoteIndex = Math.max(...requestsInTx.map(request => request.noteHashIndex)); @@ -530,7 +536,7 @@ describe('Simulator oracle', () => { (request.blockNumber - 1) * NUM_NOTE_HASHES_PER_BLOCK + request.txIndex * MAX_NOTE_HASHES_PER_TX; const taggedLog = new TxScopedL2Log(txHash, dataStartIndex, blockNumber, false, request.encrypt()); const note = request.snippetOfNoteDao.note; - const noteHash = pedersenHash(note.items); + const noteHash = await pedersenHash(note.items); txEffectsMap[txHash.toString()].noteHashes[request.noteHashIndex] = noteHash; taggedLogs.push(taggedLog); } @@ -557,7 +563,7 @@ describe('Simulator oracle', () => { 0, 2, recipient.address, - 
KeyValidationRequest.random(), + await KeyValidationRequest.random(), ); const taggedLogs = mockTaggedLogs([request]); @@ -582,7 +588,7 @@ describe('Simulator oracle', () => { 4, 0, 2, - CompleteAddress.random().address, + (await CompleteAddress.random()).address, recipientOvKeys, ); @@ -614,7 +620,7 @@ describe('Simulator oracle', () => { 2, 3, 0, - CompleteAddress.random().address, + (await CompleteAddress.random()).address, recipientOvKeys, ), new MockNoteRequest( @@ -623,15 +629,15 @@ describe('Simulator oracle', () => { 3, 2, recipient.address, - KeyValidationRequest.random(), + await KeyValidationRequest.random(), ), new MockNoteRequest( getRandomNoteLogPayload(Fr.random(), contractAddress), 9, 3, 2, - CompleteAddress.random().address, - KeyValidationRequest.random(), + (await CompleteAddress.random()).address, + await KeyValidationRequest.random(), ), new MockNoteRequest( getRandomNoteLogPayload(Fr.random(), contractAddress), @@ -682,16 +688,16 @@ describe('Simulator oracle', () => { 2, 1, 1, - CompleteAddress.random().address, - KeyValidationRequest.random(), + (await CompleteAddress.random()).address, + await KeyValidationRequest.random(), ), new MockNoteRequest( getRandomNoteLogPayload(), 2, 3, 0, - CompleteAddress.random().address, - KeyValidationRequest.random(), + (await CompleteAddress.random()).address, + await KeyValidationRequest.random(), ), ]; diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index dd3318759547..d4d807261e2b 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -34,7 +34,7 @@ describe('Synchronizer', () => { }); it('sets header from latest block', async () => { - const block = L2Block.random(1, 4); + const block = await L2Block.random(1, 4); await synchronizer.handleBlockStreamEvent({ type: 'blocks-added', blocks: [block] }); const obtainedHeader = database.getBlockHeader(); @@ 
-49,7 +49,10 @@ describe('Synchronizer', () => { Promise.resolve(L2Block.random(blockNumber as number).header), ); - await synchronizer.handleBlockStreamEvent({ type: 'blocks-added', blocks: times(5, L2Block.random) }); + await synchronizer.handleBlockStreamEvent({ + type: 'blocks-added', + blocks: await Promise.all(times(5, L2Block.random)), + }); await synchronizer.handleBlockStreamEvent({ type: 'chain-pruned', blockNumber: 3 }); expect(removeNotesAfter).toHaveBeenCalledWith(3); diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 761300832115..bac66d14f62f 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -11,8 +11,8 @@ import { booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + pickConfigMappings, } from '@aztec/foundation/config'; -import { pickConfigMappings } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; @@ -203,7 +203,7 @@ export function parseSequencerAllowList(value: string): AllowedElement[] { return entries; } -function getDefaultAllowedSetupFunctions(): AllowedElement[] { +async function getDefaultAllowedSetupFunctions(): Promise { return [ // needed for authwit support { @@ -217,23 +217,23 @@ function getDefaultAllowedSetupFunctions(): AllowedElement[] { }, // needed for private transfers via FPC { - classId: getContractClassFromArtifact(TokenContractArtifact).id, + classId: (await getContractClassFromArtifact(TokenContractArtifact)).id, // We can't restrict the selector because public functions get routed via dispatch. 
// selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), }, { - classId: getContractClassFromArtifact(FPCContract.artifact).id, + classId: (await getContractClassFromArtifact(FPCContract.artifact)).id, // We can't restrict the selector because public functions get routed via dispatch. // selector: FunctionSelector.fromSignature('prepare_fee((Field),Field,(Field),Field)'), }, ]; } -function getDefaultAllowedTeardownFunctions(): AllowedElement[] { +async function getDefaultAllowedTeardownFunctions(): Promise { return [ { - classId: getContractClassFromArtifact(FPCContract.artifact).id, - selector: FunctionSelector.fromSignature('pay_refund((Field),Field,(Field))'), + classId: (await getContractClassFromArtifact(FPCContract.artifact)).id, + selector: await FunctionSelector.fromSignature('pay_refund((Field),Field,(Field))'), }, ]; } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index cedbfbe0d7de..c358e2c85532 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -88,12 +88,12 @@ describe('L1Publisher', () => { const GAS_GUESS = 300_000n; - beforeEach(() => { - l2Block = L2Block.random(42); + beforeEach(async () => { + l2Block = await L2Block.random(42); header = l2Block.header.toBuffer(); archive = l2Block.archive.root.toBuffer(); - blockHash = l2Block.header.hash().toBuffer(); + blockHash = (await l2Block.header.hash()).toBuffer(); body = l2Block.body.toBuffer(); proposeTxHash = `0x${Buffer.from('txHashPropose').toString('hex')}`; // random tx hash diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 204dd065b062..8cc1f4468047 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ 
b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -495,11 +495,11 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); - const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + const digest = await getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), - blockHash: block.header.hash().toBuffer(), + blockHash: (await block.header.hash()).toBuffer(), body: block.body.toBuffer(), attestations, txHashes: txHashes ?? [], diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 5acbbd261f6a..2577d3dc1b8f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -97,11 +97,11 @@ describe('sequencer', () => { let block: L2Block; let mockedGlobalVariables: GlobalVariables; - beforeEach(() => { + beforeEach(async () => { lastBlockNumber = 0; hash = Fr.ZERO.toString(); - block = L2Block.random(lastBlockNumber + 1); + block = await L2Block.random(lastBlockNumber + 1); mockedGlobalVariables = new GlobalVariables( chainId, @@ -204,7 +204,7 @@ describe('sequencer', () => { }); it('builds a block out of a single tx', async () => { - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; const txHash = tx.getTxHash(); @@ -237,7 +237,7 @@ describe('sequencer', () => { Math.floor(Date.now() / 1000) - slotDuration * 1 - (sequencer.getTimeTable()[delayedState] + 1), ); - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; p2p.getTxs.mockReturnValueOnce([tx]); @@ -258,7 +258,7 @@ describe('sequencer', () => { }); it('builds a 
block when it is their turn', async () => { - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; const txHash = tx.getTxHash(); @@ -299,7 +299,7 @@ describe('sequencer', () => { it('builds a block out of several txs rejecting double spends', async () => { const doubleSpendTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; + const txs = [await mockTxForRollup(0x10000), await mockTxForRollup(0x20000), await mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); @@ -334,7 +334,7 @@ describe('sequencer', () => { it('builds a block out of several txs rejecting incorrect chain ids', async () => { const invalidChainTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; + const txs = [await mockTxForRollup(0x10000), await mockTxForRollup(0x20000), await mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); @@ -364,7 +364,7 @@ describe('sequencer', () => { it('builds a block out of several txs dropping the ones that go over max size', async () => { const invalidTransactionIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; + const txs = [await mockTxForRollup(0x10000), await mockTxForRollup(0x20000), await mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); @@ -392,12 +392,14 @@ describe('sequencer', () => { }); it('builds a block once it reaches the minimum number of transactions', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); + const txs = await Promise.all( + times(8, async i => { + const tx = await mockTxForRollup(i * 0x10000); + 
tx.data.constants.txContext.chainId = chainId; + return tx; + }), + ); + const block = await L2Block.random(lastBlockNumber + 1); blockBuilder.setBlockCompleted.mockResolvedValue(block); publisher.proposeL2Block.mockResolvedValueOnce(true); @@ -433,12 +435,14 @@ describe('sequencer', () => { }); it('builds a block that contains zero real transactions once flushed', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); + const txs = await Promise.all( + times(8, async i => { + const tx = await mockTxForRollup(i * 0x10000); + tx.data.constants.txContext.chainId = chainId; + return tx; + }), + ); + const block = await L2Block.random(lastBlockNumber + 1); blockBuilder.setBlockCompleted.mockResolvedValue(block); publisher.proposeL2Block.mockResolvedValueOnce(true); @@ -474,12 +478,14 @@ describe('sequencer', () => { }); it('builds a block that contains less than the minimum number of transactions once flushed', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); + const txs = await Promise.all( + times(8, async i => { + const tx = await mockTxForRollup(i * 0x10000); + tx.data.constants.txContext.chainId = chainId; + return tx; + }), + ); + const block = await L2Block.random(lastBlockNumber + 1); blockBuilder.setBlockCompleted.mockResolvedValue(block); publisher.proposeL2Block.mockResolvedValueOnce(true); @@ -518,7 +524,7 @@ describe('sequencer', () => { }); it('aborts building a block if the chain moves underneath it', async () => { - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; p2p.getTxs.mockReturnValueOnce([tx]); @@ -552,10 +558,10 @@ describe('sequencer', () => { describe('Handling proof 
quotes', () => { let txHash: TxHash; let currentEpoch = 0n; - const setupForBlockNumber = (blockNumber: number) => { + const setupForBlockNumber = async (blockNumber: number) => { currentEpoch = BigInt(blockNumber) / BigInt(epochDuration); // Create a new block and header - block = L2Block.random(blockNumber); + block = await L2Block.random(blockNumber); mockedGlobalVariables = new GlobalVariables( chainId, @@ -593,7 +599,7 @@ describe('sequencer', () => { Promise.resolve(slotNumber / BigInt(epochDuration)), ); - const tx = mockTxForRollup(); + const tx = await mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; txHash = tx.getTxHash(); @@ -603,7 +609,7 @@ describe('sequencer', () => { it('submits a valid proof quote with a block', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote( currentEpoch - 1n, @@ -626,7 +632,7 @@ describe('sequencer', () => { it('does not claim the epoch previous to the first', async () => { const blockNumber = 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote( 0n, @@ -648,7 +654,7 @@ describe('sequencer', () => { it('does not submit a quote with an expired slot number', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote( currentEpoch - 1n, @@ -672,7 +678,7 @@ describe('sequencer', () => { it('does not submit a valid quote if unable to claim epoch', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote( currentEpoch - 1n, @@ -694,7 +700,7 @@ describe('sequencer', () => { it('does not submit an invalid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await 
setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote( currentEpoch - 1n, @@ -719,7 +725,7 @@ describe('sequencer', () => { it('only selects valid quotes', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // Create 1 valid quote and 3 that have a higher fee but are invalid const validProofQuote = mockEpochProofQuote( @@ -774,7 +780,7 @@ describe('sequencer', () => { it('selects the lowest cost valid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // Create 3 valid quotes with different fees. // And 3 invalid quotes with lower fees diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts index 95210a1b69a9..6e3c31ff02c8 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts @@ -28,12 +28,12 @@ describe('GasTxValidator', () => { let expectedBalanceSlot: Fr; let feeLimit: bigint; - beforeEach(() => { - tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); + beforeEach(async () => { + tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); tx.data.feePayer = AztecAddress.random(); tx.data.constants.txContext.gasSettings = GasSettings.default({ maxFeesPerGas: new GasFees(10, 10) }); payer = tx.data.feePayer; - expectedBalanceSlot = poseidon2Hash([FeeJuiceContract.storage.balances.slot, payer]); + expectedBalanceSlot = await poseidon2Hash([FeeJuiceContract.storage.balances.slot, payer]); feeLimit = tx.data.constants.txContext.gasSettings.getFeeLimit().toBigInt(); }); @@ -62,7 +62,7 @@ describe('GasTxValidator', () => { it('allows fee paying txs if fee payer claims enough balance during setup', async () => { mockBalance(feeLimit - 1n); - const selector = 
FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'); + const selector = await FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'); patchNonRevertibleFn(tx, 0, { address: ProtocolContractAddress.FeeJuice, selector: FunctionSelector.fromField(new Fr(PUBLIC_DISPATCH_SELECTOR)), @@ -84,7 +84,7 @@ describe('GasTxValidator', () => { it('rejects txs if fee payer claims balance outside setup', async () => { mockBalance(feeLimit - 1n); patchRevertibleFn(tx, 0, { - selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), + selector: await FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), args: [payer.toField(), new Fr(1n)], }); await expectValidateFail(tx); diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts index 58d92c7ce1a7..7110ebfe9e6f 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts @@ -54,21 +54,28 @@ export class GasTxValidator implements TxValidator { // Read current balance of the feePayer const initialBalance = await this.#publicDataSource.storageRead( this.#feeJuiceAddress, - computeFeePayerBalanceStorageSlot(feePayer), + await computeFeePayerBalanceStorageSlot(feePayer), ); // If there is a claim in this tx that increases the fee payer balance in Fee Juice, add it to balance const setupFns = getExecutionRequestsByPhase(tx, TxExecutionPhase.SETUP); - const claimFunctionCall = setupFns.find( - fn => - fn.callContext.contractAddress.equals(this.#feeJuiceAddress) && - fn.callContext.msgSender.equals(this.#feeJuiceAddress) && - fn.args.length > 2 && - // Public functions get routed through the dispatch function, whose first argument is the target function selector. 
- fn.args[0].equals(FunctionSelector.fromSignature('_increase_public_balance((Field),Field)').toField()) && - fn.args[1].equals(feePayer.toField()) && - !fn.callContext.isStaticCall, - ); + const claimFunctionCall = ( + await Promise.all( + setupFns.map(async fn => { + const found = + fn.callContext.contractAddress.equals(this.#feeJuiceAddress) && + fn.callContext.msgSender.equals(this.#feeJuiceAddress) && + fn.args.length > 2 && + // Public functions get routed through the dispatch function, whose first argument is the target function selector. + fn.args[0].equals( + (await FunctionSelector.fromSignature('_increase_public_balance((Field),Field)')).toField(), + ) && + fn.args[1].equals(feePayer.toField()) && + !fn.callContext.isStaticCall; + return found ? fn : undefined; + }), + ) + ).find(fn => !!fn); const balance = claimFunctionCall ? initialBalance.add(claimFunctionCall.args[2]) : initialBalance; if (balance.lt(feeLimit)) { diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts index 55a1d0ecb79a..044d0602b60e 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts @@ -50,15 +50,15 @@ describe('PhasesTxValidator', () => { }); it('allows setup functions on the contracts allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); }); it('allows setup functions on the contracts class allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); - const { address } = patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); + const tx 
= await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const { address } = await patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); contractDataSource.getContract.mockImplementationOnce(contractAddress => { if (address.equals(contractAddress)) { @@ -74,15 +74,15 @@ describe('PhasesTxValidator', () => { }); it('rejects txs with setup functions not on the allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); }); it('rejects setup functions not on the contracts class list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); // good selector, bad contract class - const { address } = patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); + const { address } = await patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); contractDataSource.getContract.mockImplementationOnce(contractAddress => { if (address.equals(contractAddress)) { return Promise.resolve({ @@ -96,7 +96,7 @@ describe('PhasesTxValidator', () => { }); it('allows multiple setup functions on the allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedSetupSelector2 }); @@ -104,7 +104,7 @@ describe('PhasesTxValidator', () => { }); it('rejects if one setup functions is not on the allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); 
patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); diff --git a/yarn-project/sequencer-client/src/tx_validator/test_utils.ts b/yarn-project/sequencer-client/src/tx_validator/test_utils.ts index dace92c02589..f5ac519d6106 100644 --- a/yarn-project/sequencer-client/src/tx_validator/test_utils.ts +++ b/yarn-project/sequencer-client/src/tx_validator/test_utils.ts @@ -2,28 +2,28 @@ import { type Tx } from '@aztec/circuit-types'; import { type AztecAddress, type Fr, type FunctionSelector } from '@aztec/circuits.js'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; -export function patchNonRevertibleFn( +export async function patchNonRevertibleFn( tx: Tx, index: number, overrides: { address?: AztecAddress; selector: FunctionSelector; args?: Fr[]; msgSender?: AztecAddress }, -): { address: AztecAddress; selector: FunctionSelector } { - return patchFn('nonRevertibleAccumulatedData', tx, index, overrides); +): Promise<{ address: AztecAddress; selector: FunctionSelector }> { + return await patchFn('nonRevertibleAccumulatedData', tx, index, overrides); } -export function patchRevertibleFn( +export async function patchRevertibleFn( tx: Tx, index: number, overrides: { address?: AztecAddress; selector: FunctionSelector; args?: Fr[]; msgSender?: AztecAddress }, -): { address: AztecAddress; selector: FunctionSelector } { - return patchFn('revertibleAccumulatedData', tx, index, overrides); +): Promise<{ address: AztecAddress; selector: FunctionSelector }> { + return await patchFn('revertibleAccumulatedData', tx, index, overrides); } -function patchFn( +async function patchFn( where: 'revertibleAccumulatedData' | 'nonRevertibleAccumulatedData', tx: Tx, index: number, overrides: { address?: AztecAddress; selector: FunctionSelector; args?: Fr[]; msgSender?: AztecAddress }, -): { address: AztecAddress; selector: FunctionSelector } { +): Promise<{ address: 
AztecAddress; selector: FunctionSelector }> { const fn = tx.enqueuedPublicFunctionCalls.at(-1 * index - 1)!; fn.callContext.contractAddress = overrides.address ?? fn.callContext.contractAddress; fn.callContext.functionSelector = overrides.selector; @@ -36,7 +36,7 @@ function patchFn( request.msgSender = fn.callContext.msgSender; request.functionSelector = fn.callContext.functionSelector; request.isStaticCall = fn.callContext.isStaticCall; - request.argsHash = computeVarArgsHash(fn.args); + request.argsHash = await computeVarArgsHash(fn.args); tx.data.forPublic![where].publicCallRequests[index] = request; return { diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index 99d5d5f29f2c..4062bc326675 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -371,8 +371,8 @@ export class Oracle { return toACVMField(newArgsHash); } - notifySetMinRevertibleSideEffectCounter([minRevertibleSideEffectCounter]: ACVMField[]) { - this.typedOracle.notifySetMinRevertibleSideEffectCounter(frToNumber(fromACVMField(minRevertibleSideEffectCounter))); + async notifySetMinRevertibleSideEffectCounter([minRevertibleSideEffectCounter]: ACVMField[]) { + await this.typedOracle.notifySetMinRevertibleSideEffectCounter(frToNumber(fromACVMField(minRevertibleSideEffectCounter))); } async getAppTaggingSecretAsSender([sender]: ACVMField[], [recipient]: ACVMField[]): Promise { diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 4c9cb102703e..c8750d841812 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -121,17 +121,17 @@ const TIMESTAMP = new Fr(99833); describe('AVM simulator: transpiled Noir contracts', () => { it('bulk testing', async () => { const functionName = 'bulk_testing'; - const functionSelector = 
getAvmTestContractFunctionSelector(functionName); + const functionSelector = await getAvmTestContractFunctionSelector(functionName); const args = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(x => new Fr(x)); const calldata = [functionSelector.toField(), ...args]; const globals = GlobalVariables.empty(); globals.timestamp = TIMESTAMP; const bytecode = getAvmTestContractBytecode('public_dispatch'); - const fnSelector = getAvmTestContractFunctionSelector('public_dispatch'); + const fnSelector = await getAvmTestContractFunctionSelector('public_dispatch'); const publicFn: PublicFunction = { bytecode, selector: fnSelector }; - const contractClass = makeContractClassPublic(0, publicFn); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractClass = await makeContractClassPublic(0, publicFn); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); // The values here should match those in getContractInstance test case const instanceGet = new SerializableContractInstance({ @@ -209,7 +209,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { it('addition via dispatch', async () => { const calldata: Fr[] = [ - FunctionSelector.fromSignature('add_args_return(Field,Field)').toField(), + (await FunctionSelector.fromSignature('add_args_return(Field,Field)')).toField(), new Fr(1), new Fr(2), ]; @@ -224,7 +224,10 @@ describe('AVM simulator: transpiled Noir contracts', () => { it('get_args_hash via dispatch', async () => { const calldata = [new Fr(8), new Fr(1), new Fr(2), new Fr(3)]; - const dispatchCalldata = [FunctionSelector.fromSignature('get_args_hash(u8,[Field;3])').toField(), ...calldata]; + const dispatchCalldata = [ + (await FunctionSelector.fromSignature('get_args_hash(u8,[Field;3])')).toField(), + ...calldata, + ]; const context = initContext({ env: initExecutionEnvironment({ calldata: dispatchCalldata }) }); const bytecode = getAvmTestContractBytecode('public_dispatch'); 
@@ -307,7 +310,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.reverted).toBe(false); const grumpkin = new Grumpkin(); - const g3 = grumpkin.mul(grumpkin.generator(), new Fq(3)); + const g3 = await grumpkin.mul(grumpkin.generator(), new Fq(3)); expect(results.output).toEqual([g3.x, g3.y, Fr.ZERO]); }); @@ -319,9 +322,9 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.reverted).toBe(false); const grumpkin = new Grumpkin(); - const g3 = grumpkin.mul(grumpkin.generator(), new Fq(3)); - const g20 = grumpkin.mul(grumpkin.generator(), new Fq(20)); - const expectedResult = grumpkin.add(g3, g20); + const g3 = await grumpkin.mul(grumpkin.generator(), new Fq(3)); + const g20 = await grumpkin.mul(grumpkin.generator(), new Fq(20)); + const expectedResult = await grumpkin.add(g3, g20); expect(results.output).toEqual([expectedResult.x, expectedResult.y, Fr.ZERO]); }); @@ -334,7 +337,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.reverted).toBe(false); // This doesnt include infinites - const expectedResult = pedersenCommit([Buffer.from([100]), Buffer.from([1])], 20).map(f => new Fr(f)); + const expectedResult = (await pedersenCommit([Buffer.from([100]), Buffer.from([1])], 20)).map(f => new Fr(f)); // TODO: Come back to the handling of infinities when we confirm how they're handled in bb const isInf = expectedResult[0] === new Fr(0) && expectedResult[1] === new Fr(0); expectedResult.push(new Fr(isInf)); @@ -430,7 +433,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { ['poseidon2_hash', /*input=*/ randomMemoryFields(10), /*output=*/ poseidon2FromMemoryFields], ['pedersen_hash', /*input=*/ randomMemoryFields(10), /*output=*/ pedersenFromMemoryFields], ['pedersen_hash_with_index', /*input=*/ randomMemoryFields(10), /*output=*/ indexedPedersenFromMemoryFields], - ])('Hashes in noir contracts', (name: string, input: MemoryValue[], output: (msg: any[]) => Fr[]) => { + 
])('Hashes in noir contracts', (name: string, input: MemoryValue[], output: (msg: any[]) => Fr[] | Promise) => { it(`Should execute contract function that performs ${name}`, async () => { const calldata = input.map(e => e.toFr()); @@ -933,12 +936,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const nestedBytecode = getAvmTestContractBytecode('public_dispatch'); mockGetBytecode(worldStateDB, nestedBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: nestedBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const nestedTrace = mock(); @@ -958,12 +961,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const nestedBytecode = getAvmTestContractBytecode('public_dispatch'); mockGetBytecode(worldStateDB, nestedBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: nestedBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const nestedTrace = mock(); @@ -978,7 +981,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { it(`Nested call with not enough gas (expect failure)`, async () => { const gas = [/*l2=*/ 5, /*da=*/ 10000].map(g => new Fr(g)); - const targetFunctionSelector = FunctionSelector.fromSignature( + const targetFunctionSelector = await FunctionSelector.fromSignature( 'nested_call_to_add_with_gas(Field,Field,Field,Field)', ); const 
calldata: Fr[] = [targetFunctionSelector.toField(), value0, value1, ...gas]; @@ -986,12 +989,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const artifact = getAvmTestContractArtifact('public_dispatch'); mockGetBytecode(worldStateDB, artifact.bytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: artifact.bytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); mockTraceFork(trace); @@ -1010,12 +1013,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const nestedBytecode = getAvmTestContractBytecode('public_dispatch'); mockGetBytecode(worldStateDB, nestedBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: nestedBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const nestedTrace = mock(); @@ -1041,12 +1044,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const nestedBytecode = getAvmTestContractBytecode('public_dispatch'); mockGetBytecode(worldStateDB, nestedBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: nestedBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await 
makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); mockTraceFork(trace); @@ -1065,12 +1068,12 @@ describe('AVM simulator: transpiled Noir contracts', () => { const nestedBytecode = getAvmTestContractBytecode('public_dispatch'); mockGetBytecode(worldStateDB, nestedBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: nestedBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); mockTraceFork(trace); @@ -1119,7 +1122,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { }); }); }); - describe('Side effects including merkle checks', () => { + describe('Side effects including merkle checks', async () => { const address = AztecAddress.fromNumber(1); const sender = AztecAddress.fromNumber(42); @@ -1354,14 +1357,14 @@ function keccakF1600FromMemoryUint64s(mem: Uint64[]): Fr[] { return [...keccakf1600(mem.map(u => u.toBigInt()))].map(b => new Fr(b)); } -function poseidon2FromMemoryFields(fields: Fieldable[]): Fr[] { - return [poseidon2Hash(fields)]; +async function poseidon2FromMemoryFields(fields: Fieldable[]): Promise { + return [await poseidon2Hash(fields)]; } -function pedersenFromMemoryFields(fields: Fieldable[]): Fr[] { - return [pedersenHash(fields)]; +async function pedersenFromMemoryFields(fields: Fieldable[]): Promise { + return [await pedersenHash(fields)]; } -function indexedPedersenFromMemoryFields(fields: Fieldable[]): Fr[] { - return [pedersenHash(fields, /*index=*/ 20)]; +async function indexedPedersenFromMemoryFields(fields: Fieldable[]): Promise { + return [await pedersenHash(fields, /*index=*/ 20)]; } diff --git 
a/yarn-project/simulator/src/avm/avm_tree.test.ts b/yarn-project/simulator/src/avm/avm_tree.test.ts index b30ef226cbb4..c517622267f5 100644 --- a/yarn-project/simulator/src/avm/avm_tree.test.ts +++ b/yarn-project/simulator/src/avm/avm_tree.test.ts @@ -136,7 +136,7 @@ describe('Simple Note Hash Consistency', () => { // Check that the roots are consistent const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check a sibling path from a random index is consistent @@ -163,7 +163,7 @@ describe('Simple Note Hash Consistency', () => { // Check that the roots are consistent const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check the sibling path from an index before the fork @@ -202,7 +202,7 @@ describe('Simple Public Data Consistency', () => { // Compare the roots of the container and the world state trees const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check that all the accumulated insertion results match @@ -231,7 +231,7 @@ describe('Simple Public Data Consistency', () => { // Compare the roots of the container and the world state trees const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Get a sibling path from a random index and check it is consistent @@ -270,7 +270,7 @@ describe('Simple Public Data 
Consistency', () => { // Check the roots are consistent const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check the insertion results match @@ -300,7 +300,7 @@ describe('Simple Nullifier Consistency', () => { // Compare the roots of the container and the world state const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check that all the accumulated insertion results match @@ -328,7 +328,7 @@ describe('Simple Nullifier Consistency', () => { // Compare the roots of the container and the world state const wsRoot = await getWorldStateRoot(treeId); - const computedRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + const computedRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); expect(computedRoot.toBuffer()).toEqual(wsRoot); // Check insertion results - note we can only compare against the post-insertion results @@ -435,7 +435,7 @@ describe('Big Random Avm Ephemeral Container Test', () => { const computedRoots = []; for (const treeId of [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE]) { wsRoots.push(await getWorldStateRoot(treeId)); - computedRoots.push(treeContainer.treeMap.get(treeId)!.getRoot().toBuffer()); + computedRoots.push((await treeContainer.treeMap.get(treeId)!.getRoot()).toBuffer()); } // All the roots should match @@ -473,17 +473,17 @@ describe('Checking forking and merging', () => { // Write the last element to the forked container await forkedContainer.writePublicStorage(slots[slots.length - 1], values[slots.length - 1]); const forkedRoot = forkedContainer.treeMap.get(treeId)!.getRoot(); - let originalRoot = 
treeContainer.treeMap.get(treeId)!.getRoot(); + let originalRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); // The roots should NOT match since we have an extra element - expect(forkedRoot.toBuffer()).not.toEqual(originalRoot.toBuffer()); + expect((await forkedRoot).toBuffer()).not.toEqual(originalRoot.toBuffer()); // Write the last element to original container await treeContainer.writePublicStorage(slots[slots.length - 1], values[slots.length - 1]); - originalRoot = treeContainer.treeMap.get(treeId)!.getRoot(); + originalRoot = await treeContainer.treeMap.get(treeId)!.getRoot(); // We should be consistent now - expect(forkedRoot.toBuffer()).toEqual(originalRoot.toBuffer()); + expect((await forkedRoot).toBuffer()).toEqual(originalRoot.toBuffer()); }); it('Fork-Rollback-Fork-Merge should be consistent', async () => { @@ -512,7 +512,7 @@ describe('Checking forking and merging', () => { wsInsertionResults.push(await publicDataInsertWorldState(slots[1], values[1])); wsInsertionResults.push(await publicDataInsertWorldState(slots[3], values[3])); - const containerRoot = forkedContainer.treeMap.get(treeId)!.getRoot(); + const containerRoot = await forkedContainer.treeMap.get(treeId)!.getRoot(); const wsRoot = await getWorldStateRoot(treeId); expect(containerRoot.toBuffer()).toEqual(wsRoot); @@ -545,17 +545,17 @@ describe('AVM Ephemeral Tree Sanity Test', () => { ); const expectedFrontier0 = new Fr(4); - const exepctedFrontier1 = poseidon2Hash([new Fr(4), new Fr(5)]); - const expectedFrontier2 = poseidon2Hash([ - poseidon2Hash([new Fr(0), new Fr(1)]), - poseidon2Hash([new Fr(2), new Fr(3)]), + const exepctedFrontier1 = await poseidon2Hash([new Fr(4), new Fr(5)]); + const expectedFrontier2 = await poseidon2Hash([ + await poseidon2Hash([new Fr(0), new Fr(1)]), + await poseidon2Hash([new Fr(2), new Fr(3)]), ]); const expectedFrontier = [expectedFrontier0, exepctedFrontier1, expectedFrontier2]; expect(tree.frontier).toEqual(expectedFrontier); // Check root await 
worldStateTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, leaves); const treeInfo = await worldStateTrees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE, true); - const localRoot = tree.getRoot(); + const localRoot = await tree.getRoot(); expect(localRoot.toBuffer()).toEqual(treeInfo.root); }); }); diff --git a/yarn-project/simulator/src/avm/avm_tree.ts b/yarn-project/simulator/src/avm/avm_tree.ts index 5ac80dd87fd2..adecbd615b0a 100644 --- a/yarn-project/simulator/src/avm/avm_tree.ts +++ b/yarn-project/simulator/src/avm/avm_tree.ts @@ -109,7 +109,7 @@ export class AvmEphemeralForest { */ async getSiblingPath(treeId: MerkleTreeId, index: bigint): Promise { const tree = this.treeMap.get(treeId)!; - let path = tree.getSiblingPath(index); + let path = await tree.getSiblingPath(index); if (path === undefined) { // We dont have the sibling path in our tree - we have to get it from the DB path = (await this.treeDb.getSiblingPath(treeId, index)).toFields(); @@ -117,9 +117,9 @@ export class AvmEphemeralForest { // if we encounter a mismatch, we replace it with the node we found in our tree. for (let i = 0; i < path.length; i++) { const siblingIndex = index ^ 1n; - const node = tree.getNode(siblingIndex, tree.depth - i); + const node = await tree.getNode(siblingIndex, tree.depth - i); if (node !== undefined) { - const nodeHash = tree.hashTree(node, i + 1); + const nodeHash = await tree.hashTree(node, i + 1); if (!nodeHash.equals(path[i])) { path[i] = nodeHash; } @@ -138,25 +138,25 @@ export class AvmEphemeralForest { * @param newLeafPreimage - The preimage of the new leaf to be inserted. * @returns The sibling path of the new leaf (i.e. 
the insertion path) */ - appendIndexedTree( + async appendIndexedTree( treeId: ID, lowLeafIndex: bigint, lowLeafPreimage: T, newLeafPreimage: T, - ): Fr[] { + ): Promise { const tree = this.treeMap.get(treeId)!; - const newLeaf = this.hashPreimage(newLeafPreimage); + const newLeaf = await this.hashPreimage(newLeafPreimage); const insertIndex = tree.leafCount; - const lowLeaf = this.hashPreimage(lowLeafPreimage); + const lowLeaf = await this.hashPreimage(lowLeafPreimage); // Update the low nullifier hash this.setIndexedUpdates(treeId, lowLeafIndex, lowLeafPreimage); - tree.updateLeaf(lowLeaf, lowLeafIndex); + await tree.updateLeaf(lowLeaf, lowLeafIndex); // Append the new leaf - tree.appendLeaf(newLeaf); - this.setIndexedUpdates(treeId, insertIndex, newLeafPreimage); + await tree.appendLeaf(newLeaf); + await this.setIndexedUpdates(treeId, insertIndex, newLeafPreimage); - return tree.getSiblingPath(insertIndex)!; + return (await tree.getSiblingPath(insertIndex))!; } /** @@ -281,7 +281,7 @@ export class AvmEphemeralForest { updatedLowNullifier.nextIndex = insertionIndex; const newNullifierLeaf = new NullifierLeafPreimage(nullifier, preimage.nextNullifier, preimage.nextIndex); - const insertionPath = this.appendIndexedTree(treeId, index, updatedLowNullifier, newNullifierLeaf); + const insertionPath = await this.appendIndexedTree(treeId, index, updatedLowNullifier, newNullifierLeaf); // Even though the low nullifier key is not updated, we still need to update the sorted keys in case we have // not seen the low nullifier before @@ -308,11 +308,11 @@ export class AvmEphemeralForest { * @param value - The note hash to be appended * @returns The insertion result which contains the insertion path */ - appendNoteHash(noteHash: Fr): Fr[] { + async appendNoteHash(noteHash: Fr): Promise { const tree = this.treeMap.get(MerkleTreeId.NOTE_HASH_TREE)!; - tree.appendLeaf(noteHash); + await tree.appendLeaf(noteHash); // We use leafCount - 1 here because we would have just appended a 
leaf - const insertionPath = tree.getSiblingPath(tree.leafCount - 1n); + const insertionPath = await tree.getSiblingPath(tree.leafCount - 1n); return insertionPath!; } @@ -548,7 +548,7 @@ */ - hashPreimage(preimage: T): Fr { + async hashPreimage(preimage: T): Promise<Fr> { const input = preimage.toHashInputs().map(x => Fr.fromBuffer(x)); - return poseidon2Hash(input); + return await poseidon2Hash(input); } } @@ -607,20 +607,23 @@ const Leaf = (value: Fr): Leaf => ({ * It is intended to be a lightweight tree that contains only the necessary information to suppport appends or updates */ export class EphemeralAvmTree { - private tree: Tree; - private readonly zeroHashes: Fr[]; + private tree: Promise; + private readonly zeroHashes: Promise; public frontier: Fr[]; private constructor(public leafCount: bigint, public depth: number) { - let zeroHash = Fr.zero(); - // Can probably cache this elsewhere - const zeroHashes = []; - for (let i = 0; i < this.depth; i++) { - zeroHashes.push(zeroHash); - zeroHash = poseidon2Hash([zeroHash, zeroHash]); - } - this.tree = Leaf(zeroHash); - this.zeroHashes = zeroHashes; + const result = (async () => { + let zeroHash = Fr.zero(); + // Can probably cache this elsewhere + const zeroHashes = []; + for (let i = 0; i < this.depth; i++) { + zeroHashes.push(zeroHash); + zeroHash = await poseidon2Hash([zeroHash, zeroHash]); + } + return { zeroHashes, zeroHash }; + })(); + this.tree = result.then(r => Leaf(r.zeroHash)); + this.zeroHashes = result.then(r => r.zeroHashes); this.frontier = []; } @@ -639,9 +642,9 @@ export class EphemeralAvmTree { * This is a recursive function that inserts a leaf into the tree * @param value - The value of the leaf to be inserted */ - appendLeaf(value: Fr): void { + async appendLeaf(value: Fr): Promise { const insertPath = this._derivePathLE(this.leafCount); - this.tree = this._insertLeaf(value, insertPath, this.depth, this.tree); + this.tree = this._insertLeaf(value, insertPath, this.depth, await this.tree); this.leafCount++; }
@@ -651,9 +654,9 @@ export class EphemeralAvmTree { * @param index - The index of the leaf to be inserted * @param depth - The depth of the leaf to be inserted (defaults to the bottom of the tree) */ - updateLeaf(value: Fr, index: bigint, depth = this.depth): void { + async updateLeaf(value: Fr, index: bigint, depth = this.depth): Promise { const insertPath = this._derivePathLE(index, depth); - this.tree = this._insertLeaf(value, insertPath, depth, this.tree); + this.tree = this._insertLeaf(value, insertPath, depth, await this.tree); } /** @@ -661,10 +664,10 @@ export class EphemeralAvmTree { * @param index - The index of the leaf for which a sibling path should be returned. * @returns The sibling path of the leaf, can fail if the path is not found */ - getSiblingPath(index: bigint): Fr[] | undefined { + async getSiblingPath(index: bigint): Promise { const searchPath = this._derivePathLE(index); // Handle cases where we error out - const { path, status } = this._getSiblingPath(searchPath, this.tree, []); + const { path, status } = await this._getSiblingPath(searchPath, await this.tree, []); if (status === SiblingStatus.ERROR) { return undefined; } @@ -676,7 +679,7 @@ export class EphemeralAvmTree { * @param index - The index of the leaf that the sibling path is derived from * @param siblingPath - The sibling path of the index */ - insertSiblingPath(index: bigint, siblingPath: Fr[]): void { + async insertSiblingPath(index: bigint, siblingPath: Fr[]): Promise { for (let i = 0; i < siblingPath.length; i++) { // Flip(XOR) the last bit because we are inserting siblings of the leaf const sibIndex = index ^ 1n; @@ -744,7 +747,7 @@ export class EphemeralAvmTree { frontierValues.push(frontierValue); // We insert it at depth - i (the truncated position) // Note this is a leaf node that wont necessarily be at the bottom of the tree (besides the first frontier) - this.tree = this._insertLeaf(frontierValue, frontierPath, this.depth - i, this.tree); + this.tree = 
this._insertLeaf(frontierValue, frontierPath, this.depth - i, await this.tree); } this.frontier = frontierValues; } @@ -752,8 +755,8 @@ export class EphemeralAvmTree { /** * Computes the root of the tree */ - public getRoot(): Fr { - return this.hashTree(this.tree, this.depth); + public async getRoot(): Promise { + return await this.hashTree(await this.tree, this.depth); } /** @@ -761,10 +764,13 @@ export class EphemeralAvmTree { * @param tree - The tree to be hashed * @param depth - The depth of the tree */ - public hashTree(tree: Tree, depth: number): Fr { + public async hashTree(tree: Tree, depth: number): Promise { switch (tree.tag) { case TreeType.NODE: { - return poseidon2Hash([this.hashTree(tree.leftTree, depth - 1), this.hashTree(tree.rightTree, depth - 1)]); + return await poseidon2Hash([ + await this.hashTree(tree.leftTree, depth - 1), + await this.hashTree(tree.rightTree, depth - 1), + ]); } case TreeType.LEAF: { return tree.value; @@ -778,12 +784,12 @@ export class EphemeralAvmTree { * @param depth - The depth of the node to be extracted * @returns The subtree rooted at the index and depth */ - public getNode(index: bigint, depth: number): Tree | undefined { + public async getNode(index: bigint, depth: number): Promise { const path = this._derivePathBE(index, depth); const truncatedPath = path.slice(0, depth); truncatedPath.reverse(); try { - return this._getNode(truncatedPath, this.tree); + return this._getNode(truncatedPath, await this.tree); } catch (e) { return undefined; } @@ -836,7 +842,7 @@ export class EphemeralAvmTree { * @param tree - The current tree * @param appendMode - If true we append the relevant zeroHashes to the tree as we traverse */ - private _insertLeaf(value: Fr, insertPath: number[], depth: number, tree: Tree): Tree { + private async _insertLeaf(value: Fr, insertPath: number[], depth: number, tree: Tree): Promise { if (insertPath.length > this.depth || depth > this.depth) { throw new Error('PATH EXCEEDS DEPTH'); } @@ -846,14 
+852,14 @@ export class EphemeralAvmTree { switch (tree.tag) { case TreeType.NODE: { return insertPath.pop() === 0 - ? Node(this._insertLeaf(value, insertPath, depth - 1, tree.leftTree), tree.rightTree) - : Node(tree.leftTree, this._insertLeaf(value, insertPath, depth - 1, tree.rightTree)); + ? Node(await this._insertLeaf(value, insertPath, depth - 1, tree.leftTree), tree.rightTree) + : Node(tree.leftTree, await this._insertLeaf(value, insertPath, depth - 1, tree.rightTree)); } case TreeType.LEAF: { - const zeroLeaf = Leaf(this.zeroHashes[depth - 1]); + const zeroLeaf = Leaf((await this.zeroHashes)[depth - 1]); return insertPath.pop() === 0 - ? Node(this._insertLeaf(value, insertPath, depth - 1, zeroLeaf), zeroLeaf) - : Node(zeroLeaf, this._insertLeaf(value, insertPath, depth - 1, zeroLeaf)); + ? Node(await this._insertLeaf(value, insertPath, depth - 1, zeroLeaf), zeroLeaf) + : Node(zeroLeaf, await this._insertLeaf(value, insertPath, depth - 1, zeroLeaf)); } } } @@ -864,7 +870,7 @@ export class EphemeralAvmTree { * @param tree - The current tree * @param acc - The accumulated sibling path */ - private _getSiblingPath(searchPath: number[], tree: Tree, acc: Fr[]): AccumulatedSiblingPath { + private async _getSiblingPath(searchPath: number[], tree: Tree, acc: Fr[]): Promise { // If we have reached the end of the path, we should be at a leaf or empty node // If it is a leaf, we check if the value is equal to the leaf value // If it is empty we check if the value is equal to zero @@ -884,12 +890,12 @@ export class EphemeralAvmTree { ? 
this._getSiblingPath( searchPath, tree.leftTree, - [this.hashTree(tree.rightTree, searchPath.length)].concat(acc), + [await this.hashTree(tree.rightTree, searchPath.length)].concat(acc), ) : this._getSiblingPath( searchPath, tree.rightTree, - [this.hashTree(tree.leftTree, searchPath.length)].concat(acc), + [await this.hashTree(tree.leftTree, searchPath.length)].concat(acc), ); } // In these two situations we are exploring a subtree we dont have information about diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index 5711c9d9dc68..9deacae614da 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -122,11 +122,11 @@ export function randomMemoryFields(length: number): Field[] { return [...Array(length)].map(_ => new Field(Fr.random())); } -export function getAvmTestContractFunctionSelector(functionName: string): FunctionSelector { +export async function getAvmTestContractFunctionSelector(functionName: string): Promise { const artifact = AvmTestContractArtifact.functions.find(f => f.name === functionName)!; assert(!!artifact, `Function ${functionName} not found in AvmTestContractArtifact`); const params = artifact.parameters; - return FunctionSelector.fromNameAndParameters(artifact.name, params); + return await FunctionSelector.fromNameAndParameters(artifact.name, params); } export function getAvmTestContractArtifact(functionName: string): FunctionArtifact { diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 452e9d7267ce..36382a85f2fc 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -152,7 +152,7 @@ export class AvmPersistableStateManager { this.log.debug(`Storage write (address=${contractAddress}, slot=${slot}): value=${value}`); // Cache storage writes for later reference/reads 
this.publicStorage.write(contractAddress, slot, value); - const leafSlot = computePublicDataTreeLeafSlot(contractAddress, slot); + const leafSlot = await computePublicDataTreeLeafSlot(contractAddress, slot); if (this.doMerkleOperations) { const result = await this.merkleTrees.writePublicStorage(leafSlot, value); assert(result !== undefined, 'Public data tree insertion error. You might want to disable skipMerkleOperations.'); @@ -181,7 +181,7 @@ export class AvmPersistableStateManager { insertionPath, ); } else { - this.trace.tracePublicStorageWrite(contractAddress, slot, value); + await this.trace.tracePublicStorageWrite(contractAddress, slot, value); } } @@ -196,7 +196,7 @@ export class AvmPersistableStateManager { const { value, cached } = await this.publicStorage.read(contractAddress, slot); this.log.debug(`Storage read (address=${contractAddress}, slot=${slot}): value=${value}, cached=${cached}`); - const leafSlot = computePublicDataTreeLeafSlot(contractAddress, slot); + const leafSlot = await computePublicDataTreeLeafSlot(contractAddress, slot); if (this.doMerkleOperations) { // Get leaf if present, low leaf if absent @@ -231,7 +231,7 @@ export class AvmPersistableStateManager { // prove that this is a low leaf that skips leafSlot, and then prove membership of the leaf. 
this.trace.tracePublicStorageRead(contractAddress, slot, value, leafPreimage, new Fr(leafIndex), leafPath); } else { - this.trace.tracePublicStorageRead(contractAddress, slot, value); + await this.trace.tracePublicStorageRead(contractAddress, slot, value); } return Promise.resolve(value); @@ -563,7 +563,7 @@ export class AvmPersistableStateManager { this.log.verbose(`[AVM] Tracing nested external contract call ${functionName}`); - this.trace.traceNestedCall( + await this.trace.traceNestedCall( forkedState.trace, nestedEnvironment, startGasLeft, diff --git a/yarn-project/simulator/src/avm/opcodes/contract.test.ts b/yarn-project/simulator/src/avm/opcodes/contract.test.ts index 236d49f4d7df..b3c8115648cb 100644 --- a/yarn-project/simulator/src/avm/opcodes/contract.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/contract.test.ts @@ -11,9 +11,9 @@ import { type AvmPersistableStateManager } from '../journal/journal.js'; import { mockGetContractInstance } from '../test_utils.js'; import { ContractInstanceMember, GetContractInstance } from './contract.js'; -describe('Contract opcodes', () => { +describe('Contract opcodes', async () => { const address = AztecAddress.random(); - const contractInstance = SerializableContractInstance.random(); + const contractInstance = await SerializableContractInstance.random(); const deployer = contractInstance.deployer; const contractClassId = contractInstance.contractClassId; const initializationHash = contractInstance.initializationHash; diff --git a/yarn-project/simulator/src/avm/opcodes/ec_add.test.ts b/yarn-project/simulator/src/avm/opcodes/ec_add.test.ts index 7fe9e7ac199e..81c03ef0cd10 100644 --- a/yarn-project/simulator/src/avm/opcodes/ec_add.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/ec_add.test.ts @@ -80,7 +80,7 @@ describe('EC Instructions', () => { }); it('Should add correctly', async () => { - const G2 = grumpkin.add(grumpkin.generator(), grumpkin.generator()); + const G2 = await 
grumpkin.add(grumpkin.generator(), grumpkin.generator()); const zero = new Uint1(0); const x1 = new Field(grumpkin.generator().x); diff --git a/yarn-project/simulator/src/avm/opcodes/ec_add.ts b/yarn-project/simulator/src/avm/opcodes/ec_add.ts index 0f29954a9673..7bd465a8dd1a 100644 --- a/yarn-project/simulator/src/avm/opcodes/ec_add.ts +++ b/yarn-project/simulator/src/avm/opcodes/ec_add.ts @@ -81,7 +81,7 @@ export class EcAdd extends Instruction { } else if (p2IsInfinite) { dest = p1; } else { - dest = grumpkin.add(p1, p2); + dest = await grumpkin.add(p1, p2); } memory.set(dstOffset, new Field(dest.x)); diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index 3dbefe89fe90..13b8ed918390 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -116,12 +116,12 @@ describe('External Calls', () => { ); mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: otherContextInstructionsBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -167,12 +167,12 @@ describe('External Calls', () => { ); mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: otherContextInstructionsBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance 
= makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -252,12 +252,12 @@ describe('External Calls', () => { const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions)); mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); - const contractClass = makeContractClassPublic(0, { + const contractClass = await makeContractClassPublic(0, { bytecode: otherContextInstructionsBytecode, selector: FunctionSelector.random(), }); mockGetContractClass(worldStateDB, contractClass); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); + const contractInstance = await makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); const instruction = new StaticCall( diff --git a/yarn-project/simulator/src/avm/opcodes/hashing.ts b/yarn-project/simulator/src/avm/opcodes/hashing.ts index 2f817814b0ea..ab7c4d750500 100644 --- a/yarn-project/simulator/src/avm/opcodes/hashing.ts +++ b/yarn-project/simulator/src/avm/opcodes/hashing.ts @@ -36,7 +36,7 @@ export class Poseidon2 extends Instruction { const outputState = poseidon2Permutation(inputState); memory.setSlice( outputOffset, - outputState.map(word => new Field(word)), + (await outputState).map(word => new Field(word)), ); memory.assert({ reads: Poseidon2.stateSize, writes: Poseidon2.stateSize, addressing }); diff --git a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts index 5af0702b10c3..4fac7e9a3a33 100644 --- a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts @@ -38,7 +38,9 @@ describe('MultiScalarMul Opcode', () => { const 
grumpkin = new Grumpkin(); // We need to ensure points are actually on curve, so we just use the generator // In future we could use a random point, for now we create an array of [G, 2G, 3G] - const points = Array.from({ length: 3 }, (_, i) => grumpkin.mul(grumpkin.generator(), new Fq(i + 1))); + const points = await Promise.all( + Array.from({ length: 3 }, (_, i) => grumpkin.mul(grumpkin.generator(), new Fq(i + 1))), + ); // Pick some big scalars to test the edge cases const scalars = [new Fq(Fq.MODULUS - 1n), new Fq(Fq.MODULUS - 2n), new Fq(1n)]; @@ -66,9 +68,9 @@ describe('MultiScalarMul Opcode', () => { const result = context.machineState.memory.getSlice(outputOffset, 3).map(r => r.toFr()); // We write it out explicitly here - let expectedResult = grumpkin.mul(points[0], scalars[0]); - expectedResult = grumpkin.add(expectedResult, grumpkin.mul(points[1], scalars[1])); - expectedResult = grumpkin.add(expectedResult, grumpkin.mul(points[2], scalars[2])); + let expectedResult = await grumpkin.mul(points[0], scalars[0]); + expectedResult = await grumpkin.add(expectedResult, await grumpkin.mul(points[1], scalars[1])); + expectedResult = await grumpkin.add(expectedResult, await grumpkin.mul(points[2], scalars[2])); expect(result).toEqual([expectedResult.x, expectedResult.y, new Fr(0n)]); }); @@ -78,7 +80,9 @@ describe('MultiScalarMul Opcode', () => { const grumpkin = new Grumpkin(); // We need to ensure points are actually on curve, so we just use the generator // In future we could use a random point, for now we create an array of [G, 2G, 3G] - const points = Array.from({ length: 3 }, (_, i) => grumpkin.mul(grumpkin.generator(), new Fq(i + 1))); + const points = await Promise.all( + Array.from({ length: 3 }, (_, i) => grumpkin.mul(grumpkin.generator(), new Fq(i + 1))), + ); // Pick some big scalars to test the edge cases const scalars = [new Fq(Fq.MODULUS - 1n), new Fq(Fq.MODULUS - 2n), new Fq(1n)]; @@ -121,9 +125,9 @@ describe('MultiScalarMul Opcode', () => { const 
result = context.machineState.memory.getSlice(outputOffset, 3).map(r => r.toFr()); // We write it out explicitly here - let expectedResult = grumpkin.mul(points[0], scalars[0]); - expectedResult = grumpkin.add(expectedResult, grumpkin.mul(points[1], scalars[1])); - expectedResult = grumpkin.add(expectedResult, grumpkin.mul(points[2], scalars[2])); + let expectedResult = await grumpkin.mul(points[0], scalars[0]); + expectedResult = await grumpkin.add(expectedResult, await grumpkin.mul(points[1], scalars[1])); + expectedResult = await grumpkin.add(expectedResult, await grumpkin.mul(points[2], scalars[2])); expect(result).toEqual([expectedResult.x, expectedResult.y, new Fr(0n)]); }); diff --git a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts index a3e24bb48fdf..ded5bf489803 100644 --- a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts +++ b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts @@ -91,20 +91,24 @@ export class MultiScalarMul extends Instruction { const [firstBaseScalarPair, ...rest]: Array<[Point, Fq]> = grumpkinPoints.map((p, idx) => [p, scalarFqVector[idx]]); // Fold the points and scalars into a single point // We have to ensure get the first point, since the identity element (point at infinity) isn't quite working in ts - const outputPoint = rest.reduce((acc, curr) => { - if (curr[1] === Fq.ZERO) { - // If we multiply by 0, the result will the point at infinity - so we ignore it - return acc; - } else if (curr[0].inf) { - // If we multiply the point at infinity by a scalar, it's still the point at infinity - return acc; - } else if (acc.inf) { - // If we accumulator is the point at infinity, we can just return the current point - return curr[0]; - } else { - return grumpkin.add(acc, grumpkin.mul(curr[0], curr[1])); + const outputPoint = await (async () => { + let acc = await grumpkin.mul(firstBaseScalarPair[0], firstBaseScalarPair[1]); + for (const curr of 
rest) { + if (curr[1] === Fq.ZERO) { + // If we multiply by 0, the result will the point at infinity - so we ignore it + continue; + } else if (curr[0].inf) { + // If we multiply the point at infinity by a scalar, it's still the point at infinity + continue; + } else if (acc.inf) { + // If we accumulator is the point at infinity, we can just return the current point + acc = curr[0]; + } else { + acc = await grumpkin.add(acc, await grumpkin.mul(curr[0], curr[1])); + } } - }, grumpkin.mul(firstBaseScalarPair[0], firstBaseScalarPair[1])); + return acc; + })(); memory.set(outputOffset, new Field(outputPoint.x)); memory.set(outputOffset + 1, new Field(outputPoint.y)); diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 17df61646977..3531bad11ec5 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -245,10 +245,10 @@ export class ClientExecutionContext extends ViewDataOracle { .join(', ')}`, ); - notes.forEach(n => { + notes.forEach(async n => { if (n.index !== undefined) { - const uniqueNoteHash = computeUniqueNoteHash(n.nonce, n.noteHash); - const siloedNoteHash = siloNoteHash(n.contractAddress, uniqueNoteHash); + const uniqueNoteHash = await computeUniqueNoteHash(n.nonce, n.noteHash); + const siloedNoteHash = await siloNoteHash(n.contractAddress, uniqueNoteHash); this.noteHashLeafIndexMap.set(siloedNoteHash.toBigInt(), n.index); } }); @@ -294,8 +294,8 @@ export class ClientExecutionContext extends ViewDataOracle { * @param innerNullifier - The pending nullifier to add in the list (not yet siloed by contract address). * @param noteHash - A hash of the new note. 
*/ - public override notifyNullifiedNote(innerNullifier: Fr, noteHash: Fr, counter: number) { - const nullifiedNoteHashCounter = this.noteCache.nullifyNote( + public override async notifyNullifiedNote(innerNullifier: Fr, noteHash: Fr, counter: number) { + const nullifiedNoteHashCounter = await this.noteCache.nullifyNote( this.callContext.contractAddress, innerNullifier, noteHash, @@ -303,7 +303,6 @@ export class ClientExecutionContext extends ViewDataOracle { if (nullifiedNoteHashCounter !== undefined) { this.noteHashNullifierCounterMap.set(nullifiedNoteHashCounter, counter); } - return Promise.resolve(); } /** @@ -360,7 +359,7 @@ export class ClientExecutionContext extends ViewDataOracle { const derivedTxContext = this.txContext.clone(); - const derivedCallContext = this.deriveCallContext(targetContractAddress, targetArtifact, isStaticCall); + const derivedCallContext = await this.deriveCallContext(targetContractAddress, targetArtifact, isStaticCall); const context = new ClientExecutionContext( argsHash, @@ -415,7 +414,7 @@ export class ClientExecutionContext extends ViewDataOracle { isStaticCall: boolean, ) { const targetArtifact = await this.db.getFunctionArtifact(targetContractAddress, functionSelector); - const derivedCallContext = this.deriveCallContext(targetContractAddress, targetArtifact, isStaticCall); + const derivedCallContext = await this.deriveCallContext(targetContractAddress, targetArtifact, isStaticCall); const args = this.packedValuesCache.unpack(argsHash); this.log.verbose( @@ -458,7 +457,7 @@ export class ClientExecutionContext extends ViewDataOracle { // new_args = [selector, ...old_args], so as to make it suitable to call the public dispatch function. // We don't validate or compute it in the circuit because a) it's harder to do with slices, and // b) this is only temporary. 
- const newArgsHash = this.packedValuesCache.pack([ + const newArgsHash = await this.packedValuesCache.pack([ functionSelector.toField(), ...this.packedValuesCache.unpack(argsHash), ]); @@ -497,7 +496,7 @@ export class ClientExecutionContext extends ViewDataOracle { // new_args = [selector, ...old_args], so as to make it suitable to call the public dispatch function. // We don't validate or compute it in the circuit because a) it's harder to do with slices, and // b) this is only temporary. - const newArgsHash = this.packedValuesCache.pack([ + const newArgsHash = await this.packedValuesCache.pack([ functionSelector.toField(), ...this.packedValuesCache.unpack(argsHash), ]); @@ -512,8 +511,8 @@ export class ClientExecutionContext extends ViewDataOracle { return newArgsHash; } - public override notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number): void { - this.noteCache.setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter); + public override async notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number): Promise { + await this.noteCache.setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter); } /** @@ -523,7 +522,7 @@ export class ClientExecutionContext extends ViewDataOracle { * @param isStaticCall - Whether the call is a static call. * @returns The derived call context. 
*/ - private deriveCallContext( + private async deriveCallContext( targetContractAddress: AztecAddress, targetArtifact: FunctionArtifact, isStaticCall = false, @@ -531,7 +530,7 @@ export class ClientExecutionContext extends ViewDataOracle { return new CallContext( this.contractAddress, targetContractAddress, - FunctionSelector.fromNameAndParameters(targetArtifact.name, targetArtifact.parameters), + await FunctionSelector.fromNameAndParameters(targetArtifact.name, targetArtifact.parameters), isStaticCall, ); } diff --git a/yarn-project/simulator/src/client/execution_note_cache.ts b/yarn-project/simulator/src/client/execution_note_cache.ts index f8e1ec0a670e..c26c0ece43ce 100644 --- a/yarn-project/simulator/src/client/execution_note_cache.ts +++ b/yarn-project/simulator/src/client/execution_note_cache.ts @@ -36,7 +36,7 @@ export class ExecutionNoteCache { constructor(private readonly txHash: Fr) {} - public setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number) { + public async setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number) { if (this.minRevertibleSideEffectCounter && this.minRevertibleSideEffectCounter !== minRevertibleSideEffectCounter) { throw new Error( `Cannot override minRevertibleSideEffectCounter. Current value: ${minRevertibleSideEffectCounter}. Previous value: ${this.minRevertibleSideEffectCounter}`, @@ -49,15 +49,17 @@ export class ExecutionNoteCache { // They cannot be squashed by nullifiers emitted after minRevertibleSideEffectCounter is set. // Their indexes in the tx are known at this point and won't change. So we can assign a nonce to each one of them. // The nonces will be used to create the "complete" nullifier. 
- const updatedNotes = this.notes.map(({ note, counter }, i) => { - const nonce = computeNoteHashNonce(this.txHash, i); - const uniqueNoteHash = computeUniqueNoteHash(nonce, note.noteHash); - return { - counter, - note: { ...note, nonce }, - noteHashForConsumption: siloNoteHash(note.contractAddress, uniqueNoteHash), - }; - }); + const updatedNotes = await Promise.all( + this.notes.map(async ({ note, counter }, i) => { + const nonce = await computeNoteHashNonce(this.txHash, i); + const uniqueNoteHash = await computeUniqueNoteHash(nonce, note.noteHash); + return { + counter, + note: { ...note, nonce }, + noteHashForConsumption: await siloNoteHash(note.contractAddress, uniqueNoteHash), + }; + }), + ); // Rebuild the data. this.notes = []; this.noteMap = new Map(); @@ -86,8 +88,8 @@ export class ExecutionNoteCache { * @param noteHash - A hash of the note. If this value equals 0, it means the note being nullified is from a previous * transaction (and thus not a new note). */ - public nullifyNote(contractAddress: AztecAddress, innerNullifier: Fr, noteHash: Fr) { - const siloedNullifier = siloNullifier(contractAddress, innerNullifier); + public async nullifyNote(contractAddress: AztecAddress, innerNullifier: Fr, noteHash: Fr) { + const siloedNullifier = await siloNullifier(contractAddress, innerNullifier); const nullifiers = this.getNullifiers(contractAddress); nullifiers.add(siloedNullifier.value); this.nullifierMap.set(contractAddress.toBigInt(), nullifiers); diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 022022db071f..a0c44ece9941 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -111,7 +111,7 @@ describe('Private Execution test suite', () => { gasSettings: GasSettings.default({ maxFeesPerGas: new GasFees(10, 10) }), }; - const runSimulator = ({ + const runSimulator = async ({ artifact, 
args = [], msgSender = AztecAddress.fromField(Fr.MAX_FIELD_VALUE), @@ -124,11 +124,11 @@ describe('Private Execution test suite', () => { args?: any[]; txContext?: Partial>; }) => { - const packedArguments = PackedValues.fromValues(encodeArguments(artifact, args)); + const packedArguments = await PackedValues.fromValues(encodeArguments(artifact, args)); const txRequest = TxExecutionRequest.from({ origin: contractAddress, firstCallArgsHash: packedArguments.hash, - functionSelector: FunctionSelector.fromNameAndParameters(artifact.name, artifact.parameters), + functionSelector: await FunctionSelector.fromNameAndParameters(artifact.name, artifact.parameters), txContext: TxContext.from({ ...txContextFields, ...txContext }), argsOfCalls: [packedArguments], authWitnesses: [], @@ -151,7 +151,10 @@ describe('Private Execution test suite', () => { await tree.appendLeaves(leaves); // Create a new snapshot. - const newSnap = new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.getRoot(true)), Number(tree.getNumLeaves(true))); + const newSnap = new AppendOnlyTreeSnapshot( + Fr.fromBuffer(await tree.getRoot(true)), + Number(tree.getNumLeaves(true)), + ); if (name === 'noteHash' || name === 'l1ToL2Messages' || name === 'publicData') { header = new BlockHeader( @@ -187,13 +190,17 @@ describe('Private Execution test suite', () => { logger = createDebugLogger('aztec:test:private_execution'); const ownerPartialAddress = Fr.random(); - ownerCompleteAddress = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, ownerPartialAddress); - ({ masterNullifierSecretKey: ownerNskM, masterOutgoingViewingSecretKey: ownerOvskM } = deriveKeys(ownerSk)); + ownerCompleteAddress = await CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, ownerPartialAddress); + ({ masterNullifierSecretKey: ownerNskM, masterOutgoingViewingSecretKey: ownerOvskM } = await deriveKeys(ownerSk)); const recipientPartialAddress = Fr.random(); - recipientCompleteAddress = 
CompleteAddress.fromSecretKeyAndPartialAddress(recipientSk, recipientPartialAddress); - ({ masterNullifierSecretKey: recipientNskM, masterOutgoingViewingSecretKey: recipientOvskM } = - deriveKeys(recipientSk)); + recipientCompleteAddress = await CompleteAddress.fromSecretKeyAndPartialAddress( + recipientSk, + recipientPartialAddress, + ); + ({ masterNullifierSecretKey: recipientNskM, masterOutgoingViewingSecretKey: recipientOvskM } = await deriveKeys( + recipientSk, + )); owner = ownerCompleteAddress.address; recipient = recipientCompleteAddress.address; @@ -202,36 +209,36 @@ describe('Private Execution test suite', () => { beforeEach(async () => { trees = {}; oracle = mock(); - oracle.getKeyValidationRequest.mockImplementation((pkMHash: Fr, contractAddress: AztecAddress) => { - if (pkMHash.equals(ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { + oracle.getKeyValidationRequest.mockImplementation(async (pkMHash: Fr, contractAddress: AztecAddress) => { + if (pkMHash.equals(await ownerCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { return Promise.resolve( new KeyValidationRequest( ownerCompleteAddress.publicKeys.masterNullifierPublicKey, - computeAppNullifierSecretKey(ownerNskM, contractAddress), + await computeAppNullifierSecretKey(ownerNskM, contractAddress), ), ); } - if (pkMHash.equals(ownerCompleteAddress.publicKeys.masterOutgoingViewingPublicKey.hash())) { + if (pkMHash.equals(await ownerCompleteAddress.publicKeys.masterOutgoingViewingPublicKey.hash())) { return Promise.resolve( new KeyValidationRequest( ownerCompleteAddress.publicKeys.masterOutgoingViewingPublicKey, - computeOvskApp(ownerOvskM, contractAddress), + await computeOvskApp(ownerOvskM, contractAddress), ), ); } - if (pkMHash.equals(recipientCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { + if (pkMHash.equals(await recipientCompleteAddress.publicKeys.masterNullifierPublicKey.hash())) { return Promise.resolve( new KeyValidationRequest( 
recipientCompleteAddress.publicKeys.masterNullifierPublicKey, - computeAppNullifierSecretKey(recipientNskM, contractAddress), + await computeAppNullifierSecretKey(recipientNskM, contractAddress), ), ); } - if (pkMHash.equals(recipientCompleteAddress.publicKeys.masterOutgoingViewingPublicKey.hash())) { + if (pkMHash.equals(await recipientCompleteAddress.publicKeys.masterOutgoingViewingPublicKey.hash())) { return Promise.resolve( new KeyValidationRequest( recipientCompleteAddress.publicKeys.masterOutgoingViewingPublicKey, - computeOvskApp(recipientOvskM, contractAddress), + await computeOvskApp(recipientOvskM, contractAddress), ), ); } @@ -274,7 +281,7 @@ describe('Private Execution test suite', () => { it('emits a field array as an encrypted log', async () => { // NB: this test does NOT cover correct enc/dec of values, just whether // the contexts correctly populate non-note encrypted logs - const artifact = getFunctionArtifact(TestContractArtifact, 'emit_array_as_encrypted_log'); + const artifact = await getFunctionArtifact(TestContractArtifact, 'emit_array_as_encrypted_log'); // We emit the outgoing here to recipient for no reason at all const outgoingViewer = recipient; const args = [times(5, () => Fr.random()), owner, outgoingViewer, false]; @@ -292,7 +299,7 @@ describe('Private Execution test suite', () => { const mockFirstNullifier = new Fr(1111); let currentNoteIndex = 0n; - const buildNote = (amount: bigint, ownerAddress: AztecAddress, storageSlot: Fr, noteTypeId: NoteSelector) => { + const buildNote = async (amount: bigint, ownerAddress: AztecAddress, storageSlot: Fr, noteTypeId: NoteSelector) => { // WARNING: this is not actually how nonces are computed! // For the purpose of this test we use a mocked firstNullifier and and a random number // to compute the nonce. 
Proper nonces are only enforced later by the kernel/later circuits @@ -302,10 +309,10 @@ describe('Private Execution test suite', () => { // array index at the output of the final kernel/ordering circuit are used to derive nonce via: // `hash(firstNullifier, noteHashIndex)` const noteHashIndex = randomInt(1); // mock index in TX's final noteHashes array - const nonce = computeNoteHashNonce(mockFirstNullifier, noteHashIndex); + const nonce = await computeNoteHashNonce(mockFirstNullifier, noteHashIndex); const note = new Note([new Fr(amount), ownerAddress.toField(), Fr.random()]); // Note: The following does not correspond to how note hashing is generally done in real notes. - const noteHash = poseidon2Hash([storageSlot, ...note.items]); + const noteHash = await poseidon2Hash([storageSlot, ...note.items]); return { contractAddress, storageSlot, @@ -330,9 +337,11 @@ describe('Private Execution test suite', () => { it('should have a constructor with arguments that inserts notes', async () => { const initArgs = [owner, owner, 140]; - const instance = getContractInstanceFromDeployParams(StatefulTestContractArtifact, { constructorArgs: initArgs }); + const instance = await getContractInstanceFromDeployParams(StatefulTestContractArtifact, { + constructorArgs: initArgs, + }); oracle.getContractInstance.mockResolvedValue(instance); - const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'constructor'); + const artifact = await getFunctionArtifact(StatefulTestContractArtifact, 'constructor'); const topLevelResult = await runSimulator({ args: initArgs, artifact, contractAddress: instance.address }); const result = topLevelResult.nestedExecutions[0]; @@ -352,7 +361,7 @@ describe('Private Execution test suite', () => { }); it('should run the create_note function', async () => { - const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'create_note_no_init_check'); + const artifact = await getFunctionArtifact(StatefulTestContractArtifact, 
'create_note_no_init_check'); const result = await runSimulator({ args: [owner, owner, 140], artifact }); @@ -373,18 +382,18 @@ describe('Private Execution test suite', () => { it('should run the destroy_and_create function', async () => { const amountToTransfer = 100n; - const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); + const artifact = await getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); - const storageSlot = deriveStorageSlotInMap(StatefulTestContractArtifact.storageLayout['notes'].slot, owner); - const recipientStorageSlot = deriveStorageSlotInMap( + const storageSlot = await deriveStorageSlotInMap(StatefulTestContractArtifact.storageLayout['notes'].slot, owner); + const recipientStorageSlot = await deriveStorageSlotInMap( StatefulTestContractArtifact.storageLayout['notes'].slot, recipient, ); - const notes = [ + const notes = await Promise.all([ buildNote(60n, ownerCompleteAddress.address, storageSlot, valueNoteTypeId), buildNote(80n, ownerCompleteAddress.address, storageSlot, valueNoteTypeId), - ]; + ]); oracle.syncTaggedLogs.mockResolvedValue(new Map()); oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue(notes); @@ -438,11 +447,11 @@ describe('Private Execution test suite', () => { it('should be able to destroy_and_create with dummy notes', async () => { const amountToTransfer = 100n; const balance = 160n; - const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); + const artifact = await getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); - const storageSlot = deriveStorageSlotInMap(new Fr(1n), owner); + const storageSlot = await deriveStorageSlotInMap(new Fr(1n), owner); - const notes = [buildNote(balance, ownerCompleteAddress.address, storageSlot, valueNoteTypeId)]; + const notes = await Promise.all([buildNote(balance, ownerCompleteAddress.address, 
storageSlot, valueNoteTypeId)]); oracle.syncTaggedLogs.mockResolvedValue(new Map()); oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue(notes); @@ -480,15 +489,15 @@ describe('Private Execution test suite', () => { it('child function should be callable', async () => { const initialValue = 100n; - const artifact = getFunctionArtifact(ChildContractArtifact, 'value'); + const artifact = await getFunctionArtifact(ChildContractArtifact, 'value'); const result = await runSimulator({ args: [initialValue], artifact }); expect(result.returnValues).toEqual([new Fr(initialValue + privateIncrement)]); }); it('parent should call child', async () => { - const childArtifact = getFunctionArtifact(ChildContractArtifact, 'value'); - const parentArtifact = getFunctionArtifact(ParentContractArtifact, 'entry_point'); + const childArtifact = await getFunctionArtifact(ChildContractArtifact, 'value'); + const parentArtifact = await getFunctionArtifact(ParentContractArtifact, 'entry_point'); const parentAddress = AztecAddress.random(); const childAddress = AztecAddress.random(); const childSelector = FunctionSelector.fromNameAndParameters(childArtifact.name, childArtifact.parameters); @@ -517,16 +526,16 @@ describe('Private Execution test suite', () => { let argsHash: Fr; let testCodeGenArtifact: FunctionArtifact; - beforeAll(() => { + beforeAll(async () => { // These args should match the ones hardcoded in importer contract // eslint-disable-next-line camelcase const dummyNote = { amount: 1, secret_hash: 2 }; // eslint-disable-next-line camelcase const deepStruct = { a_field: 1, a_bool: true, a_note: dummyNote, many_notes: [dummyNote, dummyNote, dummyNote] }; args = [1, true, 1, [1, 2], dummyNote, deepStruct]; - testCodeGenArtifact = getFunctionArtifact(TestContractArtifact, 'test_code_gen'); + testCodeGenArtifact = await getFunctionArtifact(TestContractArtifact, 'test_code_gen'); const serializedArgs = encodeArguments(testCodeGenArtifact, args); - argsHash = 
computeVarArgsHash(serializedArgs); + argsHash = await computeVarArgsHash(serializedArgs); }); it('test function should be directly callable', async () => { @@ -538,7 +547,7 @@ describe('Private Execution test suite', () => { it('test function should be callable through autogenerated interface', async () => { const testAddress = AztecAddress.random(); - const parentArtifact = getFunctionArtifact(ImportTestContractArtifact, 'main_contract'); + const parentArtifact = await getFunctionArtifact(ImportTestContractArtifact, 'main_contract'); const testCodeGenSelector = FunctionSelector.fromNameAndParameters( testCodeGenArtifact.name, testCodeGenArtifact.parameters, @@ -560,8 +569,8 @@ describe('Private Execution test suite', () => { describe('consuming messages', () => { const contractAddress = defaultContractAddress; - describe('L1 to L2', () => { - const artifact = getFunctionArtifact(TestContractArtifact, 'consume_mint_to_private_message'); + describe('L1 to L2', async () => { + const artifact = await getFunctionArtifact(TestContractArtifact, 'consume_mint_to_private_message'); let bridgedAmount = 100n; const l1ToL2MessageIndex = 0; @@ -610,7 +619,7 @@ describe('Private Execution test suite', () => { }; it('Should be able to consume a dummy cross chain message', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); await mockOracles(); @@ -627,7 +636,7 @@ describe('Private Execution test suite', () => { }); it('Invalid membership proof', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); @@ -647,7 +656,7 @@ describe('Private Execution test suite', () => { it('Invalid recipient', async () => { crossChainMsgRecipient = AztecAddress.random(); - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); @@ -667,7 +676,7 @@ describe('Private Execution test suite', () => { it('Invalid sender', async () => { crossChainMsgSender = 
EthAddress.random(); - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); @@ -686,7 +695,7 @@ describe('Private Execution test suite', () => { }); it('Invalid chainid', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); @@ -705,7 +714,7 @@ describe('Private Execution test suite', () => { }); it('Invalid version', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); args = computeArgs(); @@ -724,7 +733,7 @@ describe('Private Execution test suite', () => { }); it('Invalid content', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); bridgedAmount = bridgedAmount + 1n; // Invalid amount args = computeArgs(); @@ -744,7 +753,7 @@ describe('Private Execution test suite', () => { }); it('Invalid Secret', async () => { - preimage = computePreimage(); + preimage = await computePreimage(); secretForL1ToL2MessageConsumption = Fr.random(); args = computeArgs(); @@ -765,9 +774,9 @@ describe('Private Execution test suite', () => { }); it('Should be able to consume a dummy public to private message', async () => { - const artifact = getFunctionArtifact(TestContractArtifact, 'consume_note_from_secret'); + const artifact = await getFunctionArtifact(TestContractArtifact, 'consume_note_from_secret'); const secret = new Fr(1n); - const secretHash = computeSecretHash(secret); + const secretHash = await computeSecretHash(secret); const note = new Note([secretHash]); const storageSlot = TestContractArtifact.storageLayout['example_set'].slot; oracle.syncTaggedLogs.mockResolvedValue(new Map()); @@ -798,11 +807,11 @@ describe('Private Execution test suite', () => { describe('enqueued calls', () => { it.each([false, true])('parent should enqueue call to child (internal %p)', async isInternal => { - const parentArtifact = getFunctionArtifact(ParentContractArtifact, 'enqueue_call_to_child'); + const parentArtifact = await 
getFunctionArtifact(ParentContractArtifact, 'enqueue_call_to_child'); const childContractArtifact = ChildContractArtifact.functions.find(fn => fn.name === 'public_dispatch')!; expect(childContractArtifact).toBeDefined(); const childAddress = AztecAddress.random(); - const childSelector = FunctionSelector.fromSignature('pub_set_value(Field)'); + const childSelector = await FunctionSelector.fromSignature('pub_set_value(Field)'); const parentAddress = AztecAddress.random(); oracle.getFunctionArtifact.mockImplementation(() => Promise.resolve({ ...childContractArtifact, isInternal })); @@ -834,8 +843,8 @@ describe('Private Execution test suite', () => { describe('setting teardown function', () => { it('should be able to set a teardown function', async () => { - const entrypoint = getFunctionArtifact(TestContractArtifact, 'test_setting_teardown'); - const teardown = getFunctionArtifact(TestContractArtifact, 'dummy_public_call'); + const entrypoint = await getFunctionArtifact(TestContractArtifact, 'test_setting_teardown'); + const teardown = await getFunctionArtifact(TestContractArtifact, 'dummy_public_call'); oracle.getFunctionArtifact.mockImplementation(() => Promise.resolve({ ...teardown })); const result = await runSimulator({ artifact: entrypoint }); expect(result.publicTeardownFunctionCall.isEmpty()).toBeFalsy(); @@ -848,14 +857,14 @@ describe('Private Execution test suite', () => { describe('setting fee payer', () => { it('should default to not being a fee payer', async () => { // arbitrary random function that doesn't set a fee payer - const entrypoint = getFunctionArtifact(TestContractArtifact, 'get_this_address'); + const entrypoint = await getFunctionArtifact(TestContractArtifact, 'get_this_address'); const contractAddress = AztecAddress.random(); const result = await runSimulator({ artifact: entrypoint, contractAddress }); expect(result.publicInputs.isFeePayer).toBe(false); }); it('should be able to set a fee payer', async () => { - const entrypoint = 
getFunctionArtifact(TestContractArtifact, 'test_setting_fee_payer'); + const entrypoint = await getFunctionArtifact(TestContractArtifact, 'test_setting_fee_payer'); const contractAddress = AztecAddress.random(); const result = await runSimulator({ artifact: entrypoint, contractAddress }); expect(result.publicInputs.isFeePayer).toBe(true); @@ -882,7 +891,10 @@ describe('Private Execution test suite', () => { const amountToTransfer = 100n; const contractAddress = AztecAddress.random(); - const artifact = getFunctionArtifact(PendingNoteHashesContractArtifact, 'test_insert_then_get_then_nullify_flat'); + const artifact = await getFunctionArtifact( + PendingNoteHashesContractArtifact, + 'test_insert_then_get_then_nullify_flat', + ); const outgoingViewer = owner; const args = [amountToTransfer, owner, outgoingViewer]; @@ -902,7 +914,7 @@ describe('Private Execution test suite', () => { expect(noteHashesFromCall).toHaveLength(1); const noteHashFromCall = noteHashesFromCall[0].value; - const storageSlot = deriveStorageSlotInMap( + const storageSlot = await deriveStorageSlotInMap( PendingNoteHashesContractArtifact.storageLayout['balances'].slot, owner, ); @@ -925,8 +937,8 @@ describe('Private Execution test suite', () => { expect(result.returnValues).toEqual([new Fr(amountToTransfer)]); const nullifier = result.publicInputs.nullifiers[0]; - const expectedNullifier = poseidon2HashWithSeparator( - [derivedNoteHash, computeAppNullifierSecretKey(ownerNskM, contractAddress)], + const expectedNullifier = await poseidon2HashWithSeparator( + [derivedNoteHash, await computeAppNullifierSecretKey(ownerNskM, contractAddress)], GeneratorIndex.NOTE_NULLIFIER, ); expect(nullifier.value).toEqual(expectedNullifier); @@ -940,16 +952,22 @@ describe('Private Execution test suite', () => { const amountToTransfer = 100n; const contractAddress = AztecAddress.random(); - const artifact = getFunctionArtifact( + const artifact = await getFunctionArtifact( PendingNoteHashesContractArtifact, 
'test_insert_then_get_then_nullify_all_in_nested_calls', ); - const insertArtifact = getFunctionArtifact(PendingNoteHashesContractArtifact, 'insert_note'); + const insertArtifact = await getFunctionArtifact(PendingNoteHashesContractArtifact, 'insert_note'); - const getThenNullifyArtifact = getFunctionArtifact(PendingNoteHashesContractArtifact, 'get_then_nullify_note'); + const getThenNullifyArtifact = await getFunctionArtifact( + PendingNoteHashesContractArtifact, + 'get_then_nullify_note', + ); - const insertFnSelector = FunctionSelector.fromNameAndParameters(insertArtifact.name, insertArtifact.parameters); - const getThenNullifyFnSelector = FunctionSelector.fromNameAndParameters( + const insertFnSelector = await FunctionSelector.fromNameAndParameters( + insertArtifact.name, + insertArtifact.parameters, + ); + const getThenNullifyFnSelector = await FunctionSelector.fromNameAndParameters( getThenNullifyArtifact.name, getThenNullifyArtifact.parameters, ); @@ -971,7 +989,7 @@ describe('Private Execution test suite', () => { const execInsert = result.nestedExecutions[0]; const execGetThenNullify = result.nestedExecutions[1]; - const storageSlot = deriveStorageSlotInMap( + const storageSlot = await deriveStorageSlotInMap( PendingNoteHashesContractArtifact.storageLayout['balances'].slot, owner, ); @@ -1004,8 +1022,8 @@ describe('Private Execution test suite', () => { expect(execGetThenNullify.returnValues).toEqual([new Fr(amountToTransfer)]); const nullifier = execGetThenNullify.publicInputs.nullifiers[0]; - const expectedNullifier = poseidon2HashWithSeparator( - [derivedNoteHash, computeAppNullifierSecretKey(ownerNskM, contractAddress)], + const expectedNullifier = await poseidon2HashWithSeparator( + [derivedNoteHash, await computeAppNullifierSecretKey(ownerNskM, contractAddress)], GeneratorIndex.NOTE_NULLIFIER, ); expect(nullifier.value).toEqual(expectedNullifier); @@ -1020,7 +1038,7 @@ describe('Private Execution test suite', () => { const contractAddress = 
AztecAddress.random(); - const artifact = getFunctionArtifact(PendingNoteHashesContractArtifact, 'test_bad_get_then_insert_flat'); + const artifact = await getFunctionArtifact(PendingNoteHashesContractArtifact, 'test_bad_get_then_insert_flat'); const args = [amountToTransfer, owner]; // This will throw if we read the note before it was inserted @@ -1035,10 +1053,10 @@ describe('Private Execution test suite', () => { describe('get master incoming viewing public key', () => { it('gets the public key for an address', async () => { // Tweak the contract artifact so we can extract return values - const artifact = getFunctionArtifact(TestContractArtifact, 'get_master_incoming_viewing_public_key'); + const artifact = await getFunctionArtifact(TestContractArtifact, 'get_master_incoming_viewing_public_key'); // Generate a partial address, pubkey, and resulting address - const completeAddress = CompleteAddress.random(); + const completeAddress = await CompleteAddress.random(); const args = [completeAddress.address]; const pubKey = completeAddress.publicKeys.masterIncomingViewingPublicKey; @@ -1050,7 +1068,7 @@ describe('Private Execution test suite', () => { describe('Get notes', () => { it('fails if returning no notes', async () => { - const artifact = getFunctionArtifact(TestContractArtifact, 'call_get_notes'); + const artifact = await getFunctionArtifact(TestContractArtifact, 'call_get_notes'); const args = [2n, true]; oracle.syncTaggedLogs.mockResolvedValue(new Map()); @@ -1068,7 +1086,7 @@ describe('Private Execution test suite', () => { const contractAddress = AztecAddress.random(); // Tweak the contract artifact so we can extract return values - const artifact = getFunctionArtifact(TestContractArtifact, 'get_this_address'); + const artifact = await getFunctionArtifact(TestContractArtifact, 'get_this_address'); // Overwrite the oracle return value const result = await runSimulator({ artifact, args: [], contractAddress }); @@ -1082,12 +1100,12 @@ describe('Private 
Execution test suite', () => { let args: any[]; let artifact: FunctionArtifact; - beforeEach(() => { + beforeEach(async () => { chainId = Fr.random(); version = Fr.random(); args = [chainId, version]; - artifact = getFunctionArtifact(TestContractArtifact, 'assert_private_global_vars'); + artifact = await getFunctionArtifact(TestContractArtifact, 'assert_private_global_vars'); oracle.getFunctionArtifact.mockImplementation(() => Promise.resolve(artifact)); }); @@ -1116,8 +1134,8 @@ describe('Private Execution test suite', () => { describe('Historical header in private context', () => { let artifact: FunctionArtifact; - beforeEach(() => { - artifact = getFunctionArtifact(TestContractArtifact, 'assert_header_private'); + beforeEach(async () => { + artifact = await getFunctionArtifact(TestContractArtifact, 'assert_header_private'); oracle.getFunctionArtifact.mockImplementation(() => Promise.resolve(artifact)); header = makeHeader(); diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index 951899e10a3f..4ada3e014185 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -19,9 +19,9 @@ describe('Simulator', () => { let contractAddress: AztecAddress; let appNullifierSecretKey: Fr; - beforeEach(() => { + beforeEach(async () => { const ownerSk = Fr.fromString('2dcc5485a58316776299be08c78fa3788a1a7961ae30dc747fb1be17692a8d32'); - const allOwnerKeys = deriveKeys(ownerSk); + const allOwnerKeys = await deriveKeys(ownerSk); ownerMasterNullifierPublicKey = allOwnerKeys.publicKeys.masterNullifierPublicKey; const ownerMasterNullifierSecretKey = allOwnerKeys.masterNullifierSecretKey; @@ -29,9 +29,9 @@ describe('Simulator', () => { contractAddress = AztecAddress.random(); const ownerPartialAddress = Fr.random(); - const ownerCompleteAddress = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, ownerPartialAddress); + const ownerCompleteAddress 
= await CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, ownerPartialAddress); - appNullifierSecretKey = computeAppNullifierSecretKey(ownerMasterNullifierSecretKey, contractAddress); + appNullifierSecretKey = await computeAppNullifierSecretKey(ownerMasterNullifierSecretKey, contractAddress); oracle = mock(); node = mock(); @@ -43,8 +43,8 @@ describe('Simulator', () => { simulator = new AcirSimulator(oracle, node); }); - describe('computeNoteHashAndOptionallyANullifier', () => { - const artifact = getFunctionArtifact( + describe('computeNoteHashAndOptionallyANullifier', async () => { + const artifact = await getFunctionArtifact( TokenBlacklistContractArtifact, 'compute_note_hash_and_optionally_a_nullifier', ); @@ -53,20 +53,20 @@ describe('Simulator', () => { const noteTypeId = TokenBlacklistContractArtifact.notes['TokenNote'].id; // Amount is a U128, with a lo and hi limbs - const createNote = (amount = 123n) => - new Note([new Fr(amount), new Fr(0), ownerMasterNullifierPublicKey.hash(), Fr.random()]); + const createNote = async (amount = 123n) => + new Note([new Fr(amount), new Fr(0), await ownerMasterNullifierPublicKey.hash(), Fr.random()]); it('throw if the contract does not implement "compute_note_hash_and_optionally_a_nullifier"', async () => { oracle.getFunctionArtifactByName.mockResolvedValue(undefined); - const note = createNote(); + const note = await createNote(); await expect( simulator.computeNoteHashAndOptionallyANullifier(contractAddress, nonce, storageSlot, noteTypeId, true, note), ).rejects.toThrow(/Mandatory implementation of "compute_note_hash_and_optionally_a_nullifier" missing/); }); it('throw if "compute_note_hash_and_optionally_a_nullifier" has the wrong number of parameters', async () => { - const note = createNote(); + const note = await createNote(); const modifiedArtifact: FunctionArtifact = { ...artifact, @@ -84,7 +84,7 @@ describe('Simulator', () => { }); it('throw if a note has more fields than 
"compute_note_hash_and_optionally_a_nullifier" can process', async () => { - const note = createNote(); + const note = await createNote(); const wrongPreimageLength = note.length - 1; const modifiedArtifact: FunctionArtifact = { diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index a60f634a2883..22393ee03f2f 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -65,11 +65,11 @@ export class AcirSimulator { const callContext = new CallContext( msgSender, contractAddress, - FunctionSelector.fromNameAndParameters(entryPointArtifact.name, entryPointArtifact.parameters), + await FunctionSelector.fromNameAndParameters(entryPointArtifact.name, entryPointArtifact.parameters), entryPointArtifact.isStatic, ); - const txHash = request.toTxRequest().hash(); + const txHash = await request.toTxRequest().hash(); const context = new ClientExecutionContext( request.firstCallArgsHash, diff --git a/yarn-project/simulator/src/client/unconstrained_execution.test.ts b/yarn-project/simulator/src/client/unconstrained_execution.test.ts index c285da49d9ad..b9cd5a729c2a 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.test.ts +++ b/yarn-project/simulator/src/client/unconstrained_execution.test.ts @@ -35,8 +35,8 @@ describe('Unconstrained Execution test suite', () => { return new Note([new Fr(amount), owner.toField(), Fr.random()]); }; - beforeEach(() => { - const ownerCompleteAddress = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSecretKey, Fr.random()); + beforeEach(async () => { + const ownerCompleteAddress = await CompleteAddress.fromSecretKeyAndPartialAddress(ownerSecretKey, Fr.random()); owner = ownerCompleteAddress.address; oracle.getCompleteAddress.mockImplementation((account: AztecAddress) => { diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index 
67af9e77df39..0305457cb926 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -246,7 +246,7 @@ export class ViewDataOracle extends TypedOracle { * @returns A boolean indicating whether the nullifier exists in the tree or not. */ public override async checkNullifierExists(innerNullifier: Fr) { - const nullifier = siloNullifier(this.contractAddress, innerNullifier!); + const nullifier = await siloNullifier(this.contractAddress, innerNullifier!); const index = await this.db.getNullifierIndex(nullifier); return index !== undefined; } diff --git a/yarn-project/simulator/src/common/packed_values_cache.ts b/yarn-project/simulator/src/common/packed_values_cache.ts index 47a8281b330a..1e32f5af25fd 100644 --- a/yarn-project/simulator/src/common/packed_values_cache.ts +++ b/yarn-project/simulator/src/common/packed_values_cache.ts @@ -44,11 +44,11 @@ export class PackedValuesCache { * @param values - The values to pack. * @returns The hash of the packed values. 
*/ - public pack(values: Fr[]) { + public async pack(values: Fr[]) { if (values.length === 0) { return Fr.ZERO; } - const packedValues = PackedValues.fromValues(values); + const packedValues = await PackedValues.fromValues(values); this.cache.set(packedValues.hash.value, packedValues.values); return packedValues.hash; } diff --git a/yarn-project/simulator/src/public/dual_side_effect_trace.ts b/yarn-project/simulator/src/public/dual_side_effect_trace.ts index f6285e0e355c..4648ded5b70e 100644 --- a/yarn-project/simulator/src/public/dual_side_effect_trace.ts +++ b/yarn-project/simulator/src/public/dual_side_effect_trace.ts @@ -38,7 +38,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { return this.innerCallTrace.getCounter(); } - public tracePublicStorageRead( + public async tracePublicStorageRead( contractAddress: AztecAddress, slot: Fr, value: Fr, @@ -46,11 +46,11 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { leafIndex: Fr, path: Fr[], ) { - this.innerCallTrace.tracePublicStorageRead(contractAddress, slot, value, leafPreimage, leafIndex, path); - this.enqueuedCallTrace.tracePublicStorageRead(contractAddress, slot, value, leafPreimage, leafIndex, path); + await this.innerCallTrace.tracePublicStorageRead(contractAddress, slot, value, leafPreimage, leafIndex, path); + await this.enqueuedCallTrace.tracePublicStorageRead(contractAddress, slot, value, leafPreimage, leafIndex, path); } - public tracePublicStorageWrite( + public async tracePublicStorageWrite( contractAddress: AztecAddress, slot: Fr, value: Fr, @@ -60,7 +60,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { newLeafPreimage: PublicDataTreeLeafPreimage, insertionPath: Fr[], ) { - this.innerCallTrace.tracePublicStorageWrite( + await this.innerCallTrace.tracePublicStorageWrite( contractAddress, slot, value, @@ -70,7 +70,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { newLeafPreimage, 
insertionPath, ); - this.enqueuedCallTrace.tracePublicStorageWrite( + await this.enqueuedCallTrace.tracePublicStorageWrite( contractAddress, slot, value, @@ -160,7 +160,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { * Trace a nested call. * Accept some results from a finished nested call's trace into this one. */ - public traceNestedCall( + public async traceNestedCall( /** The trace of the nested call. */ nestedCallTrace: this, /** The execution environment of the nested call. */ @@ -174,7 +174,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { /** Function name for logging */ functionName: string = 'unknown', ) { - this.innerCallTrace.traceNestedCall( + await this.innerCallTrace.traceNestedCall( nestedCallTrace.innerCallTrace, nestedEnvironment, startGasLeft, @@ -182,7 +182,7 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { avmCallResults, functionName, ); - this.enqueuedCallTrace.traceNestedCall( + await this.enqueuedCallTrace.traceNestedCall( nestedCallTrace.enqueuedCallTrace, nestedEnvironment, startGasLeft, diff --git a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts index d21f38dee710..aa36a140db4f 100644 --- a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts +++ b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts @@ -53,7 +53,7 @@ describe('Enqueued-call Side Effect Trace', () => { trace = new PublicEnqueuedCallSideEffectTrace(startCounter); }); - it('Should trace storage reads', () => { + it('Should trace storage reads', async () => { const leafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); trace.tracePublicStorageRead(address, slot, value, leafPreimage, leafIndex, siblingPath); expect(trace.getCounter()).toBe(startCounterPlus1); @@ -62,7 +62,7 @@ describe('Enqueued-call Side Effect Trace', 
() => { expect(trace.getAvmCircuitHints().publicDataReads.items).toEqual([expected]); }); - it('Should trace storage writes', () => { + it('Should trace storage writes', async () => { const lowLeafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); const newLeafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); @@ -78,7 +78,7 @@ describe('Enqueued-call Side Effect Trace', () => { ); expect(trace.getCounter()).toBe(startCounterPlus1); - const leafSlot = computePublicDataTreeLeafSlot(address, slot); + const leafSlot = await computePublicDataTreeLeafSlot(address, slot); const expected = [new PublicDataUpdateRequest(leafSlot, value, startCounter /*contractAddress*/)]; expect(trace.getSideEffects().publicDataWrites).toEqual(expected); @@ -115,7 +115,7 @@ describe('Enqueued-call Side Effect Trace', () => { expect(trace.getAvmCircuitHints().nullifierReads.items).toEqual([expected]); }); - it('Should trace nullifiers', () => { + it('Should trace nullifiers', async () => { const lowLeafPreimage = new NullifierLeafPreimage(utxo, Fr.ZERO, 0n); trace.traceNewNullifier(utxo, lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath, siblingPath); expect(trace.getCounter()).toBe(startCounterPlus1); @@ -156,8 +156,8 @@ describe('Enqueued-call Side Effect Trace', () => { expect(trace.getSideEffects().unencryptedLogsHashes).toEqual(expectedHashes); }); - it('Should trace get contract instance', () => { - const instance = SerializableContractInstance.random(); + it('Should trace get contract instance', async () => { + const instance = await SerializableContractInstance.random(); const { version: _, ...instanceWithoutVersion } = instance; const exists = true; trace.traceGetContractInstance(address, exists, instance); @@ -176,21 +176,22 @@ describe('Enqueued-call Side Effect Trace', () => { for (let i = 0; i < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; i++) { const lowLeafPreimage = new PublicDataTreeLeafPreimage(new Fr(i), new Fr(i), Fr.ZERO, 0n); const 
newLeafPreimage = new PublicDataTreeLeafPreimage(new Fr(i + 1), new Fr(i + 1), Fr.ZERO, 0n); - trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); + await trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); } const leafPreimage = new PublicDataTreeLeafPreimage(new Fr(42), new Fr(42), Fr.ZERO, 0n); - expect(() => - trace.tracePublicStorageWrite( - AztecAddress.fromNumber(42), - new Fr(42), - value, - leafPreimage, - Fr.ZERO, - [], - leafPreimage, - [], - ), - ).toThrow(SideEffectLimitReachedError); + await expect( + async () => + await trace.tracePublicStorageWrite( + AztecAddress.fromNumber(42), + new Fr(42), + value, + leafPreimage, + Fr.ZERO, + [], + leafPreimage, + [], + ), + ).rejects.toThrow(SideEffectLimitReachedError); }); it('Should enforce maximum number of new note hashes', () => { @@ -259,20 +260,20 @@ describe('Enqueued-call Side Effect Trace', () => { }); describe.each([false, true])('Should merge forked traces', reverted => { - it(`${reverted ? 'Reverted' : 'Successful'} forked trace should be merged properly`, () => { + it(`${reverted ? 
'Reverted' : 'Successful'} forked trace should be merged properly`, async () => { const existsDefault = true; const nestedTrace = new PublicEnqueuedCallSideEffectTrace(startCounter); let testCounter = startCounter; const leafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); const lowLeafPreimage = new NullifierLeafPreimage(utxo, Fr.ZERO, 0n); - nestedTrace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); + await nestedTrace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); testCounter++; - nestedTrace.tracePublicStorageWrite(address, slot, value, leafPreimage, Fr.ZERO, [], leafPreimage, []); + await nestedTrace.tracePublicStorageWrite(address, slot, value, leafPreimage, Fr.ZERO, [], leafPreimage, []); testCounter++; - nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault, []); + await nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault, []); // counter does not increment for note hash checks - nestedTrace.traceNewNoteHash(address, utxo, Fr.ZERO, []); + await nestedTrace.traceNewNoteHash(address, utxo, Fr.ZERO, []); testCounter++; nestedTrace.traceNullifierCheck(utxo, true, lowLeafPreimage, Fr.ZERO, []); testCounter++; @@ -280,15 +281,15 @@ describe('Enqueued-call Side Effect Trace', () => { testCounter++; nestedTrace.traceNewNullifier(utxo, lowLeafPreimage, Fr.ZERO, [], []); testCounter++; - nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault, []); + await nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault, []); // counter does not increment for l1tol2 message checks - nestedTrace.traceNewL2ToL1Message(address, recipient, content); + await nestedTrace.traceNewL2ToL1Message(address, recipient, content); testCounter++; - nestedTrace.traceUnencryptedLog(address, log); + await nestedTrace.traceUnencryptedLog(address, log); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance); 
+ await nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance); + await nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance); testCounter++; trace.merge(nestedTrace, reverted); diff --git a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts index a7e24ac55208..ffbb744d711c 100644 --- a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts +++ b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts @@ -214,7 +214,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI this.incrementSideEffectCounter(); } - public tracePublicStorageWrite( + public async tracePublicStorageWrite( contractAddress: AztecAddress, slot: Fr, value: Fr, @@ -238,7 +238,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI ); } - const leafSlot = computePublicDataTreeLeafSlot(contractAddress, slot); + const leafSlot = await computePublicDataTreeLeafSlot(contractAddress, slot); this.publicDataWrites.push(new PublicDataUpdateRequest(leafSlot, value, this.sideEffectCounter)); // New hinting @@ -277,7 +277,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI } // TODO(dbanks12): make unique and silo instead of scoping - //const siloedNoteHash = siloNoteHash(contractAddress, noteHash); + //const siloedNoteHash = await siloNoteHash(contractAddress, noteHash); this.noteHashes.push(new NoteHash(noteHash, this.sideEffectCounter).scope(contractAddress)); this.log.debug(`NEW_NOTE_HASH cnt: ${this.sideEffectCounter}`); this.avmCircuitHints.noteHashWrites.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); @@ -417,7 +417,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI * Trace a nested call. 
* Accept some results from a finished nested call's trace into this one. */ - public traceNestedCall( + public async traceNestedCall( /** The trace of the nested call. */ _nestedCallTrace: this, /** The execution environment of the nested call. */ diff --git a/yarn-project/simulator/src/public/execution.ts b/yarn-project/simulator/src/public/execution.ts index e6fe8c174575..44eb462af1de 100644 --- a/yarn-project/simulator/src/public/execution.ts +++ b/yarn-project/simulator/src/public/execution.ts @@ -152,13 +152,13 @@ export interface PublicFunctionCallResult { functionName: string; } -export function resultToPublicCallRequest(result: PublicFunctionCallResult) { +export async function resultToPublicCallRequest(result: PublicFunctionCallResult) { const request = result.executionRequest; const item = new PublicCallStackItemCompressed( request.callContext.contractAddress, request.callContext, - computeVarArgsHash(request.args), - computeVarArgsHash(result.returnValues), + await computeVarArgsHash(request.args), + await computeVarArgsHash(result.returnValues), // TODO(@just-mitch): need better mapping from simulator to revert code. result.reverted ? RevertCode.APP_LOGIC_REVERTED : RevertCode.OK, Gas.from(result.startGasLeft), diff --git a/yarn-project/simulator/src/public/fee_payment.ts b/yarn-project/simulator/src/public/fee_payment.ts index 43cf1836afd5..21680cb90939 100644 --- a/yarn-project/simulator/src/public/fee_payment.ts +++ b/yarn-project/simulator/src/public/fee_payment.ts @@ -13,10 +13,10 @@ export function computeFeePayerBalanceStorageSlot(feePayer: AztecAddress) { /** * Computes the leaf slot in the public data tree for the balance of the fee payer in the Fee Juice. 
*/ -export function computeFeePayerBalanceLeafSlot(feePayer: AztecAddress): Fr { +export async function computeFeePayerBalanceLeafSlot(feePayer: AztecAddress): Promise { if (feePayer.isZero()) { return Fr.ZERO; } - const balanceSlot = computeFeePayerBalanceStorageSlot(feePayer); - return computePublicDataTreeLeafSlot(ProtocolContractAddress.FeeJuice, balanceSlot); + const balanceSlot = await computeFeePayerBalanceStorageSlot(feePayer); + return await computePublicDataTreeLeafSlot(ProtocolContractAddress.FeeJuice, balanceSlot); } diff --git a/yarn-project/simulator/src/public/public_db_sources.test.ts b/yarn-project/simulator/src/public/public_db_sources.test.ts index 3fa901bb9e46..93a59ee27684 100644 --- a/yarn-project/simulator/src/public/public_db_sources.test.ts +++ b/yarn-project/simulator/src/public/public_db_sources.test.ts @@ -19,16 +19,18 @@ describe('world_state_public_db', () => { let worldStateDB: WorldStateDB; - beforeEach(() => { + beforeEach(async () => { addresses = Array(DB_VALUES_SIZE).fill(0).map(AztecAddress.random); slots = Array(DB_VALUES_SIZE).fill(0).map(Fr.random); dbValues = Array(DB_VALUES_SIZE).fill(0).map(Fr.random); - const publicDataEntries = Array(DB_VALUES_SIZE) - .fill(0) - .map((_, idx: number) => { - const leafSlot = computePublicDataTreeLeafSlot(addresses[idx], slots[idx]); - return new PublicDataTreeLeafPreimage(leafSlot, dbValues[idx], Fr.ZERO, 0n); - }); + const publicDataEntries = await Promise.all( + Array(DB_VALUES_SIZE) + .fill(0) + .map(async (_, idx: number) => { + const leafSlot = await computePublicDataTreeLeafSlot(addresses[idx], slots[idx]); + return new PublicDataTreeLeafPreimage(leafSlot, dbValues[idx], Fr.ZERO, 0n); + }), + ); dbStorage = new Map>([ [ MerkleTreeId.PUBLIC_DATA_TREE, diff --git a/yarn-project/simulator/src/public/public_db_sources.ts b/yarn-project/simulator/src/public/public_db_sources.ts index 27177b3b9193..edb9374cc0e8 100644 --- a/yarn-project/simulator/src/public/public_db_sources.ts +++ 
b/yarn-project/simulator/src/public/public_db_sources.ts @@ -46,7 +46,7 @@ export class ContractsDataSourcePublicDB implements PublicContractsDB { * Add new contracts from a transaction * @param tx - The transaction to add contracts from. */ - public addNewContracts(tx: Tx): Promise { + public async addNewContracts(tx: Tx): Promise { // Extract contract class and instance data from logs and add to cache for this block const logs = tx.contractClassLogs.unrollLogs(); logs @@ -173,7 +173,7 @@ export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicS * @returns The current value in the storage slot. */ public async storageRead(contract: AztecAddress, slot: Fr): Promise { - const leafSlot = computePublicDataTreeLeafSlot(contract, slot).value; + const leafSlot = (await computePublicDataTreeLeafSlot(contract, slot)).value; const uncommitted = this.publicUncommittedWriteCache.get(leafSlot); if (uncommitted !== undefined) { return uncommitted; @@ -197,8 +197,8 @@ export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicS * @param newValue - The new value to store. * @returns The slot of the written leaf in the public data tree. 
*/ - public storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise { - const index = computePublicDataTreeLeafSlot(contract, slot).value; + public async storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise { + const index = (await computePublicDataTreeLeafSlot(contract, slot)).value; this.publicUncommittedWriteCache.set(index, newValue); return Promise.resolve(index); } @@ -245,7 +245,7 @@ export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicS throw new Error(`No L1 to L2 message found for message hash ${messageHash.toString()}`); } - const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); + const messageNullifier = await computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); const nullifierIndex = await this.getNullifierIndex(messageNullifier); if (nullifierIndex !== undefined) { @@ -350,7 +350,7 @@ export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicS } export async function readPublicState(db: MerkleTreeReadOperations, contract: AztecAddress, slot: Fr): Promise { - const leafSlot = computePublicDataTreeLeafSlot(contract, slot).toBigInt(); + const leafSlot = (await computePublicDataTreeLeafSlot(contract, slot)).toBigInt(); const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); if (!lowLeafResult || !lowLeafResult.alreadyPresent) { diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 2b2a0a27fbf1..d23bfbbee85d 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -95,7 +95,7 @@ describe('public_processor', () => { describe('process txs', () => { it('process private-only txs', async function () { - const tx = mockPrivateOnlyTx(); + const tx = await mockPrivateOnlyTx(); const [processed, failed] = await 
processor.process([tx], 1, handler); @@ -108,7 +108,7 @@ describe('public_processor', () => { }); it('runs a tx with enqueued public calls', async function () { - const tx = mockTxWithPublicCalls(); + const tx = await mockTxWithPublicCalls(); const [processed, failed] = await processor.process([tx], 1, handler); @@ -121,7 +121,7 @@ describe('public_processor', () => { }); it('runs a tx with reverted enqueued public calls', async function () { - const tx = mockTxWithPublicCalls(); + const tx = await mockTxWithPublicCalls(); mockedEnqueuedCallsResult.revertCode = RevertCode.APP_LOGIC_REVERTED; mockedEnqueuedCallsResult.revertReason = new SimulationError(`Failed`, []); @@ -138,7 +138,7 @@ describe('public_processor', () => { it('returns failed txs without aborting entire operation', async function () { publicTxProcessor.simulate.mockRejectedValue(new SimulationError(`Failed`, [])); - const tx = mockTxWithPublicCalls(); + const tx = await mockTxWithPublicCalls(); const [processed, failed] = await processor.process([tx], 1, handler); expect(processed).toEqual([]); @@ -151,7 +151,7 @@ describe('public_processor', () => { }); it('does not attempt to overfill a block', async function () { - const txs = Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed })); + const txs = await Promise.all(Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed }))); // We are passing 3 txs but only 2 can fit in the block const [processed, failed] = await processor.process(txs, 2, handler); @@ -168,7 +168,7 @@ describe('public_processor', () => { }); it('does not send a transaction to the prover if validation fails', async function () { - const tx = mockPrivateOnlyTx(); + const tx = await mockPrivateOnlyTx(); const txValidator: MockProxy> = mock(); txValidator.validateTxs.mockRejectedValue([[], [tx]]); @@ -190,13 +190,13 @@ describe('public_processor', () => { beforeEach(() => { worldStateDB.storageRead.mockResolvedValue(initialBalance); - 
worldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => - Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()), + worldStateDB.storageWrite.mockImplementation(async (address: AztecAddress, slot: Fr) => + (await computePublicDataTreeLeafSlot(address, slot)).toBigInt(), ); }); it('injects balance update with no public calls', async function () { - const tx = mockPrivateOnlyTx({ + const tx = await mockPrivateOnlyTx({ feePayer, }); @@ -210,7 +210,7 @@ describe('public_processor', () => { expect(processed).toHaveLength(1); expect(processed[0].data.feePayer).toEqual(feePayer); expect(processed[0].txEffect.publicDataWrites[0]).toEqual( - new PublicDataWrite(computeFeePayerBalanceLeafSlot(feePayer), initialBalance.sub(txFee)), + new PublicDataWrite(await computeFeePayerBalanceLeafSlot(feePayer), initialBalance.sub(txFee)), ); expect(failed).toEqual([]); @@ -224,7 +224,7 @@ describe('public_processor', () => { const txFee = new Fr(567); mockedAvmOutput.transactionFee = txFee; - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ feePayer, }); @@ -235,7 +235,7 @@ describe('public_processor', () => { expect(processed[0].data.feePayer).toEqual(feePayer); expect(processed[0].txEffect.transactionFee).toEqual(txFee); expect(processed[0].txEffect.publicDataWrites[0]).toEqual( - new PublicDataWrite(computeFeePayerBalanceLeafSlot(feePayer), initialBalance.sub(txFee)), + new PublicDataWrite(await computeFeePayerBalanceLeafSlot(feePayer), initialBalance.sub(txFee)), ); expect(failed).toEqual([]); @@ -250,7 +250,7 @@ describe('public_processor', () => { const pendingBalance = new Fr(2000); const pendingWrites = [ new PublicDataWrite(new Fr(888n), new Fr(999)), - new PublicDataWrite(computeFeePayerBalanceLeafSlot(feePayer), pendingBalance), + new PublicDataWrite(await computeFeePayerBalanceLeafSlot(feePayer), pendingBalance), new PublicDataWrite(new Fr(666n), new Fr(777)), ]; mockedAvmOutput.transactionFee = txFee; 
@@ -258,7 +258,7 @@ describe('public_processor', () => { mockedAvmOutput.accumulatedData.publicDataWrites[1] = pendingWrites[1]; mockedAvmOutput.accumulatedData.publicDataWrites[2] = pendingWrites[2]; - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ feePayer, }); @@ -270,7 +270,7 @@ describe('public_processor', () => { expect(countAccumulatedItems(processed[0].txEffect.publicDataWrites)).toBe(3); expect(processed[0].txEffect.publicDataWrites.slice(0, 3)).toEqual([ pendingWrites[0], - new PublicDataWrite(computeFeePayerBalanceLeafSlot(feePayer), pendingBalance.sub(txFee)), + new PublicDataWrite(await computeFeePayerBalanceLeafSlot(feePayer), pendingBalance.sub(txFee)), pendingWrites[2], ]); expect(failed).toEqual([]); @@ -285,7 +285,7 @@ describe('public_processor', () => { const txFee = initialBalance.add(new Fr(1)); mockedAvmOutput.transactionFee = txFee; - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ feePayer, }); diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 01e02975351b..057fcadc46e3 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -204,8 +204,8 @@ export class PublicProcessor { } const feeJuiceAddress = ProtocolContractAddress.FeeJuice; - const balanceSlot = computeFeePayerBalanceStorageSlot(feePayer); - const leafSlot = computeFeePayerBalanceLeafSlot(feePayer); + const balanceSlot = await computeFeePayerBalanceStorageSlot(feePayer); + const leafSlot = await computeFeePayerBalanceLeafSlot(feePayer); this.log.debug(`Deducting ${txFee.toBigInt()} balance in Fee Juice for ${feePayer}`); diff --git a/yarn-project/simulator/src/public/public_tx_context.ts b/yarn-project/simulator/src/public/public_tx_context.ts index 94057597a186..59e36cd5861e 100644 --- a/yarn-project/simulator/src/public/public_tx_context.ts +++ 
b/yarn-project/simulator/src/public/public_tx_context.ts @@ -300,7 +300,7 @@ export class PublicTxContext { /** * Generate the public inputs for the AVM circuit. */ - private generateAvmCircuitPublicInputs(endStateReference: StateReference): AvmCircuitPublicInputs { + private async generateAvmCircuitPublicInputs(endStateReference: StateReference): Promise { assert(this.halted, 'Can only get AvmCircuitPublicInputs after tx execution ends'); const ephemeralTrees = this.state.getActiveStateManager().merkleTrees.treeMap; diff --git a/yarn-project/simulator/src/public/public_tx_simulator.test.ts b/yarn-project/simulator/src/public/public_tx_simulator.test.ts index eeda04eb316e..2fb80250f8c2 100644 --- a/yarn-project/simulator/src/public/public_tx_simulator.test.ts +++ b/yarn-project/simulator/src/public/public_tx_simulator.test.ts @@ -71,7 +71,7 @@ describe('public_tx_simulator', () => { ) => Promise >; - const mockTxWithPublicCalls = ({ + const mockTxWithPublicCalls = async ({ numberOfSetupCalls = 0, numberOfAppLogicCalls = 0, hasPublicTeardownCall = false, @@ -191,7 +191,7 @@ describe('public_tx_simulator', () => { 1, // Add a default low leaf for the public data hints to be proved against. 
); const snap = new AppendOnlyTreeSnapshot( - Fr.fromBuffer(publicDataTree.getRoot(true)), + Fr.fromBuffer(await publicDataTree.getRoot(true)), Number(publicDataTree.getNumLeaves(true)), ); const header = BlockHeader.empty(); @@ -243,7 +243,7 @@ describe('public_tx_simulator', () => { }); it('runs a tx with enqueued public calls in setup phase only', async () => { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 2, }); @@ -278,7 +278,7 @@ describe('public_tx_simulator', () => { }); it('runs a tx with enqueued public calls in app logic phase only', async () => { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfAppLogicCalls: 2, }); @@ -313,7 +313,7 @@ describe('public_tx_simulator', () => { }); it('runs a tx with enqueued public calls in teardown phase only', async () => { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ hasPublicTeardownCall: true, }); @@ -346,7 +346,7 @@ describe('public_tx_simulator', () => { }); it('runs a tx with all phases', async () => { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 2, numberOfAppLogicCalls: 1, hasPublicTeardownCall: true, @@ -394,7 +394,7 @@ describe('public_tx_simulator', () => { }); it('deduplicates public data writes', async function () { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 1, numberOfAppLogicCalls: 1, hasPublicTeardownCall: true, @@ -441,13 +441,13 @@ describe('public_tx_simulator', () => { expect(output.accumulatedData.publicDataWrites.slice(0, numPublicDataWrites)).toEqual([ // squashed // new PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotA), fr(0x101)), - new PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotB), fr(0x151)), + new PublicDataWrite(await computePublicDataTreeLeafSlot(contractAddress, contractSlotB), fr(0x151)), - new 
PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotA), fr(0x103)), + new PublicDataWrite(await computePublicDataTreeLeafSlot(contractAddress, contractSlotA), fr(0x103)), // squashed // new PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotC), fr(0x201)), // new PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotC), fr(0x102)), - new PublicDataWrite(computePublicDataTreeLeafSlot(contractAddress, contractSlotC), fr(0x152)), + new PublicDataWrite(await computePublicDataTreeLeafSlot(contractAddress, contractSlotC), fr(0x152)), ]); }); @@ -476,7 +476,7 @@ describe('public_tx_simulator', () => { }); it('includes a transaction that reverts in app logic only', async function () { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 1, numberOfAppLogicCalls: 2, hasPublicTeardownCall: true, @@ -559,7 +559,7 @@ describe('public_tx_simulator', () => { }); it('includes a transaction that reverts in teardown only', async function () { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 1, numberOfAppLogicCalls: 2, hasPublicTeardownCall: true, @@ -639,7 +639,7 @@ describe('public_tx_simulator', () => { }); it('includes a transaction that reverts in app logic and teardown', async function () { - const tx = mockTxWithPublicCalls({ + const tx = await mockTxWithPublicCalls({ numberOfSetupCalls: 1, numberOfAppLogicCalls: 2, hasPublicTeardownCall: true, diff --git a/yarn-project/simulator/src/public/public_tx_simulator.ts b/yarn-project/simulator/src/public/public_tx_simulator.ts index 7c7546e9b0c4..098d015f3980 100644 --- a/yarn-project/simulator/src/public/public_tx_simulator.ts +++ b/yarn-project/simulator/src/public/public_tx_simulator.ts @@ -115,7 +115,7 @@ export class PublicTxSimulator { const endStateReference = await this.db.getStateReference(); - const avmProvingRequest = 
context.generateProvingRequest(endStateReference); + const avmProvingRequest = await context.generateProvingRequest(endStateReference); const avmCircuitPublicInputs = avmProvingRequest.inputs.output!; const revertCode = context.getFinalRevertCode(); diff --git a/yarn-project/simulator/src/public/side_effect_trace.test.ts b/yarn-project/simulator/src/public/side_effect_trace.test.ts index 7d7e024e967c..1b24aae6cc7f 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.test.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.test.ts @@ -69,9 +69,9 @@ describe('Side Effect Trace', () => { return trc.toPublicFunctionCallResult(avmEnvironment, startGasLeft, bytecode, avmCallResults.finalize()); }; - it('Should trace storage reads', () => { + it('Should trace storage reads', async () => { const leafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); - trace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); + await trace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); expect(trace.getCounter()).toBe(startCounterPlus1); const pxResult = toPxResult(trace); @@ -88,11 +88,11 @@ describe('Side Effect Trace', () => { expect(pxResult.avmCircuitHints.storageValues.items).toEqual([{ key: startCounterFr, value: value }]); }); - it('Should trace storage writes', () => { + it('Should trace storage writes', async () => { const lowLeafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); const newLeafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); - trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); + await trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); expect(trace.getCounter()).toBe(startCounterPlus1); const pxResult = toPxResult(trace); @@ -171,7 +171,7 @@ describe('Side Effect Trace', () => {
expect(pxResult.avmCircuitHints.nullifierExists.items).toEqual([{ key: startCounterFr, value: new Fr(exists) }]); }); - it('Should trace nullifiers', () => { + it('Should trace nullifiers', async () => { const lowLeafPreimage = new NullifierLeafPreimage(utxo, Fr.ZERO, 0n); trace.traceNewNullifier(utxo, lowLeafPreimage, Fr.ZERO, [], []); expect(trace.getCounter()).toBe(startCounterPlus1); @@ -230,8 +230,8 @@ describe('Side Effect Trace', () => { ]); }); - it('Should trace get contract instance', () => { - const instance = SerializableContractInstance.random(); + it('Should trace get contract instance', async () => { + const instance = await SerializableContractInstance.random(); const { version: _, ...instanceWithoutVersion } = instance; const exists = true; trace.traceGetContractInstance(address, exists, instance); @@ -248,36 +248,37 @@ describe('Side Effect Trace', () => { ]); }); describe('Maximum accesses', () => { - it('Should enforce maximum number of public storage reads', () => { + it('Should enforce maximum number of public storage reads', async () => { for (let i = 0; i < MAX_PUBLIC_DATA_READS_PER_TX; i++) { const leafPreimage = new PublicDataTreeLeafPreimage(new Fr(i), new Fr(i), Fr.ZERO, 0n); - trace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); + await trace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); } const leafPreimage = new PublicDataTreeLeafPreimage(new Fr(42), new Fr(42), Fr.ZERO, 0n); - expect(() => trace.tracePublicStorageRead(address, new Fr(42), value, leafPreimage, Fr.ZERO, [])).toThrow( - SideEffectLimitReachedError, - ); + await expect(() => + trace.tracePublicStorageRead(address, new Fr(42), value, leafPreimage, Fr.ZERO, []), + ).rejects.toThrow(SideEffectLimitReachedError); }); - it('Should enforce maximum number of public storage writes', () => { + it('Should enforce maximum number of public storage writes', async () => { for (let i = 0; i < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; 
i++) { const lowLeafPreimage = new PublicDataTreeLeafPreimage(new Fr(i), new Fr(i), Fr.ZERO, 0n); const newLeafPreimage = new PublicDataTreeLeafPreimage(new Fr(i + 1), new Fr(i + 1), Fr.ZERO, 0n); - trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); + await trace.tracePublicStorageWrite(address, slot, value, lowLeafPreimage, Fr.ZERO, [], newLeafPreimage, []); } const leafPreimage = new PublicDataTreeLeafPreimage(new Fr(42), new Fr(42), Fr.ZERO, 0n); - expect(() => - trace.tracePublicStorageWrite( - AztecAddress.fromNumber(42), - new Fr(42), - value, - leafPreimage, - Fr.ZERO, - [], - leafPreimage, - [], - ), - ).toThrow(SideEffectLimitReachedError); + await expect( + async () => + await trace.tracePublicStorageWrite( + AztecAddress.fromNumber(42), + new Fr(42), + value, + leafPreimage, + Fr.ZERO, + [], + leafPreimage, + [], + ), + ).rejects.toThrow(SideEffectLimitReachedError); }); it('Should enforce maximum number of note hash checks', () => { @@ -328,7 +329,7 @@ describe('Side Effect Trace', () => { ); }); - it('Should enforce maximum number of new nullifiers', () => { + it('Should enforce maximum number of new nullifiers', async () => { for (let i = 0; i < MAX_NULLIFIERS_PER_TX; i++) { const lowLeafPreimage = new NullifierLeafPreimage(new Fr(i + 1), Fr.ZERO, 0n); trace.traceNewNullifier(new Fr(i), lowLeafPreimage, Fr.ZERO, [], []); @@ -395,20 +396,20 @@ describe('Side Effect Trace', () => { }); }); - it('Should trace nested calls', () => { + it('Should trace nested calls', async () => { const existsDefault = true; const nestedTrace = new PublicSideEffectTrace(startCounter); let testCounter = startCounter; const leafPreimage = new PublicDataTreeLeafPreimage(slot, value, Fr.ZERO, 0n); const lowLeafPreimage = new NullifierLeafPreimage(utxo, Fr.ZERO, 0n); - nestedTrace.tracePublicStorageRead(address, slot, value, leafPreimage, Fr.ZERO, []); + await nestedTrace.tracePublicStorageRead(address, slot, value, 
leafPreimage, Fr.ZERO, []); testCounter++; - nestedTrace.tracePublicStorageWrite(address, slot, value, leafPreimage, Fr.ZERO, [], leafPreimage, []); + await nestedTrace.tracePublicStorageWrite(address, slot, value, leafPreimage, Fr.ZERO, [], leafPreimage, []); testCounter++; - nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault, []); + await nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault, []); // counter does not increment for note hash checks - nestedTrace.traceNewNoteHash(address, utxo, Fr.ZERO, []); + await nestedTrace.traceNewNoteHash(address, utxo, Fr.ZERO, []); testCounter++; nestedTrace.traceNullifierCheck(utxo, true, lowLeafPreimage, Fr.ZERO, []); testCounter++; @@ -416,18 +417,18 @@ describe('Side Effect Trace', () => { testCounter++; nestedTrace.traceNewNullifier(utxo, lowLeafPreimage, Fr.ZERO, [], []); testCounter++; - nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault, []); + await nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault, []); // counter does not increment for l1tol2 message checks - nestedTrace.traceNewL2ToL1Message(address, recipient, content); + await nestedTrace.traceNewL2ToL1Message(address, recipient, content); testCounter++; - nestedTrace.traceUnencryptedLog(address, log); + await nestedTrace.traceUnencryptedLog(address, log); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance); + await nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance); + await nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance); testCounter++; - trace.traceNestedCall(nestedTrace, avmEnvironment, startGasLeft, bytecode, avmCallResults); + await trace.traceNestedCall(nestedTrace, avmEnvironment, startGasLeft, bytecode, avmCallResults); // parent trace adopts nested call's 
counter expect(trace.getCounter()).toBe(testCounter); diff --git a/yarn-project/simulator/src/public/side_effect_trace.ts b/yarn-project/simulator/src/public/side_effect_trace.ts index 8e9f93256d07..d60bbe291b81 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.ts @@ -116,7 +116,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.sideEffectCounter++; } - public tracePublicStorageRead( + public async tracePublicStorageRead( contractAddress: AztecAddress, slot: Fr, value: Fr, @@ -144,7 +144,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.incrementSideEffectCounter(); } - public tracePublicStorageWrite( + public async tracePublicStorageWrite( contractAddress: AztecAddress, slot: Fr, value: Fr, @@ -370,7 +370,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { * Trace a nested call. * Accept some results from a finished nested call's trace into this one. */ - public traceNestedCall( + public async traceNestedCall( /** The trace of the nested call. */ nestedCallTrace: PublicSideEffectTrace, /** The execution environment of the nested call. 
*/ @@ -408,7 +408,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { result.startGasLeft.l2Gas - avmCallResults.gasLeft.l2Gas, ); - this.publicCallRequests.push(resultToPublicCallRequest(result)); + this.publicCallRequests.push(await resultToPublicCallRequest(result)); this.avmCircuitHints.externalCalls.items.push( new AvmExternalCallHint( diff --git a/yarn-project/simulator/src/public/side_effect_trace_interface.ts b/yarn-project/simulator/src/public/side_effect_trace_interface.ts index 06a1c6eb5637..6f6d4efaea2f 100644 --- a/yarn-project/simulator/src/public/side_effect_trace_interface.ts +++ b/yarn-project/simulator/src/public/side_effect_trace_interface.ts @@ -26,7 +26,7 @@ export interface PublicSideEffectTraceInterface { leafPreimage?: PublicDataTreeLeafPreimage, leafIndex?: Fr, path?: Fr[], - ): void; + ): Promise; tracePublicStorageWrite( contractAddress: AztecAddress, slot: Fr, // This is the storage slot not the computed leaf slot @@ -36,7 +36,7 @@ export interface PublicSideEffectTraceInterface { lowLeafPath?: Fr[], newLeafPreimage?: PublicDataTreeLeafPreimage, insertionPath?: Fr[], - ): void; + ): Promise; traceNoteHashCheck(contractAddress: AztecAddress, noteHash: Fr, leafIndex: Fr, exists: boolean, path?: Fr[]): void; traceNewNoteHash(contractAddress: AztecAddress, noteHash: Fr, leafIndex?: Fr, path?: Fr[]): void; traceNullifierCheck( @@ -52,7 +52,7 @@ export interface PublicSideEffectTraceInterface { lowLeafIndex?: Fr, lowLeafPath?: Fr[], insertionPath?: Fr[], - ): void; + ): Promise; traceL1ToL2MessageCheck( contractAddress: AztecAddress, msgHash: Fr, @@ -87,7 +87,7 @@ export interface PublicSideEffectTraceInterface { avmCallResults: AvmContractCallResult, /** Function name */ functionName: string, - ): void; + ): Promise; traceEnqueuedCall( /** The call request from private that enqueued this call. 
*/ publicCallRequest: PublicCallRequest, diff --git a/yarn-project/simulator/src/public/transitional_adapters.ts b/yarn-project/simulator/src/public/transitional_adapters.ts index 09ec0094110f..ec8b1ed8f15e 100644 --- a/yarn-project/simulator/src/public/transitional_adapters.ts +++ b/yarn-project/simulator/src/public/transitional_adapters.ts @@ -24,7 +24,7 @@ import { assertLength } from '@aztec/foundation/serialize'; import { type PublicEnqueuedCallSideEffectTrace } from './enqueued_call_side_effect_trace.js'; -export function generateAvmCircuitPublicInputs( +export async function generateAvmCircuitPublicInputs( trace: PublicEnqueuedCallSideEffectTrace, globalVariables: GlobalVariables, startStateReference: StateReference, @@ -39,7 +39,7 @@ export function generateAvmCircuitPublicInputs( endGasUsed: Gas, transactionFee: Fr, revertCode: RevertCode, -): AvmCircuitPublicInputs { +): Promise { const startTreeSnapshots = new TreeSnapshots( startStateReference.l1ToL2MessageTree, startStateReference.partial.noteHashTree, @@ -103,9 +103,9 @@ export function generateAvmCircuitPublicInputs( const noteHash = scopedNoteHash.value; if (!noteHash.isZero()) { const noteHashIndexInTx = i + countAccumulatedItems(noteHashesFromPrivate); - const nonce = computeNoteHashNonce(txHash, noteHashIndexInTx); - const uniqueNoteHash = computeUniqueNoteHash(nonce, noteHash); - const siloedNoteHash = siloNoteHash(scopedNoteHash.contractAddress, uniqueNoteHash); + const nonce = await computeNoteHashNonce(txHash, noteHashIndexInTx); + const uniqueNoteHash = await computeUniqueNoteHash(nonce, noteHash); + const siloedNoteHash = await siloNoteHash(scopedNoteHash.contractAddress, uniqueNoteHash); avmCircuitPublicInputs.accumulatedData.noteHashes[noteHashIndexInTx] = siloedNoteHash; } } diff --git a/yarn-project/simulator/src/test/utils.ts b/yarn-project/simulator/src/test/utils.ts index 363034d66863..1e2aa48ace6e 100644 --- a/yarn-project/simulator/src/test/utils.ts +++ 
b/yarn-project/simulator/src/test/utils.ts @@ -11,7 +11,7 @@ import { sha256ToField } from '@aztec/foundation/crypto'; * @param secret - The secret to unlock the message. * @returns The L1 to L2 message. */ -export const buildL1ToL2Message = ( +export const buildL1ToL2Message = async ( selector: string, contentPreimage: Fr[], targetContract: AztecAddress, @@ -22,7 +22,7 @@ export const buildL1ToL2Message = ( const selectorBuf = Buffer.from(selector, 'hex'); const content = sha256ToField([selectorBuf, ...contentPreimage]); - const secretHash = computeSecretHash(secret); + const secretHash = await computeSecretHash(secret); return new L1ToL2Message( new L1Actor(EthAddress.random(), 1), diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 092b952a0b98..34e0428521bf 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -179,8 +179,8 @@ export class TXE implements TypedOracle { } async addContractArtifact(artifact: ContractArtifact) { - const contractClass = getContractClassFromArtifact(artifact); - await this.txeDatabase.addContractArtifact(computeContractClassId(contractClass), artifact); + const contractClass = await getContractClassFromArtifact(artifact); + await this.txeDatabase.addContractArtifact(await computeContractClassId(contractClass), artifact); } async getPrivateContextInputs( @@ -213,7 +213,7 @@ export class TXE implements TypedOracle { const account = this.txeDatabase.getAccount(address); const privateKey = await this.keyStore.getMasterSecretKey(account.publicKeys.masterIncomingViewingPublicKey); const schnorr = new Schnorr(); - const signature = schnorr.constructSignature(messageHash.toBuffer(), privateKey).toBuffer(); + const signature = (await schnorr.constructSignature(messageHash.toBuffer(), privateKey)).toBuffer(); const authWitness = new AuthWitness(messageHash, [...signature]); return this.txeDatabase.addAuthWitness(authWitness.requestHash, 
authWitness.witness); } @@ -237,7 +237,7 @@ export class TXE implements TypedOracle { } async addNullifiers(contractAddress: AztecAddress, nullifiers: Fr[]) { - const siloedNullifiers = nullifiers.map(nullifier => siloNullifier(contractAddress, nullifier)); + const siloedNullifiers = await Promise.all(nullifiers.map(nullifier => siloNullifier(contractAddress, nullifier))); await this.addSiloedNullifiers(siloedNullifiers); } @@ -246,7 +246,7 @@ export class TXE implements TypedOracle { await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, siloedNoteHashes); } async addNoteHashes(contractAddress: AztecAddress, noteHashes: Fr[]) { - const siloedNoteHashes = noteHashes.map(noteHash => siloNoteHash(contractAddress, noteHash)); + const siloedNoteHashes = await Promise.all(noteHashes.map(noteHash => siloNoteHash(contractAddress, noteHash))); await this.addSiloedNoteHashes(siloedNoteHashes); } @@ -441,14 +441,14 @@ export class TXE implements TypedOracle { return Promise.resolve(); } - notifyNullifiedNote(innerNullifier: Fr, noteHash: Fr, counter: number) { - this.noteCache.nullifyNote(this.contractAddress, innerNullifier, noteHash); + async notifyNullifiedNote(innerNullifier: Fr, noteHash: Fr, counter: number) { + await this.noteCache.nullifyNote(this.contractAddress, innerNullifier, noteHash); this.sideEffectCounter = counter + 1; return Promise.resolve(); } async checkNullifierExists(innerNullifier: Fr): Promise { - const nullifier = siloNullifier(this.contractAddress, innerNullifier!); + const nullifier = await siloNullifier(this.contractAddress, innerNullifier!); const db = await this.trees.getLatest(); const index = await db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer()); return index !== undefined; @@ -472,7 +472,7 @@ export class TXE implements TypedOracle { const values = []; for (let i = 0n; i < numberOfElements; i++) { const storageSlot = startStorageSlot.add(new Fr(i)); - const leafSlot = computePublicDataTreeLeafSlot(contractAddress, 
storageSlot).toBigInt(); + const leafSlot = (await computePublicDataTreeLeafSlot(contractAddress, storageSlot)).toBigInt(); const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); @@ -493,11 +493,13 @@ export class TXE implements TypedOracle { async storageWrite(startStorageSlot: Fr, values: Fr[]): Promise { const db = await this.trees.getLatest(); - const publicDataWrites = values.map((value, i) => { - const storageSlot = startStorageSlot.add(new Fr(i)); - this.logger.debug(`Oracle storage write: slot=${storageSlot.toString()} value=${value}`); - return new PublicDataTreeLeaf(computePublicDataTreeLeafSlot(this.contractAddress, storageSlot), value); - }); + const publicDataWrites = await Promise.all( + values.map(async (value, i) => { + const storageSlot = startStorageSlot.add(new Fr(i)); + this.logger.debug(`Oracle storage write: slot=${storageSlot.toString()} value=${value}`); + return new PublicDataTreeLeaf(await computePublicDataTreeLeafSlot(this.contractAddress, storageSlot), value); + }), + ); await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, publicDataWrites.map(write => write.toBuffer()), @@ -620,9 +622,14 @@ export class TXE implements TypedOracle { return undefined; } - const f = artifact.functions.find(f => - FunctionSelector.fromNameAndParameters(f.name, f.parameters).equals(selector), - ); + const f = ( + await Promise.all( + artifact.functions.map(async f => { + const fs = await FunctionSelector.fromNameAndParameters(f.name, f.parameters); + return fs.equals(selector) ? 
f : undefined; + }), + ) + ).find(f => !!f); if (!f) { return undefined; } @@ -736,8 +743,8 @@ export class TXE implements TypedOracle { ); } - notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number) { - this.noteCache.setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter); + async notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: number) { + await this.noteCache.setMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter); } debugLog(message: string, fields: Fr[]): void { @@ -759,7 +766,7 @@ export class TXE implements TypedOracle { async #calculateTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); - const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); + const sharedSecret = await computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); return siloedSecret; @@ -816,7 +823,7 @@ export class TXE implements TypedOracle { } async avmOpcodeNullifierExists(innerNullifier: Fr, targetAddress: AztecAddress): Promise { - const nullifier = siloNullifier(targetAddress, innerNullifier!); + const nullifier = await siloNullifier(targetAddress, innerNullifier!); const db = await this.trees.getLatest(); const index = await db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer()); return index !== undefined; @@ -824,14 +831,14 @@ export class TXE implements TypedOracle { async avmOpcodeEmitNullifier(nullifier: Fr) { const db = await this.trees.getLatest(); - const siloedNullifier = siloNullifier(this.contractAddress, nullifier); + const siloedNullifier = await siloNullifier(this.contractAddress, nullifier); await 
db.batchInsert(MerkleTreeId.NULLIFIER_TREE, [siloedNullifier.toBuffer()], NULLIFIER_SUBTREE_HEIGHT); return Promise.resolve(); } async avmOpcodeEmitNoteHash(noteHash: Fr) { const db = await this.trees.getLatest(); - const siloedNoteHash = siloNoteHash(this.contractAddress, noteHash); + const siloedNoteHash = await siloNoteHash(this.contractAddress, noteHash); await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, [siloedNoteHash]); return Promise.resolve(); } @@ -839,7 +846,7 @@ export class TXE implements TypedOracle { async avmOpcodeStorageRead(slot: Fr) { const db = await this.trees.getLatest(); - const leafSlot = computePublicDataTreeLeafSlot(this.contractAddress, slot); + const leafSlot = await computePublicDataTreeLeafSlot(this.contractAddress, slot); const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot.toBigInt()); if (!lowLeafResult || !lowLeafResult.alreadyPresent) { diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index 28cf3a97a175..6794c2bf581e 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -48,7 +48,7 @@ export class TXEService { const txeDatabase = new TXEDatabase(store); // Register protocol contracts. 
for (const name of protocolContractNames) { - const { contractClass, instance, artifact } = getCanonicalProtocolContract(name); + const { contractClass, instance, artifact } = await getCanonicalProtocolContract(name); await txeDatabase.addContractArtifact(contractClass.id, artifact); await txeDatabase.addContractInstance(instance); } @@ -76,7 +76,7 @@ export class TXEService { const l2Block = L2Block.empty(); header.state = await trees.getStateReference(true); header.globalVariables.blockNumber = new Fr(blockNumber); - await trees.appendLeaves(MerkleTreeId.ARCHIVE, [header.hash()]); + await trees.appendLeaves(MerkleTreeId.ARCHIVE, [await header.hash()]); l2Block.archive.root = Fr.fromBuffer((await trees.getTreeInfo(MerkleTreeId.ARCHIVE, true)).root); l2Block.header = header; this.logger.debug(`Block ${blockNumber} created, header hash ${header.hash().toString()}`); @@ -92,8 +92,8 @@ export class TXEService { return toForeignCallResult([]); } - deriveKeys(secret: ForeignCallSingle) { - const keys = (this.typedOracle as TXE).deriveKeys(fromSingle(secret)); + async deriveKeys(secret: ForeignCallSingle) { + const keys = await (this.typedOracle as TXE).deriveKeys(fromSingle(secret)); return toForeignCallResult(keys.publicKeys.toFields().map(toSingle)); } @@ -113,7 +113,7 @@ export class TXEService { `Deploy ${artifact.name} with initializer ${initializerStr}(${decodedArgs}) and public keys hash ${publicKeysHashFr}`, ); - const instance = getContractInstanceFromDeployParams(artifact, { + const instance = await getContractInstanceFromDeployParams(artifact, { constructorArgs: decodedArgs, skipArgsDecoding: true, salt: Fr.ONE, @@ -148,11 +148,13 @@ export class TXEService { const contractAddressFr = addressFromSingle(contractAddress); const db = await trees.getLatest(); - const publicDataWrites = valuesFr.map((value, i) => { - const storageSlot = startStorageSlotFr.add(new Fr(i)); - this.logger.debug(`Oracle storage write: slot=${storageSlot.toString()} value=${value}`); - 
return new PublicDataTreeLeaf(computePublicDataTreeLeafSlot(contractAddressFr, storageSlot), value); - }); + const publicDataWrites = await Promise.all( + valuesFr.map(async (value, i) => { + const storageSlot = startStorageSlotFr.add(new Fr(i)); + this.logger.debug(`Oracle storage write: slot=${storageSlot.toString()} value=${value}`); + return new PublicDataTreeLeaf(await computePublicDataTreeLeafSlot(contractAddressFr, storageSlot), value); + }), + ); await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, publicDataWrites.map(write => write.toBuffer()), @@ -174,10 +176,10 @@ export class TXEService { } async addAccount(secret: ForeignCallSingle) { - const keys = (this.typedOracle as TXE).deriveKeys(fromSingle(secret)); + const keys = await (this.typedOracle as TXE).deriveKeys(fromSingle(secret)); const args = [keys.publicKeys.masterIncomingViewingPublicKey.x, keys.publicKeys.masterIncomingViewingPublicKey.y]; const artifact = SchnorrAccountContractArtifact; - const instance = getContractInstanceFromDeployParams(artifact, { + const instance = await getContractInstanceFromDeployParams(artifact, { constructorArgs: args, skipArgsDecoding: true, salt: Fr.ONE, @@ -191,7 +193,7 @@ export class TXEService { await (this.typedOracle as TXE).addContractArtifact(artifact); const keyStore = (this.typedOracle as TXE).getKeyStore(); - const completeAddress = await keyStore.addAccount(fromSingle(secret), computePartialAddress(instance)); + const completeAddress = await keyStore.addAccount(fromSingle(secret), await computePartialAddress(instance)); const accountStore = (this.typedOracle as TXE).getTXEDatabase(); await accountStore.setAccount(completeAddress.address, completeAddress); this.logger.debug(`Created account ${completeAddress.address}`); @@ -528,8 +530,10 @@ export class TXEService { return toForeignCallResult([toSingle(newArgsHash)]); } - public notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: ForeignCallSingle) { - 
this.typedOracle.notifySetMinRevertibleSideEffectCounter(fromSingle(minRevertibleSideEffectCounter).toNumber()); + public async notifySetMinRevertibleSideEffectCounter(minRevertibleSideEffectCounter: ForeignCallSingle) { + await this.typedOracle.notifySetMinRevertibleSideEffectCounter( + fromSingle(minRevertibleSideEffectCounter).toNumber(), + ); } async getChainId() { diff --git a/yarn-project/txe/src/util/txe_public_contract_data_source.ts b/yarn-project/txe/src/util/txe_public_contract_data_source.ts index a56aeb40d8b2..c86808f60f8c 100644 --- a/yarn-project/txe/src/util/txe_public_contract_data_source.ts +++ b/yarn-project/txe/src/util/txe_public_contract_data_source.ts @@ -48,7 +48,7 @@ export class TXEPublicContractDataSource implements ContractDataSource { artifactHash: contractClass!.artifactHash, packedBytecode: contractClass!.packedBytecode, publicFunctions: publicFunctions, - privateFunctionsRoot: new Fr(privateFunctionsRoot!.root), + privateFunctionsRoot: new Fr((await privateFunctionsRoot!).root), version: contractClass!.version, privateFunctions: [], unconstrainedFunctions: [], diff --git a/yarn-project/txe/src/util/txe_world_state_db.ts b/yarn-project/txe/src/util/txe_world_state_db.ts index d1a2b6f17375..349f3899d20f 100644 --- a/yarn-project/txe/src/util/txe_world_state_db.ts +++ b/yarn-project/txe/src/util/txe_world_state_db.ts @@ -15,7 +15,7 @@ export class TXEWorldStateDB extends WorldStateDB { } override async storageRead(contract: AztecAddress, slot: Fr): Promise { - const leafSlot = computePublicDataTreeLeafSlot(contract, slot).toBigInt(); + const leafSlot = (await computePublicDataTreeLeafSlot(contract, slot)).toBigInt(); const lowLeafResult = await this.merkleDb.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); @@ -33,7 +33,7 @@ export class TXEWorldStateDB extends WorldStateDB { override async storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise { await this.merkleDb.batchInsert( 
MerkleTreeId.PUBLIC_DATA_TREE, - [new PublicDataTreeLeaf(computePublicDataTreeLeafSlot(contract, slot), newValue).toBuffer()], + [new PublicDataTreeLeaf(await computePublicDataTreeLeafSlot(contract, slot), newValue).toBuffer()], 0, ); return newValue.toBigInt(); diff --git a/yarn-project/types/src/interfaces/hasher.ts b/yarn-project/types/src/interfaces/hasher.ts index 5fb1da8b3f23..4d8603d70447 100644 --- a/yarn-project/types/src/interfaces/hasher.ts +++ b/yarn-project/types/src/interfaces/hasher.ts @@ -8,12 +8,12 @@ export interface Hasher { * @param rhs - The second array. * @returns The new 32-byte hash. */ - hash(lhs: Uint8Array, rhs: Uint8Array): Buffer; + hash(lhs: Uint8Array, rhs: Uint8Array): Promise; /** * Hashes an array of buffers. * @param inputs - The array of buffers to hash. * @returns The resulting 32-byte hash. */ - hashInputs(inputs: Buffer[]): Buffer; + hashInputs(inputs: Buffer[]): Promise; } diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index e79fe5fa8c13..6ade573744a0 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -43,7 +43,7 @@ export class ValidationService { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct const buf = Buffer32.fromBuffer( - keccak256(proposal.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation)), + keccak256(await proposal.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation)), ); const sig = await this.keyStore.signMessage(buf); return new BlockAttestation(proposal.payload, sig); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index df2bee4ccbb2..726b331edf87 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ 
b/yarn-project/validator-client/src/validator.test.ts @@ -77,13 +77,13 @@ describe('ValidationService', () => { }); it('Should a timeout if we do not collect enough attestations in time', async () => { - const proposal = makeBlockProposal(); + const proposal = await makeBlockProposal(); await expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError); }); it('Should throw an error if the transactions are not available', async () => { - const proposal = makeBlockProposal(); + const proposal = await makeBlockProposal(); // mock the p2pClient.getTxStatus to return undefined for all transactions p2pClient.getTxStatus.mockImplementation(() => undefined); @@ -148,21 +148,21 @@ describe('ValidationService', () => { const archive = Fr.random(); const txHashes = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - const proposal = makeBlockProposal({ signer, archive, txHashes }); + const proposal = await makeBlockProposal({ signer, archive, txHashes }); // Mock the attestations to be returned - const expectedAttestations = [ + const expectedAttestations = await Promise.all([ makeBlockAttestation({ signer: attestor1, archive, txHashes }), makeBlockAttestation({ signer: attestor2, archive, txHashes }), - ]; - p2pClient.getAttestationsForSlot.mockImplementation((slot, proposalId) => { + ]); + p2pClient.getAttestationsForSlot.mockImplementation(async (slot, proposalId) => { if ( slot === proposal.payload.header.globalVariables.slotNumber.toBigInt() && proposalId === proposal.archive.toString() ) { - return Promise.resolve(expectedAttestations); + return expectedAttestations; } - return Promise.resolve([]); + return []; }); // Perform the query diff --git a/yarn-project/world-state/src/native/merkle_trees_facade.ts b/yarn-project/world-state/src/native/merkle_trees_facade.ts index 6d3ea76d7fda..c9fe87dcab00 100644 --- a/yarn-project/world-state/src/native/merkle_trees_facade.ts +++ b/yarn-project/world-state/src/native/merkle_trees_facade.ts @@ 
-191,7 +191,7 @@ export class MerkleTreesForkFacade extends MerkleTreesFacade implements MerkleTr async updateArchive(header: BlockHeader): Promise { await this.instance.call(WorldStateMessageType.UPDATE_ARCHIVE, { forkId: this.revision.forkId, - blockHeaderHash: header.hash().toBuffer(), + blockHeaderHash: (await header.hash()).toBuffer(), blockStateRef: blockStateReference(header.state), }); } diff --git a/yarn-project/world-state/src/native/native_world_state.ts b/yarn-project/world-state/src/native/native_world_state.ts index 6ba046827694..5192a20259fa 100644 --- a/yarn-project/world-state/src/native/native_world_state.ts +++ b/yarn-project/world-state/src/native/native_world_state.ts @@ -136,7 +136,7 @@ export class NativeWorldStateService implements MerkleTreeDatabase { // the initial header _must_ be the first element in the archive tree // if this assertion fails, check that the hashing done in Header in yarn-project matches the initial header hash done in world_state.cpp - const initialHeaderIndex = await committed.findLeafIndex(MerkleTreeId.ARCHIVE, this.initialHeader.hash()); + const initialHeaderIndex = await committed.findLeafIndex(MerkleTreeId.ARCHIVE, await this.initialHeader.hash()); assert.strictEqual(initialHeaderIndex, 0n, 'Invalid initial archive state'); } diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index a8840645fc98..5525dd6d84df 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -39,14 +39,14 @@ describe('ServerWorldStateSynchronizer', () => { const LATEST_BLOCK_NUMBER = 5; - beforeAll(() => { + beforeAll(async () => { log = createDebugLogger('aztec:world-state:test:server_world_state_synchronizer'); // Seed l1 to l2 msgs l1ToL2Messages = times(randomInt(2 ** 
L1_TO_L2_MSG_SUBTREE_HEIGHT), Fr.random); // Compute inHash for verification - inHash = new MerkleTreeCalculator( + inHash = await new MerkleTreeCalculator( L1_TO_L2_MSG_SUBTREE_HEIGHT, Buffer.alloc(32), new SHA256Trunc().hash, diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index ae344f4144a5..c2ef92253bcc 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -258,7 +258,7 @@ export class ServerWorldStateSynchronizer // Note that we cannot optimize this check by checking the root of the subtree after inserting the messages // to the real L1_TO_L2_MESSAGE_TREE (like we do in merkleTreeDb.handleL2BlockAndMessages(...)) because that // tree uses pedersen and we don't have access to the converted root. - this.verifyMessagesHashToInHash(l1ToL2Messages, l2Block.header.contentCommitment.inHash); + await this.verifyMessagesHashToInHash(l1ToL2Messages, l2Block.header.contentCommitment.inHash); // If the above check succeeds, we can proceed to handle the block. const result = await this.merkleTreeDb.handleL2BlockAndMessages(l2Block, l1ToL2Messages); @@ -312,14 +312,14 @@ export class ServerWorldStateSynchronizer * @param inHash - The inHash of the block. * @throws If the L1 to L2 messages do not hash to the block inHash. 
*/ - protected verifyMessagesHashToInHash(l1ToL2Messages: Fr[], inHash: Buffer) { + protected async verifyMessagesHashToInHash(l1ToL2Messages: Fr[], inHash: Buffer) { const treeCalculator = new MerkleTreeCalculator( L1_TO_L2_MSG_SUBTREE_HEIGHT, Buffer.alloc(32), new SHA256Trunc().hash, ); - const root = treeCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())); + const root = await treeCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())); if (!root.equals(inHash)) { throw new Error('Obtained L1 to L2 messages failed to be hashed to the block inHash'); diff --git a/yarn-project/world-state/src/test/integration.test.ts b/yarn-project/world-state/src/test/integration.test.ts index 52fde9a94ae9..7ba342dbd470 100644 --- a/yarn-project/world-state/src/test/integration.test.ts +++ b/yarn-project/world-state/src/test/integration.test.ts @@ -106,33 +106,33 @@ describe('world-state integration', () => { describe('block syncing', () => { it('performs initial sync from the archiver from genesis', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await expectSynchedToBlock(5); }); it('syncs new blocks from the archiver from genesis', async () => { await synchronizer.start(); - archiver.createBlocks(5); + await archiver.createBlocks(5); await awaitSync(5); await expectSynchedToBlock(5); }); it('syncs new blocks as they are added to archiver', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); - archiver.createBlocks(3); + await archiver.createBlocks(3); await awaitSync(8); await expectSynchedToBlock(8); }); it('syncs new blocks via multiple batches', async () => { - archiver.createBlocks(10); + await archiver.createBlocks(10); await synchronizer.start(); await expectSynchedToBlock(10); - archiver.createBlocks(10); + await archiver.createBlocks(10); await awaitSync(20); await expectSynchedToBlock(20); }); @@ -141,18 +141,18 @@ 
describe('world-state integration', () => { const getBlocksSpy = jest.spyOn(archiver, 'getBlocks'); await synchronizer.start(); - archiver.createBlocks(5); + await archiver.createBlocks(5); await awaitSync(5); await expectSynchedToBlock(5); await synchronizer.stopBlockStream(); synchronizer = new TestWorldStateSynchronizer(db, archiver, config, new NoopTelemetryClient()); - archiver.createBlocks(3); + await archiver.createBlocks(3); await synchronizer.start(); await expectSynchedToBlock(8); - archiver.createBlocks(4); + await archiver.createBlocks(4); await awaitSync(12); await expectSynchedToBlock(12); @@ -170,7 +170,7 @@ describe('world-state integration', () => { new NoopTelemetryClient(), ); - archiver.createBlocks(5); + await archiver.createBlocks(5); archiver.setProvenBlockNumber(3); await synchronizer.start(); await expectSynchedToBlock(3); @@ -183,7 +183,7 @@ describe('world-state integration', () => { describe('reorgs', () => { it('prunes blocks upon a reorg and resyncs', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await expectSynchedToBlock(5); @@ -192,7 +192,7 @@ describe('world-state integration', () => { archiver.setPrefilledBlocks(blocks, messages); archiver.removeBlocks(3); - archiver.createBlocks(2); + await archiver.createBlocks(2); await sleep(2000); await awaitSync(4); await expectSynchedToBlock(4); @@ -211,44 +211,44 @@ describe('world-state integration', () => { }); it('syncs immediately to the latest block', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await expectSynchedToBlock(5); - archiver.createBlocks(2); + await archiver.createBlocks(2); await expectSynchedToBlock(5); await synchronizer.syncImmediate(); await expectSynchedToBlock(7); }); it('syncs immediately to at least the target block', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await 
expectSynchedToBlock(5); - archiver.createBlocks(2); + await archiver.createBlocks(2); await expectSynchedToBlock(5); await synchronizer.syncImmediate(6); await expectSynchedToBlock(7); }); it('syncs immediately to a past block', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await expectSynchedToBlock(5); - archiver.createBlocks(2); + await archiver.createBlocks(2); await expectSynchedToBlock(5); await synchronizer.syncImmediate(4); await expectSynchedToBlock(5); }); it('fails to sync to unreachable block', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); await synchronizer.start(); await expectSynchedToBlock(5); - archiver.createBlocks(2); + await archiver.createBlocks(2); await expectSynchedToBlock(5); await expect(() => synchronizer.syncImmediate(9)).rejects.toThrow(/unable to sync/i); }); @@ -256,7 +256,7 @@ describe('world-state integration', () => { describe('finalized chain', () => { it('syncs finalized chain tip', async () => { - archiver.createBlocks(5); + await archiver.createBlocks(5); archiver.setProvenBlockNumber(3); await synchronizer.start(); @@ -272,7 +272,7 @@ describe('world-state integration', () => { class TestWorldStateSynchronizer extends ServerWorldStateSynchronizer { // Skip validation for the sake of this test - protected override verifyMessagesHashToInHash(_l1ToL2Messages: Fr[], _inHash: Buffer): void {} + protected override async verifyMessagesHashToInHash(_l1ToL2Messages: Fr[], _inHash: Buffer): Promise {} // Stops the block stream but not the db so we can reuse it for another synchronizer public async stopBlockStream() { diff --git a/yarn-project/world-state/src/test/utils.ts b/yarn-project/world-state/src/test/utils.ts index 4edd18883840..de6a525cdba7 100644 --- a/yarn-project/world-state/src/test/utils.ts +++ b/yarn-project/world-state/src/test/utils.ts @@ -18,7 +18,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { type 
NativeWorldStateService } from '../native/native_world_state.js'; export async function mockBlock(blockNum: number, size: number, fork: MerkleTreeWriteOperations) { - const l2Block = L2Block.random(blockNum, size); + const l2Block = await L2Block.random(blockNum, size); const l1ToL2Messages = Array(16).fill(0).map(Fr.random); const paddedTxEffects = padArrayEnd( diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts index 5e703e9c3131..50bfb21d133b 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts @@ -69,7 +69,7 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeReadOperati index: bigint, ): Promise | undefined> { const snapshot = await this.#getTreeSnapshot(treeId); - return snapshot.getLeafValue(BigInt(index)) as MerkleTreeLeafType | undefined; + return (await snapshot.getLeafValue(BigInt(index))) as MerkleTreeLeafType | undefined; } async getPreviousValueIndex( diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index bb7bcd58a8d4..8214794f1100 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -304,21 +304,21 @@ export class MerkleTrees implements MerkleTreeAdminDatabase { * @param includeUncommitted - Indicates whether to include uncommitted data. 
* @returns The current state reference */ - public getStateReference(includeUncommitted: boolean): Promise { - const getAppendOnlyTreeSnapshot = (treeId: MerkleTreeId) => { + public async getStateReference(includeUncommitted: boolean): Promise { + const getAppendOnlyTreeSnapshot = async (treeId: MerkleTreeId) => { const tree = this.trees[treeId] as AppendOnlyTree; return new AppendOnlyTreeSnapshot( - Fr.fromBuffer(tree.getRoot(includeUncommitted)), + Fr.fromBuffer(await tree.getRoot(includeUncommitted)), Number(tree.getNumLeaves(includeUncommitted)), ); }; const state = new StateReference( - getAppendOnlyTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), + await getAppendOnlyTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), new PartialStateReference( - getAppendOnlyTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), - getAppendOnlyTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), - getAppendOnlyTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), + await getAppendOnlyTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), + await getAppendOnlyTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), + await getAppendOnlyTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), ), ); return Promise.resolve(state); @@ -528,7 +528,7 @@ export class MerkleTrees implements MerkleTreeAdminDatabase { throw new Error('State in header does not match current state'); } - const blockHash = header.hash(); + const blockHash = await header.hash(); await this.#appendLeaves(MerkleTreeId.ARCHIVE, [blockHash]); } @@ -538,10 +538,10 @@ export class MerkleTrees implements MerkleTreeAdminDatabase { * @param includeUncommitted - Indicates whether to include uncommitted data. * @returns The tree info for the specified tree. 
*/ - #getTreeInfo(treeId: MerkleTreeId, includeUncommitted: boolean): Promise { + async #getTreeInfo(treeId: MerkleTreeId, includeUncommitted: boolean): Promise { const treeInfo = { treeId, - root: this.trees[treeId].getRoot(includeUncommitted), + root: await this.trees[treeId].getRoot(includeUncommitted), size: this.trees[treeId].getNumLeaves(includeUncommitted), depth: this.trees[treeId].getDepth(), } as TreeInfo; @@ -631,11 +631,13 @@ export class MerkleTrees implements MerkleTreeAdminDatabase { [l2Block.header.state.l1ToL2MessageTree.root, MerkleTreeId.L1_TO_L2_MESSAGE_TREE], [l2Block.archive.root, MerkleTreeId.ARCHIVE], ] as const; - const compareRoot = (root: Fr, treeId: MerkleTreeId) => { - const treeRoot = this.trees[treeId].getRoot(true); + const compareRoot = async (root: Fr, treeId: MerkleTreeId) => { + const treeRoot = await this.trees[treeId].getRoot(true); return treeRoot.equals(root.toBuffer()); }; - const ourBlock = treeRootWithIdPairs.every(([root, id]) => compareRoot(root, id)); + const ourBlock = (await Promise.all(treeRootWithIdPairs.map(([root, id]) => compareRoot(root, id)))).every( + (x: boolean) => x, + ); if (ourBlock) { this.log.verbose(`Block ${l2Block.number} is ours, committing world state`); await this.#commit();