diff --git a/spartan/aztec-network/values/ci.yaml b/spartan/aztec-network/values/ci.yaml index aeab0e6b0f6b..d8fc4e57a5b8 100644 --- a/spartan/aztec-network/values/ci.yaml +++ b/spartan/aztec-network/values/ci.yaml @@ -2,6 +2,8 @@ aztec: slotDuration: 24 epochDuration: 4 proofSubmissionWindow: 8 + testAccounts: true + sponsoredFPC: true ethereum: acceleratedTestDeployments: true diff --git a/spartan/scripts/deploy_k8s.sh b/spartan/scripts/deploy_k8s.sh index f8f51ac00806..98804f77e5d4 100755 --- a/spartan/scripts/deploy_k8s.sh +++ b/spartan/scripts/deploy_k8s.sh @@ -27,6 +27,7 @@ helm_instance="${6:-spartan}" # Default values for environment variables chaos_values="${CHAOS_VALUES:-}" +clear_chaos_mesh="${CLEAR_CHAOS_MESH:-}" aztec_docker_tag=${AZTEC_DOCKER_TAG:-$(git rev-parse HEAD)} install_timeout=${INSTALL_TIMEOUT:-30m} overrides="${OVERRIDES:-}" @@ -83,7 +84,7 @@ function generate_overrides { } # if we don't have a chaos values, remove any existing chaos experiments -if [ -z "$chaos_values" ]; then +if [ -z "$chaos_values" ] && [ -n "$clear_chaos_mesh" ]; then echo "Deleting existing network chaos experiments..." 
kubectl delete networkchaos --all --all-namespaces 2>/dev/null || true fi diff --git a/spartan/scripts/test_k8s.sh b/spartan/scripts/test_k8s.sh index e89f89c37b78..24ef9b85e56c 100755 --- a/spartan/scripts/test_k8s.sh +++ b/spartan/scripts/test_k8s.sh @@ -47,6 +47,9 @@ OVERRIDES="${OVERRIDES:-}" if [ "$target" = "kind" ]; then echo "Deploying to kind" export K8S="local" +elif [ "$target" = "local" ]; then + echo "Using local credentials" + export K8S="local" elif [ "$target" = "gke" ]; then echo "Deploying to GKE" export K8S="gcloud" diff --git a/yarn-project/aztec.js/src/contract/proven_tx.ts b/yarn-project/aztec.js/src/contract/proven_tx.ts index 43763834daa7..c0c92a240537 100644 --- a/yarn-project/aztec.js/src/contract/proven_tx.ts +++ b/yarn-project/aztec.js/src/contract/proven_tx.ts @@ -12,7 +12,7 @@ export class ProvenTx extends Tx { } // Clone the TX data to get a serializable object. - protected getPlainDataTx(): Tx { + public getPlainDataTx(): Tx { return new Tx(this.data, this.clientIvcProof, this.contractClassLogs, this.publicFunctionCalldata); } diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 2220cf3fbaa2..bb6fd1bbdc1e 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -62,7 +62,11 @@ import type { SequencerClient } from '@aztec/sequencer-client'; import type { TestSequencerClient } from '@aztec/sequencer-client/test'; import { WASMSimulator } from '@aztec/simulator/client'; import { SimulationProviderRecorderWrapper } from '@aztec/simulator/testing'; -import { getContractClassFromArtifact, getContractInstanceFromDeployParams } from '@aztec/stdlib/contract'; +import { + type ContractInstanceWithAddress, + getContractClassFromArtifact, + getContractInstanceFromDeployParams, +} from '@aztec/stdlib/contract'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; import type { PublicDataTreeLeaf } from 
'@aztec/stdlib/trees'; import { @@ -736,10 +740,19 @@ export async function expectMappingDelta( * but by conventions its address is computed with a salt of 0. * @returns The address of the sponsored FPC contract */ -export async function getSponsoredFPCAddress() { - const sponsoredFPCInstance = await getContractInstanceFromDeployParams(SponsoredFPCContract.artifact, { +export async function getSponsoredFPCInstance(): Promise<ContractInstanceWithAddress> { + return getContractInstanceFromDeployParams(SponsoredFPCContract.artifact, { salt: new Fr(SPONSORED_FPC_SALT), }); +} + +/** + * Computes the address of the "canonical" SponsoredFPCContract. This is not a protocol contract + * but by conventions its address is computed with a salt of 0. + * @returns The address of the sponsored FPC contract + */ +export async function getSponsoredFPCAddress() { + const sponsoredFPCInstance = await getSponsoredFPCInstance(); return sponsoredFPCInstance.address; } @@ -765,6 +778,14 @@ export async function setupSponsoredFPC(pxe: PXE) { return deployed; } +/** + * Registers the SponsoredFPC in this PXE instance + * @param pxe - The pxe client + */ +export async function registerSponsoredFPC(pxe: PXE): Promise<void> { + await pxe.registerContract({ instance: await getSponsoredFPCInstance(), artifact: SponsoredFPCContract.artifact }); +} + export async function waitForProvenChain(node: AztecNode, targetBlock?: number, timeoutSec = 60, intervalSec = 1) { targetBlock ??= await node.getBlockNumber(); diff --git a/yarn-project/end-to-end/src/spartan/mempool_limit.test.ts b/yarn-project/end-to-end/src/spartan/mempool_limit.test.ts new file mode 100644 index 000000000000..15bab3c9dfe5 --- /dev/null +++ b/yarn-project/end-to-end/src/spartan/mempool_limit.test.ts @@ -0,0 +1,177 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { + AztecAddress, + type ContractInstanceWithAddress, + Fr, + SponsoredFeePaymentMethod, + Tx, + TxStatus, + type Wallet, +} from '@aztec/aztec.js'; +import type { UserFeeOptions 
} from '@aztec/entrypoints/interfaces'; +import { asyncPool } from '@aztec/foundation/async-pool'; +import { times, timesAsync } from '@aztec/foundation/collection'; +import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici'; +import { createLogger } from '@aztec/foundation/log'; +import { TokenContract } from '@aztec/noir-contracts.js/Token'; +import { createPXEService } from '@aztec/pxe/server'; +import { + type AztecNode, + type AztecNodeAdmin, + createAztecNodeAdminClient, + createAztecNodeClient, +} from '@aztec/stdlib/interfaces/client'; +import { deriveSigningKey } from '@aztec/stdlib/keys'; +import { makeTracedFetch } from '@aztec/telemetry-client'; + +import type { ChildProcess } from 'child_process'; + +import { getSponsoredFPCAddress, registerSponsoredFPC } from '../fixtures/utils.js'; +import { isK8sConfig, setupEnvironment, startPortForward } from './utils.js'; + +const config = setupEnvironment(process.env); + +const debugLogger = createLogger('e2e:spartan-test:mempool_limiter'); + +const pxeOptions = { + dataDirectory: undefined, + dataStoreMapSizeKB: 1024 ** 2, // max size is 1GB +}; + +const TX_FLOOD_SIZE = 100; +const TX_MEMPOOL_LIMIT = 25; +const CONCURRENCY = 25; + +describe('mempool limiter test', () => { + // we need a node to change its mempoolTxSize for this test + let nodeAdmin: AztecNodeAdmin; + // the regular API for the same node + let node: AztecNode; + + let accountSecretKey: Fr; + let accountSalt: Fr; + let tokenInstance: ContractInstanceWithAddress; + let tokenContractAddress: AztecAddress; + let sampleTx: Tx; + + let fee: UserFeeOptions; + + const forwardProcesses: ChildProcess[] = []; + + beforeAll(async () => { + let NODE_URL: string; + let NODE_ADMIN_URL: string; + + if (isK8sConfig(config)) { + const nodeAdminFwd = await startPortForward({ + resource: `svc/${config.INSTANCE_NAME}-aztec-network-full-node-admin`, + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_NODE_ADMIN_PORT, + }); + + const 
nodeFwd = await startPortForward({ + resource: `svc/${config.INSTANCE_NAME}-aztec-network-full-node`, + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_NODE_PORT, + }); + + forwardProcesses.push(nodeAdminFwd.process, nodeFwd.process); + NODE_ADMIN_URL = `http://127.0.0.1:${nodeAdminFwd.port}`; + NODE_URL = `http://127.0.0.1:${nodeFwd.port}`; + } else { + NODE_ADMIN_URL = config.NODE_ADMIN_URL; + NODE_URL = config.NODE_URL; + } + + const fetch = makeTracedFetch( + times(10, () => 1), + false, + makeUndiciFetch(new Agent({ connections: CONCURRENCY })), + ); + nodeAdmin = createAztecNodeAdminClient(NODE_ADMIN_URL, {}, fetch); + node = createAztecNodeClient(NODE_URL, {}, fetch); + }); + + beforeAll(async () => { + debugLogger.debug(`Preparing account and token contract`); + + // set a large pool size so that deploy txs fit + await nodeAdmin.setConfig({ maxTxPoolSize: 1e9 }); + + const pxe = await createPXEService(node, pxeOptions); + + await registerSponsoredFPC(pxe); + fee = { + paymentMethod: new SponsoredFeePaymentMethod(await getSponsoredFPCAddress()), + }; + + accountSecretKey = Fr.fromHexString('0xcafe'); + accountSalt = Fr.ONE; + const account = await getSchnorrAccount(pxe, accountSecretKey, deriveSigningKey(accountSecretKey), accountSalt); + const meta = await pxe.getContractMetadata(account.getAddress()); + let wallet: Wallet; + if (meta.isContractInitialized) { + wallet = await account.register(); + } else { + const res = await account.deploy({ fee }).wait(); + wallet = res.wallet; + } + + debugLogger.info(`Deployed account: ${account.getAddress()}`); + + const tokenDeploy = TokenContract.deploy(wallet, wallet.getAddress(), 'TEST', 'T', 18); + const token = await tokenDeploy.register({ contractAddressSalt: Fr.ONE }); + tokenContractAddress = token.address; + tokenInstance = token.instance; + + const tokenMeta = await pxe.getContractMetadata(token.address); + if (!tokenMeta.isContractInitialized) { + await tokenDeploy.send({ contractAddressSalt: 
Fr.ONE, fee }).wait(); + debugLogger.info(`Deployed token contract: ${tokenContractAddress}`); + + await token.methods + .mint_to_public(wallet.getAddress(), 10n ** 18n) + .send({ fee }) + .wait(); + debugLogger.info(`Minted tokens`); + } else { + debugLogger.info(`Token contract already deployed at: ${token.address}`); + } + + debugLogger.debug(`Calculating mempool limits`); + + const proventx = await token.methods + .transfer_in_public(wallet.getAddress(), await AztecAddress.random(), 1, 0) + .prove({ fee }); + sampleTx = proventx.getPlainDataTx(); + const sampleTxSize = sampleTx.getSize(); + const maxTxPoolSize = TX_MEMPOOL_LIMIT * sampleTxSize; + + await nodeAdmin.setConfig({ maxTxPoolSize }); + + debugLogger.info(`Sample tx size: ${sampleTxSize} bytes`); + debugLogger.info(`Mempool limited to: ${maxTxPoolSize} bytes`); + + await pxe.stop(); + }, 240_000); + + afterAll(async () => { + await nodeAdmin.setConfig({ maxTxPoolSize: 1e9 }); + forwardProcesses.forEach(p => p.kill()); + }); + + it('evicts txs to keep mempool under specified limit', async () => { + const txs = await timesAsync(TX_FLOOD_SIZE, async () => { + const tx = Tx.fromBuffer(sampleTx.toBuffer()); + // this only works on unproven networks, otherwise this will fail verification + tx.data.forPublic!.nonRevertibleAccumulatedData.nullifiers[0] = Fr.random(); + await tx.getTxHash(true); + return tx; + }); + + await asyncPool(CONCURRENCY, txs, async tx => node.sendTx(tx)); + const receipts = await asyncPool(CONCURRENCY, txs, async tx => node.getTxReceipt(await tx.getTxHash())); + const pending = receipts.reduce((count, receipt) => (receipt.status === TxStatus.PENDING ? 
count + 1 : count), 0); + expect(pending).toBeLessThanOrEqual(TX_MEMPOOL_LIMIT); + }, 600_000); +}); diff --git a/yarn-project/pxe/src/entrypoints/server/utils.ts b/yarn-project/pxe/src/entrypoints/server/utils.ts index 819f6034249b..51d87f12fd5b 100644 --- a/yarn-project/pxe/src/entrypoints/server/utils.ts +++ b/yarn-project/pxe/src/entrypoints/server/utils.ts @@ -12,6 +12,11 @@ import type { PXEServiceConfig } from '../../config/index.js'; import { PXEService } from '../../pxe_service/pxe_service.js'; import { PXE_DATA_SCHEMA_VERSION } from '../../storage/index.js'; +type PXEConfigWithoutDefaults = Omit< + PXEServiceConfig, + 'l1Contracts' | 'l1ChainId' | 'l2BlockBatchSize' | 'rollupVersion' +>; + /** * Create and start an PXEService instance with the given AztecNode and config. * @@ -22,7 +27,7 @@ import { PXE_DATA_SCHEMA_VERSION } from '../../storage/index.js'; */ export function createPXEService( aztecNode: AztecNode, - config: PXEServiceConfig, + config: PXEConfigWithoutDefaults, useLogSuffix: string | boolean | undefined = undefined, ) { const simulationProvider = new WASMSimulator(); @@ -42,18 +47,20 @@ export function createPXEService( export async function createPXEServiceWithSimulationProvider( aztecNode: AztecNode, simulationProvider: SimulationProvider, - config: PXEServiceConfig, + config: PXEConfigWithoutDefaults, useLogSuffix: string | boolean | undefined = undefined, ) { const logSuffix = typeof useLogSuffix === 'boolean' ? (useLogSuffix ? 
randomBytes(3).toString('hex') : undefined) : useLogSuffix; - const l1Contracts = await aztecNode.getL1ContractAddresses(); - const configWithContracts = { + const { l1ChainId, l1ContractAddresses: l1Contracts, rollupVersion } = await aztecNode.getNodeInfo(); + const configWithContracts: PXEServiceConfig = { ...config, l1Contracts, + l1ChainId, + rollupVersion, l2BlockBatchSize: 200, - } as PXEServiceConfig; + }; const store = await createStore( 'pxe_data', @@ -70,13 +77,17 @@ export async function createPXEServiceWithSimulationProvider( prover, simulationProvider, protocolContractsProvider, - config, + configWithContracts, logSuffix, ); return pxe; } -function createProver(config: PXEServiceConfig, simulationProvider: SimulationProvider, logSuffix?: string) { +function createProver( + config: Pick<PXEServiceConfig, 'bbBinaryPath' | 'bbWorkingDirectory'>, + simulationProvider: SimulationProvider, + logSuffix?: string, +) { if (!config.bbBinaryPath || !config.bbWorkingDirectory) { return new BBWASMBundlePrivateKernelProver(simulationProvider, 16); } else { diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 30545590030d..5caa86fae114 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -930,4 +930,8 @@ export class PXEService implements PXE { async resetNoteSyncData() { return await this.taggingDataProvider.resetNoteSyncData(); } + + public stop(): Promise<void> { + return this.jobQueue.end(); + } } diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts index 975ab2ed8403..d54da950eb10 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts @@ -31,7 +31,14 @@ export interface AztecNodeAdmin { } export const AztecNodeAdminApiSchema: ApiSchemaFor<AztecNodeAdmin> = { - setConfig: z.function().args(SequencerConfigSchema.merge(ProverConfigSchema).partial()).returns(z.void()), + 
setConfig: z + .function() + .args( + SequencerConfigSchema.merge(ProverConfigSchema) + .merge(z.object({ maxTxPoolSize: z.number() })) + .partial(), + ) + .returns(z.void()), flushTxs: z.function().returns(z.void()), startSnapshotUpload: z.function().args(z.string()).returns(z.void()), };