From 30fedcfb3ca0ffc4ffd9b5a38426e2cd0d93e963 Mon Sep 17 00:00:00 2001 From: spypsy Date: Mon, 9 Feb 2026 17:22:11 +0000 Subject: [PATCH 01/62] fix: stringify all bigints in pino-logger --- .../src/log/gcloud-logger-config.ts | 26 ++++++ .../foundation/src/log/pino-logger.test.ts | 85 +++++++++++++++++++ .../foundation/src/log/pino-logger.ts | 26 ++++++ .../slasher/src/slash_offenses_collector.ts | 6 +- 4 files changed, 138 insertions(+), 5 deletions(-) diff --git a/yarn-project/foundation/src/log/gcloud-logger-config.ts b/yarn-project/foundation/src/log/gcloud-logger-config.ts index db3b331141df..7bff7a334240 100644 --- a/yarn-project/foundation/src/log/gcloud-logger-config.ts +++ b/yarn-project/foundation/src/log/gcloud-logger-config.ts @@ -6,6 +6,29 @@ const GOOGLE_CLOUD_TRACE_ID = 'logging.googleapis.com/trace'; const GOOGLE_CLOUD_SPAN_ID = 'logging.googleapis.com/spanId'; const GOOGLE_CLOUD_TRACE_SAMPLED = 'logging.googleapis.com/trace_sampled'; +/** + * Converts bigint values to strings recursively in a log object to avoid serialization issues. + */ +function convertBigintsToStrings(obj: unknown): unknown { + if (typeof obj === 'bigint') { + return String(obj); + } + + if (Array.isArray(obj)) { + return obj.map(item => convertBigintsToStrings(item)); + } + + if (obj !== null && typeof obj === 'object') { + const result: Record = {}; + for (const key in obj) { + result[key] = convertBigintsToStrings((obj as Record)[key]); + } + return result; + } + + return obj; +} + /** * Pino configuration for google cloud observability. Tweaks message and timestamp, * adds trace context attributes, and injects severity level. 
@@ -15,6 +38,9 @@ export const GoogleCloudLoggerConfig = { messageKey: 'message', formatters: { log(object: Record): Record { + // Convert bigints to strings recursively to avoid serialization issues + object = convertBigintsToStrings(object) as Record; + // Add trace context attributes following Cloud Logging structured log format described // in https://cloud.google.com/logging/docs/structured-logging#special-payload-fields const { trace_id, span_id, trace_flags, ...rest } = object; diff --git a/yarn-project/foundation/src/log/pino-logger.test.ts b/yarn-project/foundation/src/log/pino-logger.test.ts index 1efdcf038f18..9881535d4f58 100644 --- a/yarn-project/foundation/src/log/pino-logger.test.ts +++ b/yarn-project/foundation/src/log/pino-logger.test.ts @@ -188,6 +188,91 @@ describe('pino-logger', () => { }); }); + it('converts bigints to strings recursively', () => { + const testLogger = createLogger('bigint-test'); + capturingStream.clear(); + + testLogger.info('comprehensive bigint conversion', { + // Top-level bigints + amount: 123456789012345678901234n, + slot: 42n, + // Nested objects + nested: { + value: 999999999999999999n, + deepNested: { + id: 12345678901234567890n, + }, + }, + // Arrays with bigints + array: [1n, 2n, 3n], + mixedArray: [{ id: 999n }, { id: 888n }], + // Mixed types + numberValue: 123, + stringValue: 'test', + boolValue: true, + nullValue: null, + }); + + const entries = capturingStream.getJsonLines(); + expect(entries).toHaveLength(1); + expect(entries[0]).toMatchObject({ + module: 'bigint-test', + msg: 'comprehensive bigint conversion', + // All bigints converted to strings + amount: '123456789012345678901234', + slot: '42', + nested: { + value: '999999999999999999', + deepNested: { + id: '12345678901234567890', + }, + }, + array: ['1', '2', '3'], + mixedArray: [{ id: '999' }, { id: '888' }], + // Other types preserved + numberValue: 123, + stringValue: 'test', + boolValue: true, + nullValue: null, + }); + }); + + it('does not mutate the 
original log data object', () => { + const testLogger = createLogger('mutation-test'); + capturingStream.clear(); + + const originalData = { + amount: 123456789012345678901234n, + nested: { + value: 999n, + }, + array: [1n, 2n, 3n], + }; + + // Keep references to verify mutation + const originalAmount = originalData.amount; + const originalNestedValue = originalData.nested.value; + const originalArrayItem = originalData.array[0]; + + testLogger.info('mutation test', originalData); + + // Verify the original object was NOT mutated + expect(originalData.amount).toBe(originalAmount); + expect(typeof originalData.amount).toBe('bigint'); + expect(originalData.nested.value).toBe(originalNestedValue); + expect(typeof originalData.nested.value).toBe('bigint'); + expect(originalData.array[0]).toBe(originalArrayItem); + expect(typeof originalData.array[0]).toBe('bigint'); + + // But the logged version should have strings + const entries = capturingStream.getJsonLines(); + expect(entries[0]).toMatchObject({ + amount: '123456789012345678901234', + nested: { value: '999' }, + array: ['1', '2', '3'], + }); + }); + it('returns bindings via getBindings', () => { const testLogger = createLogger('bindings-test', { actor: 'main', instanceId: 'id-123' }); const bindings = testLogger.getBindings(); diff --git a/yarn-project/foundation/src/log/pino-logger.ts b/yarn-project/foundation/src/log/pino-logger.ts index 2a41d44bf117..bff4deca0c49 100644 --- a/yarn-project/foundation/src/log/pino-logger.ts +++ b/yarn-project/foundation/src/log/pino-logger.ts @@ -134,6 +134,29 @@ const customLevels = { verbose: 25 }; // Global pino options, tweaked for google cloud if running there. const useGcloudLogging = parseBooleanEnv(process.env['USE_GCLOUD_LOGGING' satisfies EnvVar]); +/** + * Converts bigint values to strings recursively in a log object to avoid serialization issues. 
+ */ +function convertBigintsToStrings(obj: unknown): unknown { + if (typeof obj === 'bigint') { + return String(obj); + } + + if (Array.isArray(obj)) { + return obj.map(item => convertBigintsToStrings(item)); + } + + if (obj !== null && typeof obj === 'object') { + const result: Record = {}; + for (const key in obj) { + result[key] = convertBigintsToStrings((obj as Record)[key]); + } + return result; + } + + return obj; +} + const redactedPaths = [ 'validatorPrivateKeys', // for both the validator and the prover @@ -165,6 +188,9 @@ const pinoOpts: pino.LoggerOptions = { ...redactedPaths.map(p => `opts.${p}`), ], }, + formatters: { + log: obj => convertBigintsToStrings(obj) as Record, + }, ...(useGcloudLogging ? GoogleCloudLoggerConfig : {}), }; diff --git a/yarn-project/slasher/src/slash_offenses_collector.ts b/yarn-project/slasher/src/slash_offenses_collector.ts index 274357a97c6a..551f868ccec3 100644 --- a/yarn-project/slasher/src/slash_offenses_collector.ts +++ b/yarn-project/slasher/src/slash_offenses_collector.ts @@ -85,11 +85,7 @@ export class SlashOffensesCollector { } } - this.log.info(`Adding pending offense for validator ${arg.validator}`, { - ...pendingOffense, - epochOrSlot: pendingOffense.epochOrSlot.toString(), - amount: pendingOffense.amount.toString(), - }); + this.log.info(`Adding pending offense for validator ${arg.validator}`, pendingOffense); await this.offensesStore.addPendingOffense(pendingOffense); } } From ebfa25ad6f067c674cbd64a11ba2f9f99cbf8071 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 9 Feb 2026 19:17:16 -0300 Subject: [PATCH 02/62] refactor(ethereum): config-driven tx delayer replaces manual DelayedTxUtils wrapping E2e tests previously reached deep into component internals to replace L1TxUtils instances with DelayedTxUtils wrappers, resulting in fragile code like: `(((proverNode as TestProverNode).publisher as ProverNodePublisher).l1TxUtils as DelayedTxUtils).delayer!` This refactoring makes the delayer config-driven: 
- Add `enableDelayer` and `txDelayerMaxInclusionTimeIntoSlot` config fields to L1TxUtilsConfig - Move tx_delayer.ts from test/ to l1_tx_utils/ so factories can use it - Delete DelayedTxUtils class (replaced by delayer field on L1TxUtils) - Apply delayer automatically in L1TxUtils factories when enabled via config - Share a single DelayerImpl across all L1TxUtils instances per component - Store sequencer delayer in SequencerClient, accessible via getDelayer() - Remove Sequencer.publisher field (no longer needed by any test) - Add interruptAll() to SequencerPublisherFactory for shutdown Co-Authored-By: Claude Opus 4.6 --- .../aztec-node/src/aztec-node/server.ts | 4 +- .../src/e2e_epochs/epochs_l1_reorgs.test.ts | 3 +- .../epochs_proof_fails.parallel.test.ts | 10 +-- .../end-to-end/src/e2e_epochs/epochs_test.ts | 53 ++---------- yarn-project/end-to-end/src/fixtures/setup.ts | 53 ++++-------- .../ethereum/src/l1_tx_utils/config.ts | 13 +++ .../ethereum/src/l1_tx_utils/factory.ts | 13 ++- .../src/l1_tx_utils/forwarder_l1_tx_utils.ts | 13 ++- .../ethereum/src/l1_tx_utils/index.ts | 1 + .../ethereum/src/l1_tx_utils/l1_tx_utils.ts | 3 + .../src/l1_tx_utils/l1_tx_utils_with_blobs.ts | 13 ++- .../src/{test => l1_tx_utils}/tx_delayer.ts | 71 +++++++++++---- .../ethereum/src/test/delayed_tx_utils.ts | 52 ----------- yarn-project/ethereum/src/test/index.ts | 2 - .../ethereum/src/test/tx_delayer.test.ts | 2 +- .../node-lib/src/factories/l1_tx_utils.ts | 86 ++++++++++++++----- .../src/client/sequencer-client.ts | 14 ++- .../publisher/sequencer-publisher-factory.ts | 5 ++ .../src/sequencer/sequencer.ts | 16 +--- .../sequencer-client/src/test/index.ts | 2 - 20 files changed, 225 insertions(+), 204 deletions(-) rename yarn-project/ethereum/src/{test => l1_tx_utils}/tx_delayer.ts (80%) delete mode 100644 yarn-project/ethereum/src/test/delayed_tx_utils.ts diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 
181b0f60e6f5..c1ee8481c5bf 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -468,7 +468,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { slotDuration: Number(slotDuration), }); - return new AztecNodeService( + const node = new AztecNodeService( config, p2pClient, archiver, @@ -490,6 +490,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { log, blobClient, ); + + return node; } /** diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts index 58bbab1d6806..063287d2ec1b 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts @@ -6,7 +6,8 @@ import type { Logger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; import { createBlobClient } from '@aztec/blob-client/client'; import { Blob } from '@aztec/blob-lib'; -import type { ChainMonitor, ChainMonitorEventMap, Delayer } from '@aztec/ethereum/test'; +import type { Delayer } from '@aztec/ethereum/l1-tx-utils'; +import type { ChainMonitor, ChainMonitorEventMap } from '@aztec/ethereum/test'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { CheckpointNumber } from '@aztec/foundation/branded-types'; import { timesAsync } from '@aztec/foundation/collection'; diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts index 18c6209ef233..ec62cb814a5a 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts @@ -2,12 +2,12 @@ import { getTimestampRangeForEpoch } from '@aztec/aztec.js/block'; import type { Logger } from 
'@aztec/aztec.js/log'; import { BatchedBlob } from '@aztec/blob-lib/types'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { ChainMonitor, DelayedTxUtils, type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/test'; +import { type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/l1-tx-utils'; +import { ChainMonitor } from '@aztec/ethereum/test'; import type { ViemClient } from '@aztec/ethereum/types'; import { CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; -import type { ProverNodePublisher } from '@aztec/prover-node'; import type { TestProverNode } from '@aztec/prover-node/test'; import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; import { Proof } from '@aztec/stdlib/proofs'; @@ -72,8 +72,7 @@ describe('e2e_epochs/epochs_proof_fails', () => { context.proverNode = proverNode; // Get the prover delayer from the newly created prover node - proverDelayer = (((proverNode as TestProverNode).publisher as ProverNodePublisher).l1TxUtils as DelayedTxUtils) - .delayer!; + proverDelayer = (proverNode as TestProverNode).publisher.l1TxUtils.delayer!; // Hold off prover tx until end epoch 1 const [epoch2Start] = getTimestampRangeForEpoch(EpochNumber(2), constants); @@ -114,8 +113,7 @@ describe('e2e_epochs/epochs_proof_fails', () => { const proverNode = await test.createProverNode({ cancelTxOnTimeout: false, maxSpeedUpAttempts: 0 }); // Get the prover delayer from the newly created prover node - proverDelayer = (((proverNode as TestProverNode).publisher as ProverNodePublisher).l1TxUtils as DelayedTxUtils) - .delayer!; + proverDelayer = (proverNode as TestProverNode).publisher.l1TxUtils.delayer!; // Inject a delay in prover node proving equal to the length of an epoch, to make sure deadline will be hit const epochProverManager = (proverNode as TestProverNode).prover; 
diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index 4d0c64b7980e..8bdf61ea8d4f 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -9,7 +9,8 @@ import { EpochCache } from '@aztec/epoch-cache'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { DefaultL1ContractsConfig } from '@aztec/ethereum/config'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { ChainMonitor, DelayedTxUtils, type Delayer, waitUntilL1Timestamp, withDelayer } from '@aztec/ethereum/test'; +import { type Delayer, waitUntilL1Timestamp, withDelayer } from '@aztec/ethereum/l1-tx-utils'; +import { ChainMonitor } from '@aztec/ethereum/test'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; @@ -20,16 +21,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; import { getMockPubSubP2PServiceFactory } from '@aztec/p2p/test-helpers'; -import { ProverNode, type ProverNodeConfig, ProverNodePublisher } from '@aztec/prover-node'; -import type { TestProverNode } from '@aztec/prover-node/test'; +import { ProverNode, type ProverNodeConfig } from '@aztec/prover-node'; import type { PXEConfig } from '@aztec/pxe/config'; -import { - type SequencerClient, - type SequencerEvents, - type SequencerPublisher, - SequencerState, -} from '@aztec/sequencer-client'; -import type { TestSequencerClient } from '@aztec/sequencer-client/test'; +import { type SequencerClient, type SequencerEvents, SequencerState } from '@aztec/sequencer-client'; import { type BlockParameter, EthAddress } from '@aztec/stdlib/block'; import { type 
L1RollupConstants, getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers'; import { tryStop } from '@aztec/stdlib/interfaces/server'; @@ -169,17 +163,8 @@ export class EpochsTestContext { // Loop that tracks L1 and L2 block numbers and logs whenever there's a new one. this.monitor = new ChainMonitor(this.rollup, context.dateProvider, this.logger).start(); - // This is hideous. - // We ought to have a definite reference to the l1TxUtils that we're using in both places, provided by the test context. - this.proverDelayer = context.proverNode - ? (((context.proverNode as TestProverNode).publisher as ProverNodePublisher).l1TxUtils as DelayedTxUtils).delayer! - : undefined!; - this.sequencerDelayer = context.sequencer - ? ( - ((context.sequencer as TestSequencerClient).sequencer.publisher as SequencerPublisher) - .l1TxUtils as DelayedTxUtils - ).delayer! - : undefined!; + this.proverDelayer = context.proverDelayer!; + this.sequencerDelayer = context.sequencerDelayer!; if ((context.proverNode && !this.proverDelayer) || (context.sequencer && !this.sequencerDelayer)) { throw new Error(`Could not find prover or sequencer delayer`); @@ -248,15 +233,13 @@ export class EpochsTestContext { public createValidatorNode( privateKeys: `0x${string}`[], - opts: Partial & { txDelayerMaxInclusionTimeIntoSlot?: number; dontStartSequencer?: boolean } = {}, + opts: Partial & { dontStartSequencer?: boolean } = {}, ) { this.logger.warn('Creating and syncing a validator node...'); return this.createNode({ ...opts, disableValidator: false, validatorPrivateKeys: new SecretValue(privateKeys) }); } - private async createNode( - opts: Partial & { txDelayerMaxInclusionTimeIntoSlot?: number; dontStartSequencer?: boolean } = {}, - ) { + private async createNode(opts: Partial & { dontStartSequencer?: boolean } = {}) { const nodeIndex = this.nodes.length + 1; const actorPrefix = opts.disableValidator ? 
'node' : 'validator'; const { mockGossipSubNetwork } = this.context; @@ -285,26 +268,6 @@ export class EpochsTestContext { ), ); - // REFACTOR: We're getting too much into the internals of the sequencer here. - // We should have a single method for constructing an aztec node that returns a TestAztecNodeService - // which directly exposes the delayer and sets any test config. - if (opts.txDelayerMaxInclusionTimeIntoSlot !== undefined) { - this.logger.info( - `Setting tx delayer max inclusion time into slot to ${opts.txDelayerMaxInclusionTimeIntoSlot} seconds`, - ); - // Here we reach into the sequencer and hook in a tx delayer. The problem is that the sequencer's l1 utils only uses a public client, not a wallet. - // The delayer needs a wallet (a client that can sign), so we have to create one here. - const l1Client = createExtendedL1Client( - resolvedConfig.l1RpcUrls!, - resolvedConfig.publisherPrivateKeys![0]!.getValue(), - ); - const sequencer = node.getSequencer() as TestSequencerClient; - const publisher = sequencer.sequencer.publisher; - const delayed = DelayedTxUtils.fromL1TxUtils(publisher.l1TxUtils, this.L1_BLOCK_TIME_IN_S, l1Client); - delayed.delayer!.setMaxInclusionTimeIntoSlot(opts.txDelayerMaxInclusionTimeIntoSlot); - publisher.l1TxUtils = delayed; - } - this.nodes.push(node); return node; } diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index eb323801fa1b..f4b9da66393b 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ -30,13 +30,8 @@ import { type ZKPassportArgs, deployAztecL1Contracts, } from '@aztec/ethereum/deploy-aztec-l1-contracts'; -import { - DelayedTxUtils, - EthCheatCodes, - EthCheatCodesWithState, - createDelayedL1TxUtilsFromViemWallet, - startAnvil, -} from '@aztec/ethereum/test'; +import type { Delayer } from '@aztec/ethereum/l1-tx-utils'; +import { EthCheatCodes, EthCheatCodesWithState, startAnvil } from 
'@aztec/ethereum/test'; import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; import { randomBytes } from '@aztec/foundation/crypto/random'; @@ -44,7 +39,7 @@ import { tryRmDir } from '@aztec/foundation/fs'; import { withLoggerBindings } from '@aztec/foundation/log/server'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; -import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; +import { TestDateProvider } from '@aztec/foundation/timer'; import type { DataStoreConfig } from '@aztec/kv-store/config'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; @@ -52,9 +47,9 @@ import type { P2PClientDeps } from '@aztec/p2p'; import { MockGossipSubNetwork, getMockPubSubP2PServiceFactory } from '@aztec/p2p/test-helpers'; import { protocolContractsHash } from '@aztec/protocol-contracts'; import { type ProverNode, type ProverNodeConfig, type ProverNodeDeps, createProverNode } from '@aztec/prover-node'; +import type { TestProverNode } from '@aztec/prover-node/test'; import { type PXEConfig, getPXEConfig } from '@aztec/pxe/server'; import type { SequencerClient } from '@aztec/sequencer-client'; -import type { TestSequencerClient } from '@aztec/sequencer-client/test'; import { type ContractInstanceWithAddress, getContractInstanceFromInstantiationParams } from '@aztec/stdlib/contract'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; import { tryStop } from '@aztec/stdlib/interfaces/server'; @@ -248,6 +243,10 @@ export type EndToEndContext = { telemetryClient: TelemetryClient; /** Mock gossip sub network used for gossipping messages (only if mockGossipSubNetwork was set to true in opts) */ mockGossipSubNetwork: MockGossipSubNetwork | undefined; + /** Delayer for sequencer L1 txs (only when enableDelayer is 
true). */ + sequencerDelayer: Delayer | undefined; + /** Delayer for prover node L1 txs (only when enableDelayer and startProverNode are true). */ + proverDelayer: Delayer | undefined; /** Prefilled public data used for setting up nodes. */ prefilledPublicData: PublicDataTreeLeaf[] | undefined; /** ACVM config (only set if running locally). */ @@ -287,6 +286,7 @@ export async function setup( config.realProofs = !!opts.realProofs; // Only enforce the time table if requested config.enforceTimeTable = !!opts.enforceTimeTable; + config.enableDelayer = true; config.listenAddress = '127.0.0.1'; const logger = getLogger(); @@ -491,11 +491,6 @@ export async function setup( ); const sequencerClient = aztecNodeService.getSequencer(); - if (sequencerClient) { - const publisher = (sequencerClient as TestSequencerClient).sequencer.publisher; - publisher.l1TxUtils = DelayedTxUtils.fromL1TxUtils(publisher.l1TxUtils, config.ethereumSlotDuration, l1Client); - } - let proverNode: ProverNode | undefined = undefined; if (opts.startProverNode) { logger.verbose('Creating and syncing a simulated prover node...'); @@ -521,6 +516,9 @@ export async function setup( ); } + const sequencerDelayer = sequencerClient?.getDelayer(); + const proverDelayer = proverNode ? 
(proverNode as TestProverNode).publisher.l1TxUtils.delayer : undefined; + logger.verbose('Creating a pxe...'); const pxeConfig = { ...getPXEConfig(), ...pxeOpts }; pxeConfig.dataDirectory = path.join(directoryToCleanup, randomBytes(8).toString('hex')); @@ -621,6 +619,8 @@ export async function setup( mockGossipSubNetwork, prefilledPublicData, proverNode, + sequencerDelayer, + proverDelayer, sequencer: sequencerClient, teardown, telemetryClient, @@ -747,19 +747,12 @@ export function createAndSyncProverNode( ...proverNodeConfig, }; - const l1TxUtils = createDelayedL1TxUtils( - aztecNodeConfig, - proverNodePrivateKey, - 'prover-node', - proverNodeDeps.dateProvider, - ); - const proverNode = await createProverNode( proverConfig, - { ...proverNodeDeps, aztecNodeTxProvider, archiver: archiver as Archiver, l1TxUtils }, + { ...proverNodeDeps, aztecNodeTxProvider, archiver: archiver as Archiver }, { prefilledPublicData }, ); - getLogger().info(`Created and synced prover node`, { publisherAddress: l1TxUtils.client.account!.address }); + getLogger().info(`Created and synced prover node`); if (!proverNodeConfig.dontStart) { await proverNode.start(); } @@ -767,20 +760,6 @@ export function createAndSyncProverNode( }); } -function createDelayedL1TxUtils( - aztecNodeConfig: AztecNodeConfig, - privateKey: `0x${string}`, - logName: string, - dateProvider?: DateProvider, -) { - const l1Client = createExtendedL1Client(aztecNodeConfig.l1RpcUrls, privateKey, foundry); - - const log = createLogger(logName); - const l1TxUtils = createDelayedL1TxUtilsFromViemWallet(l1Client, log, dateProvider, aztecNodeConfig); - l1TxUtils.enableDelayer(aztecNodeConfig.ethereumSlotDuration); - return l1TxUtils; -} - export type BalancesFn = ReturnType; export function getBalancesFn( symbol: string, diff --git a/yarn-project/ethereum/src/l1_tx_utils/config.ts b/yarn-project/ethereum/src/l1_tx_utils/config.ts index 958fef38d16e..e5f52877cf56 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/config.ts +++ 
b/yarn-project/ethereum/src/l1_tx_utils/config.ts @@ -5,6 +5,7 @@ import { getConfigFromMappings, getDefaultConfig, numberConfigHelper, + optionalNumberConfigHelper, } from '@aztec/foundation/config'; export interface L1TxUtilsConfig { @@ -60,6 +61,10 @@ export interface L1TxUtilsConfig { * How long a tx nonce can be unseen in the mempool before considering it dropped */ txUnseenConsideredDroppedMs?: number; + /** Enable tx delayer. When true, wraps the viem client to intercept and delay txs. Test-only. */ + enableDelayer?: boolean; + /** Max seconds into an L1 slot for tx inclusion. Txs sent later are deferred to next slot. Only used when enableDelayer is true. */ + txDelayerMaxInclusionTimeIntoSlot?: number; } export const l1TxUtilsConfigMappings: ConfigMappingsType = { @@ -142,6 +147,14 @@ export const l1TxUtilsConfigMappings: ConfigMappingsType = { env: 'L1_TX_MONITOR_TX_UNSEEN_CONSIDERED_DROPPED_MS', ...numberConfigHelper(6 * 12 * 1000), // 6 L1 blocks }, + enableDelayer: { + description: 'Enable tx delayer for testing.', + ...booleanConfigHelper(false), + }, + txDelayerMaxInclusionTimeIntoSlot: { + description: 'Max seconds into L1 slot for tx inclusion when delayer is enabled.', + ...optionalNumberConfigHelper(), + }, }; // We abuse the fact that all mappings above have a non null default value and force-type this to Required diff --git a/yarn-project/ethereum/src/l1_tx_utils/factory.ts b/yarn-project/ethereum/src/l1_tx_utils/factory.ts index 10d75b9f8eda..4ea3daa03062 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/factory.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/factory.ts @@ -10,6 +10,7 @@ import type { L1TxUtilsConfig } from './config.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtils } from './l1_tx_utils.js'; import { createViemSigner } from './signer.js'; +import { type Delayer, applyDelayer } from './tx_delayer.js'; import type { SigningCallback } from './types.js'; export function 
createL1TxUtilsFromViemWallet( @@ -19,10 +20,12 @@ export function createL1TxUtilsFromViemWallet( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; }, config?: Partial & { debugMaxGasLimit?: boolean }, ): L1TxUtils { - return new L1TxUtils( + const l1TxUtils = new L1TxUtils( client, EthAddress.fromString(client.account.address), createViemSigner(client), @@ -33,6 +36,8 @@ export function createL1TxUtilsFromViemWallet( deps?.store, deps?.metrics, ); + applyDelayer(l1TxUtils, config ?? {}, deps?.ethereumSlotDuration, deps?.delayer); + return l1TxUtils; } export function createL1TxUtilsFromEthSigner( @@ -43,6 +48,8 @@ export function createL1TxUtilsFromEthSigner( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; }, config?: Partial & { debugMaxGasLimit?: boolean }, ): L1TxUtils { @@ -50,7 +57,7 @@ export function createL1TxUtilsFromEthSigner( return (await signer.signTransaction(transaction)).toViemTransactionSignature(); }; - return new L1TxUtils( + const l1TxUtils = new L1TxUtils( client, signer.address, callback, @@ -61,4 +68,6 @@ export function createL1TxUtilsFromEthSigner( deps?.store, deps?.metrics, ); + applyDelayer(l1TxUtils, config ?? 
{}, deps?.ethereumSlotDuration, deps?.delayer); + return l1TxUtils; } diff --git a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts index ca7811980c95..2efebfc8c8cb 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts @@ -11,6 +11,7 @@ import type { L1TxUtilsConfig } from './config.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtilsWithBlobs } from './l1_tx_utils_with_blobs.js'; import { createViemSigner } from './signer.js'; +import { type Delayer, applyDelayer } from './tx_delayer.js'; import type { L1BlobInputs, L1TxConfig, L1TxRequest, SigningCallback } from './types.js'; /** @@ -69,11 +70,13 @@ export function createForwarderL1TxUtilsFromViemWallet( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; } = {}, config: Partial = {}, debugMaxGasLimit: boolean = false, ) { - return new ForwarderL1TxUtils( + const l1TxUtils = new ForwarderL1TxUtils( client, EthAddress.fromString(client.account.address), createViemSigner(client), @@ -85,6 +88,8 @@ export function createForwarderL1TxUtilsFromViemWallet( deps.metrics, forwarderAddress, ); + applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); + return l1TxUtils; } export function createForwarderL1TxUtilsFromEthSigner( @@ -96,6 +101,8 @@ export function createForwarderL1TxUtilsFromEthSigner( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; } = {}, config: Partial = {}, debugMaxGasLimit: boolean = false, @@ -104,7 +111,7 @@ export function createForwarderL1TxUtilsFromEthSigner( return (await signer.signTransaction(transaction)).toViemTransactionSignature(); }; - return new ForwarderL1TxUtils( + const l1TxUtils = new ForwarderL1TxUtils( client, 
signer.address, callback, @@ -116,4 +123,6 @@ export function createForwarderL1TxUtilsFromEthSigner( deps.metrics, forwarderAddress, ); + applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); + return l1TxUtils; } diff --git a/yarn-project/ethereum/src/l1_tx_utils/index.ts b/yarn-project/ethereum/src/l1_tx_utils/index.ts index 2d51cf6745e2..24605d9d45db 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/index.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/index.ts @@ -8,6 +8,7 @@ export * from './l1_tx_utils.js'; export * from './readonly_l1_tx_utils.js'; export * from './signer.js'; export * from './types.js'; +export * from './tx_delayer.js'; export * from './utils.js'; // Note: We intentionally do not export l1_tx_utils_with_blobs.js diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts index ab091aa4eba3..dd4241132339 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts @@ -30,6 +30,7 @@ import { type L1TxUtilsConfig, l1TxUtilsConfigMappings } from './config.js'; import { MAX_L1_TX_LIMIT } from './constants.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { ReadOnlyL1TxUtils } from './readonly_l1_tx_utils.js'; +import type { Delayer } from './tx_delayer.js'; import { DroppedTransactionError, type L1BlobInputs, @@ -47,6 +48,8 @@ const MAX_L1_TX_STATES = 32; export class L1TxUtils extends ReadOnlyL1TxUtils { protected nonceManager: NonceManager; protected txs: L1TxState[] = []; + /** Tx delayer for testing. Only set when enableDelayer config is true. 
*/ + public delayer: Delayer | undefined; constructor( public override client: ViemClient, diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts index 4f20c383ee80..86e249ebda53 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts @@ -11,6 +11,7 @@ import type { L1TxUtilsConfig } from './config.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtils } from './l1_tx_utils.js'; import { createViemSigner } from './signer.js'; +import { type Delayer, applyDelayer } from './tx_delayer.js'; import type { L1BlobInputs, SigningCallback } from './types.js'; /** Extends L1TxUtils with the capability to cancel blobs. This needs to be a separate class so we don't require a dependency on blob-lib unnecessarily. */ @@ -30,11 +31,13 @@ export function createL1TxUtilsWithBlobsFromViemWallet( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; } = {}, config: Partial = {}, debugMaxGasLimit: boolean = false, ) { - return new L1TxUtilsWithBlobs( + const l1TxUtils = new L1TxUtilsWithBlobs( client, EthAddress.fromString(client.account.address), createViemSigner(client), @@ -45,6 +48,8 @@ export function createL1TxUtilsWithBlobsFromViemWallet( deps.store, deps.metrics, ); + applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); + return l1TxUtils; } export function createL1TxUtilsWithBlobsFromEthSigner( @@ -55,6 +60,8 @@ export function createL1TxUtilsWithBlobsFromEthSigner( dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; + ethereumSlotDuration?: number; + delayer?: Delayer; } = {}, config: Partial = {}, debugMaxGasLimit: boolean = false, @@ -63,7 +70,7 @@ export function createL1TxUtilsWithBlobsFromEthSigner( return (await 
signer.signTransaction(transaction)).toViemTransactionSignature(); }; - return new L1TxUtilsWithBlobs( + const l1TxUtils = new L1TxUtilsWithBlobs( client, signer.address, callback, @@ -74,4 +81,6 @@ export function createL1TxUtilsWithBlobsFromEthSigner( deps.store, deps.metrics, ); + applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); + return l1TxUtils; } diff --git a/yarn-project/ethereum/src/test/tx_delayer.ts b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts similarity index 80% rename from yarn-project/ethereum/src/test/tx_delayer.ts rename to yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts index 4776f012ec92..98cfa6399598 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts @@ -16,7 +16,9 @@ import { walletActions, } from 'viem'; -import { type ViemClient, isExtendedClient } from '../types.js'; +import { type ExtendedViemWalletClient, type ViemClient, isExtendedClient } from '../types.js'; +import type { L1TxUtilsConfig } from './config.js'; +import type { L1TxUtils } from './l1_tx_utils.js'; const MAX_WAIT_TIME_SECONDS = 180; @@ -132,23 +134,32 @@ class DelayerImpl implements Delayer { } } +/** + * Creates a new DelayerImpl instance. Exposed so callers can create a single shared delayer + * and pass it to multiple `withDelayer` / `applyDelayer` calls. + */ +export function createDelayer(dateProvider: DateProvider, opts: { ethereumSlotDuration: bigint | number }): Delayer { + return new DelayerImpl(dateProvider, opts); +} + /** * Returns a new client (without modifying the one passed in) with an injected tx delayer. * The delayer can be used to hold off the next tx to be sent until a given block number. + * If an existing delayer is provided, it will be reused instead of creating a new one. * TODO(#10824): This doesn't play along well with blob txs for some reason. 
*/ export function withDelayer( client: T, dateProvider: DateProvider, opts: { ethereumSlotDuration: bigint | number }, + existingDelayer?: Delayer, ): { client: T; delayer: Delayer } { - if (!isExtendedClient(client)) { - throw new Error('withDelayer has to be instantiated with a wallet viem client.'); - } const logger = createLogger('ethereum:tx_delayer'); - const delayer = new DelayerImpl(dateProvider, opts); + const delayer = (existingDelayer as DelayerImpl | undefined) ?? new DelayerImpl(dateProvider, opts); - const extended = client + // Cast to ExtendedViemWalletClient for the extend chain since it has sendRawTransaction. + // The sendRawTransaction override is applied to all clients regardless of type. + const withRawTx = (client as unknown as ExtendedViemWalletClient) // Tweak sendRawTransaction so it uses the delay defined in the delayer. // Note that this will only work with local accounts (ie accounts for which we have the private key). // Transactions signed by the node will not be delayed since they use sendTransaction directly, @@ -237,16 +248,46 @@ export function withDelayer( return txHash; } }, - })) - // Re-extend with sendTransaction so it uses the modified sendRawTransaction. - .extend(client => ({ sendTransaction: walletActions(client).sendTransaction })) - // And with the actions that depend on the modified sendTransaction - .extend(client => ({ - writeContract: walletActions(client).writeContract, - deployContract: walletActions(client).deployContract, - })) as T; + })); + + // Only re-bind wallet actions (sendTransaction, writeContract, deployContract) for wallet clients. + const extended = isExtendedClient(client) + ? withRawTx + // Re-extend with sendTransaction so it uses the modified sendRawTransaction. 
+ .extend(client => ({ sendTransaction: walletActions(client).sendTransaction })) + // And with the actions that depend on the modified sendTransaction + .extend(client => ({ + writeContract: walletActions(client).writeContract, + deployContract: walletActions(client).deployContract, + })) + : withRawTx; - return { client: extended, delayer }; + return { client: extended as T, delayer }; +} + +/** Applies a tx delayer to an L1TxUtils instance if enableDelayer is set in config. + * If an existing delayer is provided, it will be shared instead of creating a new one. + */ +export function applyDelayer( + l1TxUtils: L1TxUtils, + config: Partial, + ethereumSlotDuration?: number, + existingDelayer?: Delayer, +) { + if (!config.enableDelayer || ethereumSlotDuration === undefined) { + return; + } + const { client, delayer } = withDelayer( + l1TxUtils.client, + l1TxUtils.dateProvider, + { ethereumSlotDuration }, + existingDelayer, + ); + l1TxUtils.client = client; + l1TxUtils.delayer = delayer; + if (config.txDelayerMaxInclusionTimeIntoSlot !== undefined) { + delayer.setMaxInclusionTimeIntoSlot(config.txDelayerMaxInclusionTimeIntoSlot); + } } /** diff --git a/yarn-project/ethereum/src/test/delayed_tx_utils.ts b/yarn-project/ethereum/src/test/delayed_tx_utils.ts deleted file mode 100644 index 136f700aa5c4..000000000000 --- a/yarn-project/ethereum/src/test/delayed_tx_utils.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger, createLogger } from '@aztec/foundation/log'; -import { DateProvider } from '@aztec/foundation/timer'; - -import { type L1TxUtilsConfig, createViemSigner } from '../l1_tx_utils/index.js'; -import { L1TxUtilsWithBlobs } from '../l1_tx_utils/l1_tx_utils_with_blobs.js'; -import type { ExtendedViemWalletClient } from '../types.js'; -import { type Delayer, withDelayer } from './tx_delayer.js'; - -export class DelayedTxUtils extends L1TxUtilsWithBlobs { - public delayer: Delayer | undefined; - - 
public static fromL1TxUtils( - l1TxUtils: L1TxUtilsWithBlobs, - ethereumSlotDuration: number, - wallet: ExtendedViemWalletClient, - ) { - const { client, delayer } = withDelayer(wallet, l1TxUtils.dateProvider, { - ethereumSlotDuration, - }); - const casted = l1TxUtils as unknown as DelayedTxUtils; - casted.delayer = delayer; - casted.client = client; - return casted; - } - - public enableDelayer(ethereumSlotDuration: number) { - const { client, delayer } = withDelayer(this.client, this.dateProvider, { - ethereumSlotDuration, - }); - this.delayer = delayer; - this.client = client; - } -} - -export function createDelayedL1TxUtilsFromViemWallet( - client: ExtendedViemWalletClient, - logger: Logger = createLogger('L1TxUtils'), - dateProvider: DateProvider = new DateProvider(), - config?: Partial, - debugMaxGasLimit: boolean = false, -) { - return new DelayedTxUtils( - client, - EthAddress.fromString(client.account.address), - createViemSigner(client), - logger, - dateProvider, - config, - debugMaxGasLimit, - ); -} diff --git a/yarn-project/ethereum/src/test/index.ts b/yarn-project/ethereum/src/test/index.ts index ba2c6035af70..1b4ce5a97cfb 100644 --- a/yarn-project/ethereum/src/test/index.ts +++ b/yarn-project/ethereum/src/test/index.ts @@ -1,8 +1,6 @@ -export * from './delayed_tx_utils.js'; export * from './eth_cheat_codes.js'; export * from './eth_cheat_codes_with_state.js'; export * from './start_anvil.js'; -export * from './tx_delayer.js'; export * from './upgrade_utils.js'; export * from './chain_monitor.js'; export * from './rollup_cheat_codes.js'; diff --git a/yarn-project/ethereum/src/test/tx_delayer.test.ts b/yarn-project/ethereum/src/test/tx_delayer.test.ts index f2e03cde63da..e630ac32ae66 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.test.ts +++ b/yarn-project/ethereum/src/test/tx_delayer.test.ts @@ -10,10 +10,10 @@ import { type PrivateKeyAccount, createWalletClient, fallback, getContract, http import { privateKeyToAccount } from 'viem/accounts'; 
import { foundry } from 'viem/chains'; +import { type Delayer, waitUntilBlock, withDelayer } from '../l1_tx_utils/tx_delayer.js'; import type { ExtendedViemWalletClient } from '../types.js'; import { EthCheatCodes } from './eth_cheat_codes.js'; import { startAnvil } from './start_anvil.js'; -import { type Delayer, waitUntilBlock, withDelayer } from './tx_delayer.js'; describe('tx_delayer', () => { let anvil: Anvil; diff --git a/yarn-project/node-lib/src/factories/l1_tx_utils.ts b/yarn-project/node-lib/src/factories/l1_tx_utils.ts index 7e1a54c3cfb0..0f35f25c11d4 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils.ts +++ b/yarn-project/node-lib/src/factories/l1_tx_utils.ts @@ -1,5 +1,6 @@ import type { EthSigner } from '@aztec/ethereum/eth-signer'; import { + createDelayer, createL1TxUtilsFromEthSigner as createL1TxUtilsFromEthSignerBase, createL1TxUtilsFromViemWallet as createL1TxUtilsFromViemWalletBase, } from '@aztec/ethereum/l1-tx-utils'; @@ -25,10 +26,11 @@ import { L1TxStore } from '../stores/l1_tx_store.js'; const L1_TX_STORE_NAME = 'l1-tx-utils'; /** - * Creates shared dependencies (logger, store, metrics) for L1TxUtils instances. + * Creates shared dependencies (logger, store, metrics, delayer) for L1TxUtils instances. + * When enableDelayer is set in config, a single shared delayer is created and passed to all instances. */ async function createSharedDeps( - config: DataStoreConfig & { scope?: L1TxScope }, + config: DataStoreConfig & Partial & { scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -46,15 +48,22 @@ async function createSharedDeps( const meter = deps.telemetry.getMeter('L1TxUtils'); const metrics = new L1TxMetrics(meter, config.scope ?? 
'other', logger); - return { logger, store, metrics, dateProvider: deps.dateProvider }; + // Create a single shared delayer for all L1TxUtils instances in this group + const delayer = + config.enableDelayer && config.ethereumSlotDuration !== undefined && deps.dateProvider + ? createDelayer(deps.dateProvider, { ethereumSlotDuration: config.ethereumSlotDuration }) + : undefined; + + return { logger, store, metrics, dateProvider: deps.dateProvider, delayer }; } /** - * Creates L1TxUtils with blobs from multiple Viem wallets, sharing store and metrics. + * Creates L1TxUtils with blobs from multiple Viem wallets, sharing store, metrics, and delayer. */ export async function createL1TxUtilsWithBlobsFromViemWallet( clients: ExtendedViemWalletClient[], - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -64,17 +73,23 @@ export async function createL1TxUtilsWithBlobsFromViemWallet( const sharedDeps = await createSharedDeps(config, deps); return clients.map(client => - createL1TxUtilsWithBlobsFromViemWalletBase(client, sharedDeps, config, config.debugMaxGasLimit), + createL1TxUtilsWithBlobsFromViemWalletBase( + client, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, + config, + config.debugMaxGasLimit, + ), ); } /** - * Creates L1TxUtils with blobs from multiple EthSigners, sharing store and metrics. Removes duplicates + * Creates L1TxUtils with blobs from multiple EthSigners, sharing store, metrics, and delayer. Removes duplicates. 
*/ export async function createL1TxUtilsWithBlobsFromEthSigner( client: ViemClient, signers: EthSigner[], - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -102,16 +117,23 @@ export async function createL1TxUtilsWithBlobsFromEthSigner( } return uniqueSigners.map(signer => - createL1TxUtilsWithBlobsFromEthSignerBase(client, signer, sharedDeps, config, config.debugMaxGasLimit), + createL1TxUtilsWithBlobsFromEthSignerBase( + client, + signer, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, + config, + config.debugMaxGasLimit, + ), ); } /** - * Creates L1TxUtils (without blobs) from multiple Viem wallets, sharing store and metrics. + * Creates L1TxUtils (without blobs) from multiple Viem wallets, sharing store, metrics, and delayer. */ export async function createL1TxUtilsFromViemWalletWithStore( clients: ExtendedViemWalletClient[], - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -121,16 +143,23 @@ export async function createL1TxUtilsFromViemWalletWithStore( ) { const sharedDeps = await createSharedDeps(config, deps); - return clients.map(client => createL1TxUtilsFromViemWalletBase(client, sharedDeps, config)); + return clients.map(client => + createL1TxUtilsFromViemWalletBase( + client, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, + config, + ), + ); } /** - * Creates L1TxUtils (without blobs) from multiple EthSigners, sharing store and metrics. Removes duplicates. + * Creates L1TxUtils (without blobs) from multiple EthSigners, sharing store, metrics, and delayer. 
Removes duplicates. */ export async function createL1TxUtilsFromEthSignerWithStore( client: ViemClient, signers: EthSigner[], - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -158,17 +187,25 @@ export async function createL1TxUtilsFromEthSignerWithStore( ); } - return uniqueSigners.map(signer => createL1TxUtilsFromEthSignerBase(client, signer, sharedDeps, config)); + return uniqueSigners.map(signer => + createL1TxUtilsFromEthSignerBase( + client, + signer, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, + config, + ), + ); } /** - * Creates ForwarderL1TxUtils from multiple Viem wallets, sharing store and metrics. + * Creates ForwarderL1TxUtils from multiple Viem wallets, sharing store, metrics, and delayer. * This wraps all transactions through a forwarder contract for testing purposes. 
*/ export async function createForwarderL1TxUtilsFromViemWallet( clients: ExtendedViemWalletClient[], forwarderAddress: import('@aztec/foundation/eth-address').EthAddress, - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -178,19 +215,26 @@ export async function createForwarderL1TxUtilsFromViemWallet( const sharedDeps = await createSharedDeps(config, deps); return clients.map(client => - createForwarderL1TxUtilsFromViemWalletBase(client, forwarderAddress, sharedDeps, config, config.debugMaxGasLimit), + createForwarderL1TxUtilsFromViemWalletBase( + client, + forwarderAddress, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, + config, + config.debugMaxGasLimit, + ), ); } /** - * Creates ForwarderL1TxUtils from multiple EthSigners, sharing store and metrics. + * Creates ForwarderL1TxUtils from multiple EthSigners, sharing store, metrics, and delayer. * This wraps all transactions through a forwarder contract for testing purposes. 
*/ export async function createForwarderL1TxUtilsFromEthSigner( client: ViemClient, signers: EthSigner[], forwarderAddress: import('@aztec/foundation/eth-address').EthAddress, - config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, + config: DataStoreConfig & + Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -204,7 +248,7 @@ export async function createForwarderL1TxUtilsFromEthSigner( client, signer, forwarderAddress, - sharedDeps, + { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, config, config.debugMaxGasLimit, ), diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 9f1d93d36067..d500d33fd4e7 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -3,6 +3,7 @@ import { EpochCache } from '@aztec/epoch-cache'; import { isAnvilTestChain } from '@aztec/ethereum/chain'; import { getPublicClient } from '@aztec/ethereum/client'; import { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; +import type { Delayer } from '@aztec/ethereum/l1-tx-utils'; import { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; import { PublisherManager } from '@aztec/ethereum/publisher-manager'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -33,6 +34,7 @@ export class SequencerClient { protected checkpointsBuilder: FullNodeCheckpointsBuilder, protected validatorClient?: ValidatorClient, private l1Metrics?: L1Metrics, + private delayer_?: Delayer, ) {} /** @@ -171,9 +173,12 @@ export class SequencerClient { log, ); - await sequencer.init(); + sequencer.init(); - return new SequencerClient(publisherManager, sequencer, checkpointsBuilder, validatorClient, l1Metrics); + // Extract the shared delayer 
from the first L1TxUtils instance (all instances share the same delayer) + const delayer = l1TxUtils[0]?.delayer; + + return new SequencerClient(publisherManager, sequencer, checkpointsBuilder, validatorClient, l1Metrics, delayer); } /** @@ -208,6 +213,11 @@ export class SequencerClient { return this.sequencer; } + /** Returns the shared tx delayer for sequencer L1 txs, if enabled. Test-only. */ + getDelayer(): Delayer | undefined { + return this.delayer_; + } + get validatorAddresses(): EthAddress[] | undefined { return this.sequencer.getValidatorAddresses(); } diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts index 2942b7ec3be1..57d74d12bde2 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts @@ -89,4 +89,9 @@ export class SequencerPublisherFactory { publisher, }; } + + /** Interrupts all publishers managed by this factory. Used during sequencer shutdown. 
*/ + public interruptAll(): void { + this.deps.publisherManager.interrupt(); + } } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index fd14e9b12e4a..de3dd62cd897 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -75,14 +75,6 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter { this.log.info(`Stopping sequencer`); this.setState(SequencerState.STOPPING, undefined, { force: true }); - this.publisher?.interrupt(); + this.publisherFactory.interruptAll(); await this.runningPromise?.stop(); this.setState(SequencerState.STOPPED, undefined, { force: true }); this.log.info('Stopped sequencer'); @@ -326,7 +317,6 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter Date: Wed, 11 Feb 2026 15:09:22 +0000 Subject: [PATCH 03/62] chore: log L1 trace errors once in debug Fixes [A-393](https://linear.app/aztec-labs/issue/A-393/non-debug-tx-nodes-spam-logs-when-encountering-bad-blocks) --- .../src/l1/calldata_retriever.test.ts | 27 +++++++++++++++++++ .../archiver/src/l1/calldata_retriever.ts | 22 +++++++++++++-- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/yarn-project/archiver/src/l1/calldata_retriever.test.ts b/yarn-project/archiver/src/l1/calldata_retriever.test.ts index 9dd7f5e7187a..0cb5bdf04650 100644 --- a/yarn-project/archiver/src/l1/calldata_retriever.test.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.test.ts @@ -14,6 +14,7 @@ import { GasFees } from '@aztec/stdlib/gas'; import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type Hex, @@ -1017,6 +1018,32 @@ describe('CalldataRetriever', () => { 
expect(debugClient.request).toHaveBeenCalledTimes(2); }); + it('should log trace+debug failure warn only once per tx hash', async () => { + CalldataRetriever.resetTraceFailureWarnedForTesting(); + const warnSpy = jest.spyOn(logger, 'warn'); + + // First attempt: both trace and debug fail + debugClient.request.mockRejectedValueOnce(new Error('trace_transaction not supported')); + debugClient.request.mockRejectedValueOnce(new Error('debug_traceTransaction not supported')); + + await expect(retriever.extractCalldataViaTrace(txHash)).rejects.toThrow( + 'Failed to trace transaction ' + txHash + ' to extract propose calldata', + ); + expect(warnSpy).toHaveBeenCalledTimes(1); + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining('Cannot decode L1 tx')); + + // Second attempt: same tx, both fail again - should not log warn again + debugClient.request.mockRejectedValueOnce(new Error('trace_transaction not supported')); + debugClient.request.mockRejectedValueOnce(new Error('debug_traceTransaction not supported')); + + await expect(retriever.extractCalldataViaTrace(txHash)).rejects.toThrow( + 'Failed to trace transaction ' + txHash + ' to extract propose calldata', + ); + expect(warnSpy).toHaveBeenCalledTimes(1); + + warnSpy.mockRestore(); + }); + it('should throw when no propose calls found', async () => { // Mock debug client to return empty trace debugClient.request.mockResolvedValueOnce([]); diff --git a/yarn-project/archiver/src/l1/calldata_retriever.ts b/yarn-project/archiver/src/l1/calldata_retriever.ts index 84f227b9617b..160123c13338 100644 --- a/yarn-project/archiver/src/l1/calldata_retriever.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.ts @@ -39,6 +39,14 @@ import type { CallInfo } from './types.js'; * in order to reconstruct an L2 block header. */ export class CalldataRetriever { + /** Tx hashes we've already logged for trace+debug failure (log once per tx per process). 
*/ + private static readonly traceFailureWarnedTxHashes = new Set(); + + /** Clears the trace-failure warned set. For testing only. */ + static resetTraceFailureWarnedForTesting(): void { + CalldataRetriever.traceFailureWarnedTxHashes.clear(); + } + /** Pre-computed valid contract calls for validation */ private readonly validContractCalls: ValidContractCall[]; @@ -313,7 +321,8 @@ export class CalldataRetriever { this.logger.debug(`Successfully traced using trace_transaction, found ${calls.length} calls`); } catch (err) { const traceError = err instanceof Error ? err : new Error(String(err)); - this.logger.verbose(`Failed trace_transaction for ${txHash}`, { traceError }); + this.logger.verbose(`Failed trace_transaction for ${txHash}: ${traceError.message}`); + this.logger.debug(`Trace failure details for ${txHash}`, { traceError }); try { // Fall back to debug_traceTransaction (Geth RPC) @@ -322,7 +331,16 @@ export class CalldataRetriever { this.logger.debug(`Successfully traced using debug_traceTransaction, found ${calls.length} calls`); } catch (debugErr) { const debugError = debugErr instanceof Error ? debugErr : new Error(String(debugErr)); - this.logger.warn(`All tracing methods failed for tx ${txHash}`, { + // Log once per tx so we don't spam on every sync cycle when sync point doesn't advance + if (!CalldataRetriever.traceFailureWarnedTxHashes.has(txHash)) { + CalldataRetriever.traceFailureWarnedTxHashes.add(txHash); + this.logger.warn( + `Cannot decode L1 tx ${txHash}: trace and debug RPC failed or unavailable. 
` + + `trace_transaction: ${traceError.message}; debug_traceTransaction: ${debugError.message}`, + ); + } + // Full error objects can be very long; keep at debug only + this.logger.debug(`Trace/debug failure details for tx ${txHash}`, { traceError, debugError, txHash, From 9f0518efa889100b1cb03c04df247ca28e4672f1 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 11 Feb 2026 17:52:32 +0000 Subject: [PATCH 04/62] remove duplicate function --- .../foundation/src/log/bigint-utils.ts | 22 +++++++++++++++ .../src/log/gcloud-logger-config.ts | 27 +++---------------- .../foundation/src/log/pino-logger.ts | 24 +---------------- 3 files changed, 26 insertions(+), 47 deletions(-) create mode 100644 yarn-project/foundation/src/log/bigint-utils.ts diff --git a/yarn-project/foundation/src/log/bigint-utils.ts b/yarn-project/foundation/src/log/bigint-utils.ts new file mode 100644 index 000000000000..6cc94101ac2f --- /dev/null +++ b/yarn-project/foundation/src/log/bigint-utils.ts @@ -0,0 +1,22 @@ +/** + * Converts bigint values to strings recursively in a log object to avoid serialization issues. 
+ */ +export function convertBigintsToStrings(obj: unknown): unknown { + if (typeof obj === 'bigint') { + return String(obj); + } + + if (Array.isArray(obj)) { + return obj.map(item => convertBigintsToStrings(item)); + } + + if (obj !== null && typeof obj === 'object') { + const result: Record = {}; + for (const key in obj) { + result[key] = convertBigintsToStrings((obj as Record)[key]); + } + return result; + } + + return obj; +} diff --git a/yarn-project/foundation/src/log/gcloud-logger-config.ts b/yarn-project/foundation/src/log/gcloud-logger-config.ts index 7bff7a334240..2e036212af71 100644 --- a/yarn-project/foundation/src/log/gcloud-logger-config.ts +++ b/yarn-project/foundation/src/log/gcloud-logger-config.ts @@ -1,34 +1,13 @@ import type { pino } from 'pino'; +import { convertBigintsToStrings } from './bigint-utils.js'; + /* eslint-disable camelcase */ const GOOGLE_CLOUD_TRACE_ID = 'logging.googleapis.com/trace'; const GOOGLE_CLOUD_SPAN_ID = 'logging.googleapis.com/spanId'; const GOOGLE_CLOUD_TRACE_SAMPLED = 'logging.googleapis.com/trace_sampled'; -/** - * Converts bigint values to strings recursively in a log object to avoid serialization issues. - */ -function convertBigintsToStrings(obj: unknown): unknown { - if (typeof obj === 'bigint') { - return String(obj); - } - - if (Array.isArray(obj)) { - return obj.map(item => convertBigintsToStrings(item)); - } - - if (obj !== null && typeof obj === 'object') { - const result: Record = {}; - for (const key in obj) { - result[key] = convertBigintsToStrings((obj as Record)[key]); - } - return result; - } - - return obj; -} - /** * Pino configuration for google cloud observability. Tweaks message and timestamp, * adds trace context attributes, and injects severity level. 
@@ -39,7 +18,7 @@ export const GoogleCloudLoggerConfig = { formatters: { log(object: Record): Record { // Convert bigints to strings recursively to avoid serialization issues - convertBigintsToStrings(object) as Record; + object = convertBigintsToStrings(object) as Record; // Add trace context attributes following Cloud Logging structured log format described // in https://cloud.google.com/logging/docs/structured-logging#special-payload-fields diff --git a/yarn-project/foundation/src/log/pino-logger.ts b/yarn-project/foundation/src/log/pino-logger.ts index bff4deca0c49..2395cc908ec0 100644 --- a/yarn-project/foundation/src/log/pino-logger.ts +++ b/yarn-project/foundation/src/log/pino-logger.ts @@ -7,6 +7,7 @@ import { inspect } from 'util'; import { compactArray } from '../collection/array.js'; import type { EnvVar } from '../config/index.js'; import { parseBooleanEnv } from '../config/parse-env.js'; +import { convertBigintsToStrings } from './bigint-utils.js'; import { GoogleCloudLoggerConfig } from './gcloud-logger-config.js'; import { getLogLevelFromFilters, parseEnv } from './log-filters.js'; import type { LogLevel } from './log-levels.js'; @@ -134,29 +135,6 @@ const customLevels = { verbose: 25 }; // Global pino options, tweaked for google cloud if running there. const useGcloudLogging = parseBooleanEnv(process.env['USE_GCLOUD_LOGGING' satisfies EnvVar]); -/** - * Converts bigint values to strings recursively in a log object to avoid serialization issues. 
- */ -function convertBigintsToStrings(obj: unknown): unknown { - if (typeof obj === 'bigint') { - return String(obj); - } - - if (Array.isArray(obj)) { - return obj.map(item => convertBigintsToStrings(item)); - } - - if (obj !== null && typeof obj === 'object') { - const result: Record = {}; - for (const key in obj) { - result[key] = convertBigintsToStrings((obj as Record)[key]); - } - return result; - } - - return obj; -} - const redactedPaths = [ 'validatorPrivateKeys', // for both the validator and the prover From 72174ab00fc14787137e7d86754945e7ef7b1c46 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 11 Feb 2026 11:03:27 -0300 Subject: [PATCH 05/62] refactor(ethereum): simplify L1TxUtils factories and internalize delayer - Unify ViemWallet/EthSigner factory pairs into single functions using L1SignerSource union type (6 low-level factories -> 2) - Delete L1TxUtilsWithBlobs class; blob support is now opt-in via kzg parameter on L1TxUtils constructor - Internalize delayer: L1TxUtils constructor wraps its own client when enableDelayer is set in config, deleting applyDelayer and withDelayer - Move ethereumSlotDuration from deps threading into L1TxUtilsConfig - Simplify node-lib factories from 6 to 4 Co-Authored-By: Claude Opus 4.6 --- yarn-project/aztec-node/package.json | 1 + .../aztec-node/src/aztec-node/server.ts | 14 +- yarn-project/aztec-node/tsconfig.json | 3 + .../cli/src/cmds/l1/update_l1_validators.ts | 10 +- .../end-to-end/src/e2e_debug_trace.test.ts | 6 +- .../end-to-end/src/e2e_epochs/epochs_test.ts | 16 +- .../e2e_l1_publisher/e2e_l1_publisher.test.ts | 10 +- .../end-to-end/src/e2e_multi_eoa.test.ts | 4 +- .../end-to-end/src/e2e_p2p/add_rollup.test.ts | 4 +- .../end-to-end/src/e2e_p2p/p2p_network.ts | 4 +- .../src/e2e_p2p/slash_veto_demo.test.ts | 6 +- .../upgrade_governance_proposer.test.ts | 4 +- .../end-to-end/src/e2e_synching.test.ts | 7 +- yarn-project/end-to-end/src/fixtures/setup.ts | 3 +- .../upgrade_governance_proposer.test.ts | 4 
+- .../spartan/upgrade_rollup_version.test.ts | 4 +- yarn-project/ethereum/src/config.ts | 3 +- .../src/contracts/fee_asset_handler.test.ts | 4 +- .../ethereum/src/contracts/governance.ts | 6 +- .../ethereum/src/contracts/multicall.test.ts | 4 +- .../ethereum/src/deploy_aztec_l1_contracts.ts | 21 ++- .../ethereum/src/deploy_l1_contract.ts | 6 +- .../ethereum/src/l1_tx_utils/config.ts | 7 + .../ethereum/src/l1_tx_utils/factory.ts | 71 ++++---- .../l1_tx_utils/forwarder_l1_tx_utils.test.ts | 3 + .../src/l1_tx_utils/forwarder_l1_tx_utils.ts | 106 +++++------ .../ethereum/src/l1_tx_utils/index-blobs.ts | 4 +- .../src/l1_tx_utils/l1_tx_utils.test.ts | 20 ++- .../ethereum/src/l1_tx_utils/l1_tx_utils.ts | 41 ++++- .../src/l1_tx_utils/l1_tx_utils_with_blobs.ts | 86 --------- .../ethereum/src/l1_tx_utils/tx_delayer.ts | 74 +++----- .../ethereum/src/test/tx_delayer.test.ts | 5 +- .../ethereum/src/test/upgrade_utils.ts | 4 +- .../node-lib/src/factories/l1_tx_utils.ts | 166 ++++-------------- .../factories/l1_tx_utils_integration.test.ts | 9 +- yarn-project/prover-node/src/factory.ts | 12 +- .../src/client/sequencer-client.ts | 7 +- .../sequencer-publisher-factory.test.ts | 10 +- .../publisher/sequencer-publisher-factory.ts | 8 +- .../src/publisher/sequencer-publisher.test.ts | 15 +- .../src/publisher/sequencer-publisher.ts | 6 +- .../checkpoint_voter.ha.integration.test.ts | 8 +- .../sequencer-client/src/test/index.ts | 4 +- yarn-project/yarn.lock | 1 + 44 files changed, 319 insertions(+), 492 deletions(-) delete mode 100644 yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index 646153664c8c..77fb0c217327 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -68,6 +68,7 @@ "@aztec/archiver": "workspace:^", "@aztec/bb-prover": "workspace:^", "@aztec/blob-client": "workspace:^", + "@aztec/blob-lib": "workspace:^", "@aztec/constants": 
"workspace:^", "@aztec/epoch-cache": "workspace:^", "@aztec/ethereum": "workspace:^", diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index c1ee8481c5bf..539991cbfdf5 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1,6 +1,7 @@ import { Archiver, createArchiver } from '@aztec/archiver'; import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; import { type BlobClientInterface, createBlobClientWithFileStores } from '@aztec/blob-client/client'; +import { Blob } from '@aztec/blob-lib'; import { ARCHIVE_HEIGHT, type L1_TO_L2_MSG_TREE_HEIGHT, type NOTE_HASH_TREE_HEIGHT } from '@aztec/constants'; import { EpochCache, type EpochCacheInterface } from '@aztec/epoch-cache'; import { createEthereumChain } from '@aztec/ethereum/chain'; @@ -18,10 +19,7 @@ import { DateProvider, Timer } from '@aztec/foundation/timer'; import { MembershipWitness, SiblingPath } from '@aztec/foundation/trees'; import { KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore'; import { trySnapshotSync, uploadSnapshot } from '@aztec/node-lib/actions'; -import { - createForwarderL1TxUtilsFromEthSigner, - createL1TxUtilsWithBlobsFromEthSigner, -} from '@aztec/node-lib/factories'; +import { createForwarderL1TxUtilsFromSigners, createL1TxUtilsFromSigners } from '@aztec/node-lib/factories'; import { type P2P, type P2PClientDeps, createP2PClient, getDefaultAllowedSetupFunctions } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { GlobalVariableBuilder, SequencerClient, type SequencerPublisher } from '@aztec/sequencer-client'; @@ -413,18 +411,18 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { await slasherClient.start(); const l1TxUtils = config.publisherForwarderAddress - ? await createForwarderL1TxUtilsFromEthSigner( + ? 
await createForwarderL1TxUtilsFromSigners( publicClient, keyStoreManager!.createAllValidatorPublisherSigners(), config.publisherForwarderAddress, { ...config, scope: 'sequencer' }, - { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider }, + { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider, kzg: Blob.getViemKzgInstance() }, ) - : await createL1TxUtilsWithBlobsFromEthSigner( + : await createL1TxUtilsFromSigners( publicClient, keyStoreManager!.createAllValidatorPublisherSigners(), { ...config, scope: 'sequencer' }, - { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider }, + { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider, kzg: Blob.getViemKzgInstance() }, ); // Create and start the sequencer client diff --git a/yarn-project/aztec-node/tsconfig.json b/yarn-project/aztec-node/tsconfig.json index 90a91bf65be1..e640323316a3 100644 --- a/yarn-project/aztec-node/tsconfig.json +++ b/yarn-project/aztec-node/tsconfig.json @@ -15,6 +15,9 @@ { "path": "../blob-client" }, + { + "path": "../blob-lib" + }, { "path": "../constants" }, diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index 6bb8ed4af8ac..6329dcdba50d 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -2,7 +2,7 @@ import { createEthereumChain, isAnvilTestChain } from '@aztec/ethereum/chain'; import { createExtendedL1Client, getPublicClient } from '@aztec/ethereum/client'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { GSEContract, RollupContract } from '@aztec/ethereum/contracts'; -import { createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { EthCheatCodes } from '@aztec/ethereum/test'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { LogFn, Logger } from 
'@aztec/foundation/log'; @@ -88,7 +88,7 @@ export async function addL1Validator({ const gse = new GSEContract(l1Client, gseAddress); const registrationTuple = await gse.makeRegistrationTuple(blsSecretKey); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); const proofParamsObj = ZkPassportProofParams.fromBuffer(proofParams); // Step 1: Claim STK tokens from the faucet @@ -194,7 +194,7 @@ export async function addL1ValidatorViaRollup({ const registrationTuple = await gse.makeRegistrationTuple(blsSecretKey); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); const { receipt } = await l1TxUtils.sendAndMonitorTransaction({ to: rollupAddress.toString(), @@ -241,7 +241,7 @@ export async function removeL1Validator({ const account = getAccount(privateKey, mnemonic); const chain = createEthereumChain(rpcUrls, chainId); const l1Client = createExtendedL1Client(rpcUrls, account, chain.chainInfo); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); dualLog(`Removing validator ${validatorAddress.toString()} from rollup ${rollupAddress.toString()}`); const { receipt } = await l1TxUtils.sendAndMonitorTransaction({ @@ -268,7 +268,7 @@ export async function pruneRollup({ const account = getAccount(privateKey, mnemonic); const chain = createEthereumChain(rpcUrls, chainId); const l1Client = createExtendedL1Client(rpcUrls, account, chain.chainInfo); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); dualLog(`Trying prune`); const { receipt } = await l1TxUtils.sendAndMonitorTransaction({ diff --git a/yarn-project/end-to-end/src/e2e_debug_trace.test.ts 
b/yarn-project/end-to-end/src/e2e_debug_trace.test.ts index 6a61339bd083..71f950086660 100644 --- a/yarn-project/end-to-end/src/e2e_debug_trace.test.ts +++ b/yarn-project/end-to-end/src/e2e_debug_trace.test.ts @@ -1,7 +1,7 @@ import type { AztecNodeConfig } from '@aztec/aztec-node'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { FORWARDER_ABI, deployForwarderProxy } from '@aztec/ethereum/forwarder-proxy'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherManager } from '@aztec/ethereum/publisher-manager'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -79,7 +79,7 @@ describe('e2e_debug_trace_transaction', () => { // In this test we deploy a simple forwarder contract to L1, this serves as an additional proxy it('can process blocks using debug trace', async () => { // We intercept calls to sendAndMonitorTransaction to forward inner calls via the forwarder - const l1Utils: L1TxUtilsWithBlobs[] = (publisherManager as any).publishers; + const l1Utils: L1TxUtils[] = (publisherManager as any).publishers; // Intercept sendAndMonitorTransaction to access blobInputs directly const originalSendAndMonitor = l1Utils[0].sendAndMonitorTransaction.bind(l1Utils[0]); @@ -146,7 +146,7 @@ describe('e2e_debug_trace_transaction', () => { // 2. Duplicate the inner call to the rollup // 3. Corrupt the first call so it reverts (with allowFailure: true) // 4. 
Keep the second call intact so it succeeds - const l1Utils: L1TxUtilsWithBlobs[] = (publisherManager as any).publishers; + const l1Utils: L1TxUtils[] = (publisherManager as any).publishers; const originalSendAndMonitor = l1Utils[0].sendAndMonitorTransaction.bind(l1Utils[0]); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index 8bdf61ea8d4f..a1cbb145722a 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -9,7 +9,7 @@ import { EpochCache } from '@aztec/epoch-cache'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { DefaultL1ContractsConfig } from '@aztec/ethereum/config'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { type Delayer, waitUntilL1Timestamp, withDelayer } from '@aztec/ethereum/l1-tx-utils'; +import { type Delayer, createDelayer, waitUntilL1Timestamp, wrapClientWithDelayer } from '@aztec/ethereum/l1-tx-utils'; import { ChainMonitor } from '@aztec/ethereum/test'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; @@ -371,15 +371,13 @@ export class EpochsTestContext { /** Creates an L1 client using a fresh account with funds from anvil, with a tx delayer already set up. 
*/ public async createL1Client() { - const { client, delayer } = withDelayer( - createExtendedL1Client( - [...this.l1Client.chain.rpcUrls.default.http], - privateKeyToAccount(this.getNextPrivateKey()), - this.l1Client.chain, - ), - this.context.dateProvider, - { ethereumSlotDuration: this.L1_BLOCK_TIME_IN_S }, + const rawClient = createExtendedL1Client( + [...this.l1Client.chain.rpcUrls.default.http], + privateKeyToAccount(this.getNextPrivateKey()), + this.l1Client.chain, ); + const delayer = createDelayer(this.context.dateProvider, { ethereumSlotDuration: this.L1_BLOCK_TIME_IN_S }); + const client = wrapClientWithDelayer(rawClient, delayer); expect(await client.getBalance({ address: client.account.address })).toBeGreaterThan(0n); return { client, delayer }; } diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 593a096647f2..28489eb537f0 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -8,6 +8,7 @@ import { createBlobClient } from '@aztec/blob-client/client'; import { BatchedBlob, BatchedBlobAccumulator, + Blob, getBlobsPerL1Block, getPrefixedEthBlobCommitments, } from '@aztec/blob-lib'; @@ -24,8 +25,7 @@ import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; import { type DeployAztecL1ContractsArgs, deployAztecL1Contracts } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; -import { TxUtilsState } from '@aztec/ethereum/l1-tx-utils'; -import { createL1TxUtilsWithBlobsFromViemWallet } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import { TxUtilsState, createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { EthCheatCodesWithState, RollupCheatCodes, 
startAnvil } from '@aztec/ethereum/test'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { range } from '@aztec/foundation/array'; @@ -255,7 +255,11 @@ describe('L1Publisher integration', () => { await worldStateSynchronizer.start(); const sequencerL1Client = createExtendedL1Client(config.l1RpcUrls, sequencerPK, foundry); - const l1TxUtils = createL1TxUtilsWithBlobsFromViemWallet(sequencerL1Client, { logger, dateProvider }, config); + const l1TxUtils = createL1TxUtils( + sequencerL1Client, + { logger, dateProvider, kzg: Blob.getViemKzgInstance() }, + config, + ); const rollupContract = new RollupContract(sequencerL1Client, l1ContractAddresses.rollupAddress.toString()); const slashingProposerContract = await rollupContract.getSlashingProposer(); governanceProposerContract = new GovernanceProposerContract( diff --git a/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts b/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts index 39ecf816d95d..7fb0b71684b4 100644 --- a/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts +++ b/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts @@ -4,7 +4,7 @@ import { Fr } from '@aztec/aztec.js/fields'; import type { Logger } from '@aztec/aztec.js/log'; import { waitForTx } from '@aztec/aztec.js/node'; import { EthCheatCodes } from '@aztec/aztec/testing'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherManager } from '@aztec/ethereum/publisher-manager'; import type { ViemClient } from '@aztec/ethereum/types'; import { times } from '@aztec/foundation/collection'; @@ -116,7 +116,7 @@ describe('e2e_multi_eoa', () => { from: defaultAccountAddress, }); - const l1Utils: L1TxUtilsWithBlobs[] = (publisherManager as any).publishers; + const l1Utils: L1TxUtils[] = (publisherManager as any).publishers; const blockedSender = l1Utils[expectedFirstSender].getSenderAddress(); const blockedTxs: Hex[] = []; 
diff --git a/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts b/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts index 24f282843330..385b93fcd240 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts @@ -9,7 +9,7 @@ import { FeeAssetHandlerContract, RegistryContract, RollupContract } from '@azte import { deployRollupForUpgrade } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; -import { L1TxUtils, createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { L1TxUtils, createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { retryUntil } from '@aztec/foundation/retry'; @@ -88,7 +88,7 @@ describe('e2e_p2p_add_rollup', () => { await t.applyBaseSetup(); await t.removeInitialNode(); - l1TxUtils = createL1TxUtilsFromViemWallet(t.ctx.deployL1ContractsValues.l1Client); + l1TxUtils = createL1TxUtils(t.ctx.deployL1ContractsValues.l1Client); t.ctx.watcher.setIsMarkingAsProven(false); }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 4dd5cefce8fd..2c2be1afd048 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -12,7 +12,7 @@ import { import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract'; import { MultiAdderArtifact } from '@aztec/ethereum/l1-artifacts'; -import { createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { ChainMonitor } from 
'@aztec/ethereum/test'; import type { ExtendedViemWalletClient, ViemClient } from '@aztec/ethereum/types'; import { EpochNumber } from '@aztec/foundation/branded-types'; @@ -343,7 +343,7 @@ export class P2PNetworkTest { } private async _sendDummyTx(l1Client: ExtendedViemWalletClient) { - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client); + const l1TxUtils = createL1TxUtils(l1Client); return await l1TxUtils.sendAndMonitorTransaction({ to: l1Client.account!.address, value: 1n, diff --git a/yarn-project/end-to-end/src/e2e_p2p/slash_veto_demo.test.ts b/yarn-project/end-to-end/src/e2e_p2p/slash_veto_demo.test.ts index f7cd63e5bfc0..532b94e0dd98 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/slash_veto_demo.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/slash_veto_demo.test.ts @@ -13,7 +13,7 @@ import { SlasherArtifact, TallySlashingProposerArtifact, } from '@aztec/ethereum/l1-artifacts'; -import { L1TxUtils, createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { L1TxUtils, createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { tryJsonStringify } from '@aztec/foundation/json-rpc'; import { promiseWithResolvers } from '@aztec/foundation/promise'; @@ -115,7 +115,7 @@ describe('veto slash', () => { t.ctx.aztecNodeConfig.l1RpcUrls, bufferToHex(getPrivateKeyFromIndex(VETOER_PRIVATE_KEY_INDEX)!), ); - vetoerL1TxUtils = createL1TxUtilsFromViemWallet(vetoerL1Client, { + vetoerL1TxUtils = createL1TxUtils(vetoerL1Client, { logger: t.logger, dateProvider: t.ctx.dateProvider, }); @@ -199,7 +199,7 @@ describe('veto slash', () => { } debugLogger.info(`\n\ninitializing slasher with proposer: ${proposer}\n\n`); - const txUtils = createL1TxUtilsFromViemWallet(deployerClient, { + const txUtils = createL1TxUtils(deployerClient, { logger: t.logger, dateProvider: t.ctx.dateProvider, }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts 
b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts index 0eed2f499a95..d523ef582e5b 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts @@ -1,7 +1,7 @@ import type { AztecNodeService } from '@aztec/aztec-node'; import { RollupContract } from '@aztec/ethereum/contracts'; import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract'; -import { L1TxUtils, createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { L1TxUtils, createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { SlotNumber } from '@aztec/foundation/branded-types'; import { sleep } from '@aztec/foundation/sleep'; import { @@ -60,7 +60,7 @@ describe('e2e_p2p_governance_proposer', () => { await t.setup(); await t.applyBaseSetup(); - l1TxUtils = createL1TxUtilsFromViemWallet(t.ctx.deployL1ContractsValues.l1Client); + l1TxUtils = createL1TxUtils(t.ctx.deployL1ContractsValues.l1Client); }); afterEach(async () => { diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index d81037b0a638..db4ebe5a78ae 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -40,10 +40,11 @@ import { type Logger, createLogger } from '@aztec/aztec.js/log'; import { waitForTx } from '@aztec/aztec.js/node'; import { AnvilTestWatcher } from '@aztec/aztec/testing'; import { createBlobClientWithFileStores } from '@aztec/blob-client/client'; +import { Blob } from '@aztec/blob-lib'; import { EpochCache } from '@aztec/epoch-cache'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { EmpireSlashingProposerContract, GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; -import { createL1TxUtilsWithBlobsFromViemWallet } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import { createL1TxUtils } from 
'@aztec/ethereum/l1-tx-utils'; import { CheckpointNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; import { Signature } from '@aztec/foundation/eth-signature'; @@ -409,9 +410,9 @@ describe('e2e_synching', () => { const sequencerPK: `0x${string}` = `0x${getPrivateKeyFromIndex(0)!.toString('hex')}`; - const l1TxUtils = createL1TxUtilsWithBlobsFromViemWallet( + const l1TxUtils = createL1TxUtils( deployL1ContractsValues.l1Client, - { logger, dateProvider }, + { logger, dateProvider, kzg: Blob.getViemKzgInstance() }, config, ); const rollupAddress = deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(); diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index f4b9da66393b..01f7d83149a1 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ -286,7 +286,8 @@ export async function setup( config.realProofs = !!opts.realProofs; // Only enforce the time table if requested config.enforceTimeTable = !!opts.enforceTimeTable; - config.enableDelayer = true; + // Default to enabling the tx delayer unless explicitly disabled + config.enableDelayer = config.enableDelayer ?? 
true; config.listenAddress = '127.0.0.1'; const logger = getLogger(); diff --git a/yarn-project/end-to-end/src/spartan/upgrade_governance_proposer.test.ts b/yarn-project/end-to-end/src/spartan/upgrade_governance_proposer.test.ts index 476584522266..242db2945d6d 100644 --- a/yarn-project/end-to-end/src/spartan/upgrade_governance_proposer.test.ts +++ b/yarn-project/end-to-end/src/spartan/upgrade_governance_proposer.test.ts @@ -4,7 +4,7 @@ import { createEthereumChain } from '@aztec/ethereum/chain'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract'; -import { createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { NewGovernanceProposerPayloadAbi } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadAbi'; @@ -161,7 +161,7 @@ describe('spartan_upgrade_governance_proposer', () => { debugLogger.info(`Executing proposal ${info.round}`); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); const { receipt } = await governanceProposer.submitRoundWinner(executableRound, l1TxUtils); expect(receipt).toBeDefined(); expect(receipt.status).toEqual('success'); diff --git a/yarn-project/end-to-end/src/spartan/upgrade_rollup_version.test.ts b/yarn-project/end-to-end/src/spartan/upgrade_rollup_version.test.ts index bec7c674e878..02eaa828162a 100644 --- a/yarn-project/end-to-end/src/spartan/upgrade_rollup_version.test.ts +++ b/yarn-project/end-to-end/src/spartan/upgrade_rollup_version.test.ts @@ -3,7 +3,7 @@ import { createEthereumChain } from '@aztec/ethereum/chain'; import { createExtendedL1Client } from 
'@aztec/ethereum/client'; import { GovernanceProposerContract, RegistryContract, RollupContract } from '@aztec/ethereum/contracts'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; -import { createL1TxUtilsFromViemWallet } from '@aztec/ethereum/l1-tx-utils'; +import { createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import { retryUntil } from '@aztec/foundation/retry'; @@ -264,7 +264,7 @@ describe('spartan_upgrade_rollup_version', () => { ({ round } = await govInfo()); } - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client, { logger: debugLogger }); + const l1TxUtils = createL1TxUtils(l1Client, { logger: debugLogger }); const { receipt: proposerReceipt, proposalId } = await governanceProposer.submitRoundWinner( executableRound, l1TxUtils, diff --git a/yarn-project/ethereum/src/config.ts b/yarn-project/ethereum/src/config.ts index c9ca44d1ca38..5c271cd915cb 100644 --- a/yarn-project/ethereum/src/config.ts +++ b/yarn-project/ethereum/src/config.ts @@ -6,6 +6,7 @@ import { getConfigFromMappings, getDefaultConfig, numberConfigHelper, + omitConfigMappings, optionalNumberConfigHelper, } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -238,7 +239,7 @@ export const l1ContractsConfigMappings: ConfigMappingsType = description: 'The delay before a validator can exit the set', ...numberConfigHelper(l1ContractsDefaultEnv.AZTEC_EXIT_DELAY_SECONDS), }, - ...l1TxUtilsConfigMappings, + ...omitConfigMappings(l1TxUtilsConfigMappings, ['ethereumSlotDuration']), }; /** diff --git a/yarn-project/ethereum/src/contracts/fee_asset_handler.test.ts b/yarn-project/ethereum/src/contracts/fee_asset_handler.test.ts index d74c6b10e1c7..f49407b1c08d 100644 --- a/yarn-project/ethereum/src/contracts/fee_asset_handler.test.ts +++ b/yarn-project/ethereum/src/contracts/fee_asset_handler.test.ts @@ 
-12,7 +12,7 @@ import { foundry } from 'viem/chains'; import { createExtendedL1Client } from '../client.js'; import { DefaultL1ContractsConfig } from '../config.js'; import { deployAztecL1Contracts } from '../deploy_aztec_l1_contracts.js'; -import { L1TxUtils, createL1TxUtilsFromViemWallet } from '../l1_tx_utils/index.js'; +import { L1TxUtils, createL1TxUtils } from '../l1_tx_utils/index.js'; import { startAnvil } from '../test/start_anvil.js'; import type { ExtendedViemWalletClient } from '../types.js'; import { FeeAssetHandlerContract } from './fee_asset_handler.js'; @@ -48,7 +48,7 @@ describe('FeeAssetHandler', () => { }); // Since the registry cannot "see" the slash factory, we omit it from the addresses for this test const deployedAddresses = omit(deployed.l1ContractAddresses, 'slashFactoryAddress'); - txUtils = createL1TxUtilsFromViemWallet(l1Client, { logger }); + txUtils = createL1TxUtils(l1Client, { logger }); feeAssetHandler = new FeeAssetHandlerContract(l1Client, deployedAddresses.feeAssetHandlerAddress!); feeAsset = getContract({ address: deployedAddresses.feeJuiceAddress!.toString(), diff --git a/yarn-project/ethereum/src/contracts/governance.ts b/yarn-project/ethereum/src/contracts/governance.ts index ad4c38b32ef4..d4aa7b396e1e 100644 --- a/yarn-project/ethereum/src/contracts/governance.ts +++ b/yarn-project/ethereum/src/contracts/governance.ts @@ -14,7 +14,7 @@ import { } from 'viem'; import type { L1ContractAddresses } from '../l1_contract_addresses.js'; -import { createL1TxUtilsFromViemWallet } from '../l1_tx_utils/index.js'; +import { createL1TxUtils } from '../l1_tx_utils/index.js'; import { type ExtendedViemWalletClient, type ViemClient, isExtendedClient } from '../types.js'; export type L1GovernanceContractAddresses = Pick< @@ -194,7 +194,7 @@ export class GovernanceContract extends ReadOnlyGovernanceContract { retries: number; logger: Logger; }) { - const l1TxUtils = createL1TxUtilsFromViemWallet(this.client, { logger }); + const l1TxUtils = 
createL1TxUtils(this.client, { logger }); const retryDelaySeconds = 12; voteAmount = voteAmount ?? (await this.getPowerForProposal(proposalId)); @@ -252,7 +252,7 @@ export class GovernanceContract extends ReadOnlyGovernanceContract { retries: number; logger: Logger; }) { - const l1TxUtils = createL1TxUtilsFromViemWallet(this.client, { logger }); + const l1TxUtils = createL1TxUtils(this.client, { logger }); const retryDelaySeconds = 12; let success = false; for (let i = 0; i < retries; i++) { diff --git a/yarn-project/ethereum/src/contracts/multicall.test.ts b/yarn-project/ethereum/src/contracts/multicall.test.ts index 391e1107ff3c..650598743b5e 100644 --- a/yarn-project/ethereum/src/contracts/multicall.test.ts +++ b/yarn-project/ethereum/src/contracts/multicall.test.ts @@ -14,7 +14,7 @@ import { createExtendedL1Client } from '../client.js'; import { DefaultL1ContractsConfig } from '../config.js'; import { type DeployAztecL1ContractsReturnType, deployAztecL1Contracts } from '../deploy_aztec_l1_contracts.js'; import { deployL1Contract } from '../deploy_l1_contract.js'; -import { L1TxUtils, createL1TxUtilsFromViemWallet } from '../l1_tx_utils/index.js'; +import { L1TxUtils, createL1TxUtils } from '../l1_tx_utils/index.js'; import { startAnvil } from '../test/start_anvil.js'; import type { ExtendedViemWalletClient } from '../types.js'; import { FormattedViemError } from '../utils.js'; @@ -67,7 +67,7 @@ describe('Multicall3', () => { client: walletClient, }); - l1TxUtils = createL1TxUtilsFromViemWallet(walletClient, { logger }); + l1TxUtils = createL1TxUtils(walletClient, { logger }); const addMinterHash = await tokenContract.write.addMinter([MULTI_CALL_3_ADDRESS], { account: privateKey }); await walletClient.waitForTransactionReceipt({ hash: addMinterHash }); diff --git a/yarn-project/ethereum/src/deploy_aztec_l1_contracts.ts b/yarn-project/ethereum/src/deploy_aztec_l1_contracts.ts index 6497f5c0400e..50ec9d32e54f 100644 --- 
a/yarn-project/ethereum/src/deploy_aztec_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_aztec_l1_contracts.ts @@ -23,7 +23,6 @@ import type { L1ContractsConfig } from './config.js'; import { deployMulticall3 } from './contracts/multicall.js'; import { RollupContract } from './contracts/rollup.js'; import type { L1ContractAddresses } from './l1_contract_addresses.js'; -import type { L1TxUtilsConfig } from './l1_tx_utils/config.js'; import type { ExtendedViemWalletClient } from './types.js'; const logger = createLogger('ethereum:deploy_aztec_l1_contracts'); @@ -491,7 +490,25 @@ export type VerificationRecord = { libraries: VerificationLibraryEntry[]; }; -export interface DeployAztecL1ContractsArgs extends Omit { +export interface DeployAztecL1ContractsArgs + extends Omit< + L1ContractsConfig, + | 'gasLimitBufferPercentage' + | 'maxGwei' + | 'maxBlobGwei' + | 'priorityFeeBumpPercentage' + | 'priorityFeeRetryBumpPercentage' + | 'minimumPriorityFeePerGas' + | 'maxSpeedUpAttempts' + | 'checkIntervalMs' + | 'stallTimeMs' + | 'txTimeoutMs' + | 'cancelTxOnTimeout' + | 'txCancellationFinalTimeoutMs' + | 'txUnseenConsideredDroppedMs' + | 'enableDelayer' + | 'txDelayerMaxInclusionTimeIntoSlot' + > { /** The vk tree root. */ vkTreeRoot: Fr; /** The hash of the protocol contracts. 
*/ diff --git a/yarn-project/ethereum/src/deploy_l1_contract.ts b/yarn-project/ethereum/src/deploy_l1_contract.ts index fce35b26ff71..28228a3c3bf7 100644 --- a/yarn-project/ethereum/src/deploy_l1_contract.ts +++ b/yarn-project/ethereum/src/deploy_l1_contract.ts @@ -24,7 +24,7 @@ import { } from './deploy_aztec_l1_contracts.js'; import { RegisterNewRollupVersionPayloadArtifact } from './l1_artifacts.js'; import { type L1TxUtilsConfig, getL1TxUtilsConfigEnvVars } from './l1_tx_utils/config.js'; -import { createL1TxUtilsFromViemWallet } from './l1_tx_utils/factory.js'; +import { createL1TxUtils } from './l1_tx_utils/factory.js'; import type { L1TxUtils } from './l1_tx_utils/l1_tx_utils.js'; import type { GasPrice, L1TxConfig, L1TxRequest } from './l1_tx_utils/types.js'; import type { ExtendedViemWalletClient } from './types.js'; @@ -46,7 +46,7 @@ export class L1Deployer { private createVerificationJson: boolean = false, ) { this.salt = maybeSalt ? padHex(numberToHex(maybeSalt), { size: 32 }) : undefined; - this.l1TxUtils = createL1TxUtilsFromViemWallet( + this.l1TxUtils = createL1TxUtils( this.client, { logger: this.logger, dateProvider }, { ...this.txUtilsConfig, debugMaxGasLimit: acceleratedTestDeployments }, @@ -179,7 +179,7 @@ export async function deployL1Contract( if (!l1TxUtils) { const config = getL1TxUtilsConfigEnvVars(); - l1TxUtils = createL1TxUtilsFromViemWallet( + l1TxUtils = createL1TxUtils( extendedClient, { logger }, { ...config, debugMaxGasLimit: acceleratedTestDeployments }, diff --git a/yarn-project/ethereum/src/l1_tx_utils/config.ts b/yarn-project/ethereum/src/l1_tx_utils/config.ts index e5f52877cf56..531dc6579f4f 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/config.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/config.ts @@ -65,6 +65,8 @@ export interface L1TxUtilsConfig { enableDelayer?: boolean; /** Max seconds into an L1 slot for tx inclusion. Txs sent later are deferred to next slot. Only used when enableDelayer is true. 
*/ txDelayerMaxInclusionTimeIntoSlot?: number; + /** How many seconds an L1 slot lasts. */ + ethereumSlotDuration?: number; } export const l1TxUtilsConfigMappings: ConfigMappingsType = { @@ -155,6 +157,11 @@ export const l1TxUtilsConfigMappings: ConfigMappingsType = { description: 'Max seconds into L1 slot for tx inclusion when delayer is enabled.', ...optionalNumberConfigHelper(), }, + ethereumSlotDuration: { + env: 'ETHEREUM_SLOT_DURATION', + description: 'How many seconds an L1 slot lasts.', + ...numberConfigHelper(12), + }, }; // We abuse the fact that all mappings above have a non null default value and force-type this to Required diff --git a/yarn-project/ethereum/src/l1_tx_utils/factory.ts b/yarn-project/ethereum/src/l1_tx_utils/factory.ts index 4ea3daa03062..970398f7badb 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/factory.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/factory.ts @@ -1,73 +1,64 @@ +import type { BlobKzgInstance } from '@aztec/blob-lib/types'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { Logger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { TransactionSerializable } from 'viem'; - import type { EthSigner } from '../eth-signer/eth-signer.js'; import type { ExtendedViemWalletClient, ViemClient } from '../types.js'; import type { L1TxUtilsConfig } from './config.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtils } from './l1_tx_utils.js'; import { createViemSigner } from './signer.js'; -import { type Delayer, applyDelayer } from './tx_delayer.js'; +import type { Delayer } from './tx_delayer.js'; import type { SigningCallback } from './types.js'; -export function createL1TxUtilsFromViemWallet( - client: ExtendedViemWalletClient, - deps?: { - logger?: Logger; - dateProvider?: DateProvider; - store?: IL1TxStore; - metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; - delayer?: Delayer; - }, - config?: Partial & { 
debugMaxGasLimit?: boolean }, -): L1TxUtils { - const l1TxUtils = new L1TxUtils( +/** Source of signing capability: either a wallet client or a separate client + signer. */ +export type L1SignerSource = ExtendedViemWalletClient | { client: ViemClient; signer: EthSigner }; + +export function resolveSignerSource(source: L1SignerSource): { + client: ViemClient; + address: EthAddress; + signingCallback: SigningCallback; +} { + if ('account' in source && source.account) { + return { + client: source as ExtendedViemWalletClient, + address: EthAddress.fromString((source as ExtendedViemWalletClient).account.address), + signingCallback: createViemSigner(source as ExtendedViemWalletClient), + }; + } + const { client, signer } = source as { client: ViemClient; signer: EthSigner }; + return { client, - EthAddress.fromString(client.account.address), - createViemSigner(client), - deps?.logger, - deps?.dateProvider, - config, - config?.debugMaxGasLimit ?? false, - deps?.store, - deps?.metrics, - ); - applyDelayer(l1TxUtils, config ?? 
{}, deps?.ethereumSlotDuration, deps?.delayer); - return l1TxUtils; + address: signer.address, + signingCallback: async (tx, _addr) => (await signer.signTransaction(tx)).toViemTransactionSignature(), + }; } -export function createL1TxUtilsFromEthSigner( - client: ViemClient, - signer: EthSigner, +export function createL1TxUtils( + source: L1SignerSource, deps?: { logger?: Logger; dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; + kzg?: BlobKzgInstance; delayer?: Delayer; }, config?: Partial & { debugMaxGasLimit?: boolean }, ): L1TxUtils { - const callback: SigningCallback = async (transaction: TransactionSerializable, _signingAddress) => { - return (await signer.signTransaction(transaction)).toViemTransactionSignature(); - }; - - const l1TxUtils = new L1TxUtils( + const { client, address, signingCallback } = resolveSignerSource(source); + return new L1TxUtils( client, - signer.address, - callback, + address, + signingCallback, deps?.logger, deps?.dateProvider, config, config?.debugMaxGasLimit ?? false, deps?.store, deps?.metrics, + deps?.kzg, + deps?.delayer, ); - applyDelayer(l1TxUtils, config ?? 
{}, deps?.ethereumSlotDuration, deps?.delayer); - return l1TxUtils; } diff --git a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.test.ts index 66c9d6fd8dce..3a3612efbef1 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.test.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.test.ts @@ -1,3 +1,4 @@ +import { Blob } from '@aztec/blob-lib'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import { TestDateProvider } from '@aztec/foundation/timer'; @@ -105,6 +106,8 @@ describe('ForwarderL1TxUtils', () => { false, undefined, undefined, + Blob.getViemKzgInstance(), + undefined, forwarderAddress, ); diff --git a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts index 2efebfc8c8cb..f9549fdd9772 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts @@ -1,26 +1,27 @@ +import type { BlobKzgInstance } from '@aztec/blob-lib/types'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { Logger } from '@aztec/foundation/log'; import type { DateProvider } from '@aztec/foundation/timer'; import { type Hex, encodeFunctionData } from 'viem'; -import type { EthSigner } from '../eth-signer/eth-signer.js'; import { FORWARDER_ABI } from '../forwarder_proxy.js'; -import type { ExtendedViemWalletClient, ViemClient } from '../types.js'; +import type { ViemClient } from '../types.js'; import type { L1TxUtilsConfig } from './config.js'; +import type { L1SignerSource } from './factory.js'; +import { resolveSignerSource } from './factory.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; -import { L1TxUtilsWithBlobs } from './l1_tx_utils_with_blobs.js'; -import { createViemSigner } from './signer.js'; 
-import { type Delayer, applyDelayer } from './tx_delayer.js'; +import { L1TxUtils } from './l1_tx_utils.js'; +import type { Delayer } from './tx_delayer.js'; import type { L1BlobInputs, L1TxConfig, L1TxRequest, SigningCallback } from './types.js'; /** - * Extends L1TxUtilsWithBlobs to wrap all transactions through a forwarder contract. + * Extends L1TxUtils to wrap all transactions through a forwarder contract. * This is mainly used for testing the archiver's ability to decode transactions that go through proxies. */ -export class ForwarderL1TxUtils extends L1TxUtilsWithBlobs { +export class ForwarderL1TxUtils extends L1TxUtils { constructor( - client: ViemClient | ExtendedViemWalletClient, + client: ViemClient, senderAddress: EthAddress, signingCallback: SigningCallback, logger: Logger | undefined, @@ -29,9 +30,23 @@ export class ForwarderL1TxUtils extends L1TxUtilsWithBlobs { debugMaxGasLimit: boolean, store: IL1TxStore | undefined, metrics: IL1TxMetrics | undefined, + kzg: BlobKzgInstance | undefined, + delayer: Delayer | undefined, private readonly forwarderAddress: EthAddress, ) { - super(client, senderAddress, signingCallback, logger, dateProvider, config, debugMaxGasLimit, store, metrics); + super( + client, + senderAddress, + signingCallback, + logger, + dateProvider, + config, + debugMaxGasLimit, + store, + metrics, + kzg, + delayer, + ); } /** @@ -62,67 +77,32 @@ export class ForwarderL1TxUtils extends L1TxUtilsWithBlobs { } } -export function createForwarderL1TxUtilsFromViemWallet( - client: ExtendedViemWalletClient, +export function createForwarderL1TxUtils( + source: L1SignerSource, forwarderAddress: EthAddress, - deps: { + deps?: { logger?: Logger; dateProvider?: DateProvider; store?: IL1TxStore; metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; + kzg?: BlobKzgInstance; delayer?: Delayer; - } = {}, - config: Partial = {}, - debugMaxGasLimit: boolean = false, -) { - const l1TxUtils = new ForwarderL1TxUtils( + }, + config?: Partial & { 
debugMaxGasLimit?: boolean }, +): ForwarderL1TxUtils { + const { client, address, signingCallback } = resolveSignerSource(source); + return new ForwarderL1TxUtils( client, - EthAddress.fromString(client.account.address), - createViemSigner(client), - deps.logger, - deps.dateProvider, - config, - debugMaxGasLimit, - deps.store, - deps.metrics, + address, + signingCallback, + deps?.logger, + deps?.dateProvider, + config ?? {}, + config?.debugMaxGasLimit ?? false, + deps?.store, + deps?.metrics, + deps?.kzg, + deps?.delayer, forwarderAddress, ); - applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); - return l1TxUtils; -} - -export function createForwarderL1TxUtilsFromEthSigner( - client: ViemClient, - signer: EthSigner, - forwarderAddress: EthAddress, - deps: { - logger?: Logger; - dateProvider?: DateProvider; - store?: IL1TxStore; - metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; - delayer?: Delayer; - } = {}, - config: Partial = {}, - debugMaxGasLimit: boolean = false, -) { - const callback: SigningCallback = async (transaction, _signingAddress) => { - return (await signer.signTransaction(transaction)).toViemTransactionSignature(); - }; - - const l1TxUtils = new ForwarderL1TxUtils( - client, - signer.address, - callback, - deps.logger, - deps.dateProvider, - config, - debugMaxGasLimit, - deps.store, - deps.metrics, - forwarderAddress, - ); - applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); - return l1TxUtils; } diff --git a/yarn-project/ethereum/src/l1_tx_utils/index-blobs.ts b/yarn-project/ethereum/src/l1_tx_utils/index-blobs.ts index 9796ce24da1b..bba69654cf89 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/index-blobs.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/index-blobs.ts @@ -1,2 +1,2 @@ -export * from './forwarder_l1_tx_utils.js'; -export * from './l1_tx_utils_with_blobs.js'; +export { createForwarderL1TxUtils, ForwarderL1TxUtils } from './forwarder_l1_tx_utils.js'; +export { createL1TxUtils, type 
L1SignerSource, resolveSignerSource } from './factory.js'; diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts index 5d75639dcc20..152abf04e680 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts @@ -41,10 +41,10 @@ import { ReadOnlyL1TxUtils, TxUtilsState, UnknownMinedTxError, - createL1TxUtilsFromViemWallet, + createL1TxUtils, defaultL1TxUtilsConfig, } from './index.js'; -import { L1TxUtilsWithBlobs } from './l1_tx_utils_with_blobs.js'; +import { L1TxUtils } from './l1_tx_utils.js'; import { createViemSigner } from './signer.js'; const MNEMONIC = 'test test test test test test test test test test test junk'; @@ -96,8 +96,8 @@ describe('L1TxUtils', () => { await anvil.stop().catch(err => createLogger('cleanup').error(err)); }, 5000); - describe('L1TxUtilsWithBlobs', () => { - let gasUtils: TestL1TxUtilsWithBlobs; + describe('L1TxUtils with blobs', () => { + let gasUtils: TestL1TxUtils; let config: Partial; const request = { @@ -107,7 +107,7 @@ describe('L1TxUtils', () => { }; const createL1TxUtils = () => - new TestL1TxUtilsWithBlobs( + new TestL1TxUtils( l1Client, EthAddress.fromString(l1Client.account.address), createViemSigner(l1Client), @@ -117,6 +117,8 @@ describe('L1TxUtils', () => { undefined, undefined, metrics, + Blob.getViemKzgInstance(), + undefined, ); beforeEach(() => { @@ -1777,7 +1779,7 @@ describe('L1TxUtils', () => { }); it('L1TxUtils can be instantiated with wallet client and has write methods', () => { - const l1TxUtils = createL1TxUtilsFromViemWallet(walletClient, { logger }); + const l1TxUtils = createL1TxUtils(walletClient, { logger }); expect(l1TxUtils).toBeDefined(); expect(l1TxUtils.client).toBe(walletClient); @@ -1789,7 +1791,7 @@ describe('L1TxUtils', () => { }); it('L1TxUtils inherits all read-only methods from ReadOnlyL1TxUtils', () => { - const l1TxUtils = 
createL1TxUtilsFromViemWallet(walletClient, { logger }); + const l1TxUtils = createL1TxUtils(walletClient, { logger }); // Verify all read-only methods are available expect(l1TxUtils.getBlock).toBeDefined(); @@ -1803,13 +1805,13 @@ describe('L1TxUtils', () => { it('L1TxUtils cannot be instantiated with public client', () => { expect(() => { - createL1TxUtilsFromViemWallet(publicClient as any, { logger }); + createL1TxUtils(publicClient as any, { logger }); }).toThrow(); }); }); }); -class TestL1TxUtilsWithBlobs extends L1TxUtilsWithBlobs { +class TestL1TxUtils extends L1TxUtils { declare public txs: L1TxState[]; public setMetrics(metrics: IL1TxMetrics) { diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts index dd4241132339..b0c7de888cf7 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts @@ -1,3 +1,4 @@ +import type { BlobKzgInstance } from '@aztec/blob-lib/types'; import { maxBigint } from '@aztec/foundation/bigint'; import { merge, pick } from '@aztec/foundation/collection'; import { InterruptError, TimeoutError } from '@aztec/foundation/error'; @@ -30,7 +31,7 @@ import { type L1TxUtilsConfig, l1TxUtilsConfigMappings } from './config.js'; import { MAX_L1_TX_LIMIT } from './constants.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { ReadOnlyL1TxUtils } from './readonly_l1_tx_utils.js'; -import type { Delayer } from './tx_delayer.js'; +import { type Delayer, createDelayer, wrapClientWithDelayer } from './tx_delayer.js'; import { DroppedTransactionError, type L1BlobInputs, @@ -50,6 +51,8 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { protected txs: L1TxState[] = []; /** Tx delayer for testing. Only set when enableDelayer config is true. */ public delayer: Delayer | undefined; + /** KZG instance for blob operations. 
*/ + protected kzg?: BlobKzgInstance; constructor( public override client: ViemClient, @@ -61,9 +64,25 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { debugMaxGasLimit: boolean = false, protected store?: IL1TxStore, protected metrics?: IL1TxMetrics, + kzg?: BlobKzgInstance, + delayer?: Delayer, ) { super(client, logger, dateProvider, config, debugMaxGasLimit); this.nonceManager = createNonceManager({ source: jsonRpc() }); + this.kzg = kzg; + + // Set up delayer: use provided one or create new + if (config?.enableDelayer && config?.ethereumSlotDuration) { + this.delayer = delayer ?? this.createDelayer({ ethereumSlotDuration: config.ethereumSlotDuration }); + this.client = this.wrapClientWithDelayer(this.client, this.delayer); + if (config.txDelayerMaxInclusionTimeIntoSlot !== undefined) { + this.delayer.setMaxInclusionTimeIntoSlot(config.txDelayerMaxInclusionTimeIntoSlot); + } + } else if (delayer) { + // Delayer provided but enableDelayer not set — just store it without wrapping + // This shouldn't normally happen but handle gracefully + this.delayer = delayer; + } } public get state() { @@ -734,8 +753,22 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { return Number(timestamp) * 1000; } - /** Makes empty blob inputs for the cancellation tx. To be overridden in L1TxUtilsWithBlobs. */ - protected makeEmptyBlobInputs(_maxFeePerBlobGas: bigint): Required { - throw new Error('Cannot make empty blob inputs for cancellation'); + /** Makes empty blob inputs for the cancellation tx. */ + protected makeEmptyBlobInputs(maxFeePerBlobGas: bigint): Required { + if (!this.kzg) { + throw new Error('Cannot make empty blob inputs for cancellation without kzg'); + } + const blobData = new Uint8Array(131072).fill(0); + return { blobs: [blobData], kzg: this.kzg, maxFeePerBlobGas }; + } + + /** Creates a new delayer instance. 
*/ + protected createDelayer(opts: { ethereumSlotDuration: bigint | number }): Delayer { + return createDelayer(this.dateProvider, opts); + } + + /** Wraps the client with delayer logic. */ + protected wrapClientWithDelayer(client: T, delayer: Delayer): T { + return wrapClientWithDelayer(client, delayer); } } diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts deleted file mode 100644 index 86e249ebda53..000000000000 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils_with_blobs.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { Blob } from '@aztec/blob-lib'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import type { Logger } from '@aztec/foundation/log'; -import { DateProvider } from '@aztec/foundation/timer'; - -import type { TransactionSerializable } from 'viem'; - -import type { EthSigner } from '../eth-signer/eth-signer.js'; -import type { ExtendedViemWalletClient, ViemClient } from '../types.js'; -import type { L1TxUtilsConfig } from './config.js'; -import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; -import { L1TxUtils } from './l1_tx_utils.js'; -import { createViemSigner } from './signer.js'; -import { type Delayer, applyDelayer } from './tx_delayer.js'; -import type { L1BlobInputs, SigningCallback } from './types.js'; - -/** Extends L1TxUtils with the capability to cancel blobs. This needs to be a separate class so we don't require a dependency on blob-lib unnecessarily. */ -export class L1TxUtilsWithBlobs extends L1TxUtils { - /** Makes empty blob inputs for the cancellation tx. 
*/ - protected override makeEmptyBlobInputs(maxFeePerBlobGas: bigint): Required { - const blobData = new Uint8Array(131072).fill(0); - const kzg = Blob.getViemKzgInstance(); - return { blobs: [blobData], kzg, maxFeePerBlobGas }; - } -} - -export function createL1TxUtilsWithBlobsFromViemWallet( - client: ExtendedViemWalletClient, - deps: { - logger?: Logger; - dateProvider?: DateProvider; - store?: IL1TxStore; - metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; - delayer?: Delayer; - } = {}, - config: Partial = {}, - debugMaxGasLimit: boolean = false, -) { - const l1TxUtils = new L1TxUtilsWithBlobs( - client, - EthAddress.fromString(client.account.address), - createViemSigner(client), - deps.logger, - deps.dateProvider, - config, - debugMaxGasLimit, - deps.store, - deps.metrics, - ); - applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); - return l1TxUtils; -} - -export function createL1TxUtilsWithBlobsFromEthSigner( - client: ViemClient, - signer: EthSigner, - deps: { - logger?: Logger; - dateProvider?: DateProvider; - store?: IL1TxStore; - metrics?: IL1TxMetrics; - ethereumSlotDuration?: number; - delayer?: Delayer; - } = {}, - config: Partial = {}, - debugMaxGasLimit: boolean = false, -) { - const callback: SigningCallback = async (transaction: TransactionSerializable, _signingAddress) => { - return (await signer.signTransaction(transaction)).toViemTransactionSignature(); - }; - - const l1TxUtils = new L1TxUtilsWithBlobs( - client, - signer.address, - callback, - deps.logger, - deps.dateProvider, - config, - debugMaxGasLimit, - deps.store, - deps.metrics, - ); - applyDelayer(l1TxUtils, config, deps.ethereumSlotDuration, deps.delayer); - return l1TxUtils; -} diff --git a/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts index 98cfa6399598..b4a637b3308e 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts @@ -16,9 
+16,7 @@ import { walletActions, } from 'viem'; -import { type ExtendedViemWalletClient, type ViemClient, isExtendedClient } from '../types.js'; -import type { L1TxUtilsConfig } from './config.js'; -import type { L1TxUtils } from './l1_tx_utils.js'; +import type { ExtendedViemWalletClient, ViemClient } from '../types.js'; const MAX_WAIT_TIME_SECONDS = 180; @@ -136,26 +134,19 @@ class DelayerImpl implements Delayer { /** * Creates a new DelayerImpl instance. Exposed so callers can create a single shared delayer - * and pass it to multiple `withDelayer` / `applyDelayer` calls. + * and pass it to multiple `wrapClientWithDelayer` calls. */ export function createDelayer(dateProvider: DateProvider, opts: { ethereumSlotDuration: bigint | number }): Delayer { return new DelayerImpl(dateProvider, opts); } /** - * Returns a new client (without modifying the one passed in) with an injected tx delayer. - * The delayer can be used to hold off the next tx to be sent until a given block number. - * If an existing delayer is provided, it will be reused instead of creating a new one. - * TODO(#10824): This doesn't play along well with blob txs for some reason. + * Wraps a viem client with tx delaying logic. Returns the wrapped client. + * The delayer intercepts sendRawTransaction calls and delays them based on the delayer's state. */ -export function withDelayer( - client: T, - dateProvider: DateProvider, - opts: { ethereumSlotDuration: bigint | number }, - existingDelayer?: Delayer, -): { client: T; delayer: Delayer } { +export function wrapClientWithDelayer(client: T, delayer: Delayer): T { const logger = createLogger('ethereum:tx_delayer'); - const delayer = (existingDelayer as DelayerImpl | undefined) ?? new DelayerImpl(dateProvider, opts); + const delayerImpl = delayer as DelayerImpl; // Cast to ExtendedViemWalletClient for the extend chain since it has sendRawTransaction. // The sendRawTransaction override is applied to all clients regardless of type. 
@@ -172,10 +163,10 @@ export function withDelayer( const { serializedTransaction } = args[0]; const publicClient = client as unknown as PublicClient; - if (delayer.nextWait !== undefined) { + if (delayerImpl.nextWait !== undefined) { // Check if we have been instructed to delay the next tx. - const waitUntil = delayer.nextWait; - delayer.nextWait = undefined; + const waitUntil = delayerImpl.nextWait; + delayerImpl.nextWait = undefined; // Compute the tx hash manually so we emulate sendRawTransaction response txHash = computeTxHash(serializedTransaction); @@ -183,7 +174,7 @@ export function withDelayer( // Cancel tx outright if instructed if ('indefinitely' in waitUntil && waitUntil.indefinitely) { logger.info(`Cancelling tx ${txHash}`); - delayer.cancelledTxs.push(serializedTransaction); + delayerImpl.cancelledTxs.push(serializedTransaction); return Promise.resolve(txHash); } @@ -192,28 +183,28 @@ export function withDelayer( 'l1BlockNumber' in waitUntil ? waitUntilBlock(publicClient, waitUntil.l1BlockNumber - 1n, logger) : 'l1Timestamp' in waitUntil - ? waitUntilL1Timestamp(publicClient, waitUntil.l1Timestamp - delayer.ethereumSlotDuration, logger) + ? waitUntilL1Timestamp(publicClient, waitUntil.l1Timestamp - delayerImpl.ethereumSlotDuration, logger) : undefined; logger.info(`Delaying tx ${txHash} until ${inspect(waitUntil)}`, { argsLen: args.length, ...omit(parseTransaction(serializedTransaction), 'data', 'sidecars'), }); - } else if (delayer.maxInclusionTimeIntoSlot !== undefined) { + } else if (delayerImpl.maxInclusionTimeIntoSlot !== undefined) { // Check if we need to delay txs sent too close to the end of the slot. 
const currentBlock = await publicClient.getBlock({ includeTransactions: false }); const { timestamp: lastBlockTimestamp, number } = currentBlock; - const now = delayer.dateProvider.now(); + const now = delayerImpl.dateProvider.now(); txHash = computeTxHash(serializedTransaction); const logData = { ...omit(parseTransaction(serializedTransaction), 'data', 'sidecars'), lastBlockTimestamp, now, - maxInclusionTimeIntoSlot: delayer.maxInclusionTimeIntoSlot, + maxInclusionTimeIntoSlot: delayerImpl.maxInclusionTimeIntoSlot, }; - if (now / 1000 - Number(lastBlockTimestamp) > delayer.maxInclusionTimeIntoSlot) { + if (now / 1000 - Number(lastBlockTimestamp) > delayerImpl.maxInclusionTimeIntoSlot) { // If the last block was mined more than `maxInclusionTimeIntoSlot` seconds ago, then we cannot include // any txs in the current slot, so we delay the tx until the next slot. logger.info(`Delaying inclusion of tx ${txHash} until the next slot since it was sent too late`, logData); @@ -237,21 +228,23 @@ export function withDelayer( }); } logger.info(`Sent previously delayed tx ${clientTxHash}`); - delayer.sentTxHashes.push(clientTxHash); + delayerImpl.sentTxHashes.push(clientTxHash); }) .catch(err => logger.error(`Error sending tx after delay`, err)); return Promise.resolve(txHash!); } else { const txHash = await client.sendRawTransaction(...args); logger.debug(`Sent tx immediately ${txHash}`); - delayer.sentTxHashes.push(txHash); + delayerImpl.sentTxHashes.push(txHash); return txHash; } }, })); // Only re-bind wallet actions (sendTransaction, writeContract, deployContract) for wallet clients. - const extended = isExtendedClient(client) + // This is needed for tests that use wallet actions directly rather than sendRawTransaction. + const isWalletClient = 'account' in client && client.account !== undefined; + const extended = isWalletClient ? withRawTx // Re-extend with sendTransaction so it uses the modified sendRawTransaction. 
.extend(client => ({ sendTransaction: walletActions(client).sendTransaction })) @@ -262,32 +255,7 @@ export function withDelayer( })) : withRawTx; - return { client: extended as T, delayer }; -} - -/** Applies a tx delayer to an L1TxUtils instance if enableDelayer is set in config. - * If an existing delayer is provided, it will be shared instead of creating a new one. - */ -export function applyDelayer( - l1TxUtils: L1TxUtils, - config: Partial, - ethereumSlotDuration?: number, - existingDelayer?: Delayer, -) { - if (!config.enableDelayer || ethereumSlotDuration === undefined) { - return; - } - const { client, delayer } = withDelayer( - l1TxUtils.client, - l1TxUtils.dateProvider, - { ethereumSlotDuration }, - existingDelayer, - ); - l1TxUtils.client = client; - l1TxUtils.delayer = delayer; - if (config.txDelayerMaxInclusionTimeIntoSlot !== undefined) { - delayer.setMaxInclusionTimeIntoSlot(config.txDelayerMaxInclusionTimeIntoSlot); - } + return extended as T; } /** diff --git a/yarn-project/ethereum/src/test/tx_delayer.test.ts b/yarn-project/ethereum/src/test/tx_delayer.test.ts index e630ac32ae66..055a3e77fa0b 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.test.ts +++ b/yarn-project/ethereum/src/test/tx_delayer.test.ts @@ -10,7 +10,7 @@ import { type PrivateKeyAccount, createWalletClient, fallback, getContract, http import { privateKeyToAccount } from 'viem/accounts'; import { foundry } from 'viem/chains'; -import { type Delayer, waitUntilBlock, withDelayer } from '../l1_tx_utils/tx_delayer.js'; +import { type Delayer, createDelayer, waitUntilBlock, wrapClientWithDelayer } from '../l1_tx_utils/tx_delayer.js'; import type { ExtendedViemWalletClient } from '../types.js'; import { EthCheatCodes } from './eth_cheat_codes.js'; import { startAnvil } from './start_anvil.js'; @@ -41,7 +41,8 @@ describe('tx_delayer', () => { chain: foundry, account, }).extend(publicActions); - ({ client, delayer } = withDelayer(_client, dateProvider, { ethereumSlotDuration: 
ETHEREUM_SLOT_DURATION })); + delayer = createDelayer(dateProvider, { ethereumSlotDuration: ETHEREUM_SLOT_DURATION }); + client = wrapClientWithDelayer(_client, delayer); }); const receiptNotFound = expect.objectContaining({ name: 'TransactionReceiptNotFoundError' }); diff --git a/yarn-project/ethereum/src/test/upgrade_utils.ts b/yarn-project/ethereum/src/test/upgrade_utils.ts index 9c4b8511aa73..191002b5aeb4 100644 --- a/yarn-project/ethereum/src/test/upgrade_utils.ts +++ b/yarn-project/ethereum/src/test/upgrade_utils.ts @@ -7,7 +7,7 @@ import { type GetContractReturnType, type PrivateKeyAccount, getContract } from import { extractProposalIdFromLogs } from '../contracts/governance.js'; import type { L1ContractAddresses } from '../l1_contract_addresses.js'; -import { createL1TxUtilsFromViemWallet } from '../l1_tx_utils/index.js'; +import { createL1TxUtils } from '../l1_tx_utils/index.js'; import type { ExtendedViemWalletClient, ViemPublicClient } from '../types.js'; import { EthCheatCodes } from './eth_cheat_codes.js'; @@ -22,7 +22,7 @@ export async function executeGovernanceProposal( ) { const proposal = await governance.read.getProposal([proposalId]); - const l1TxUtils = createL1TxUtilsFromViemWallet(l1Client); + const l1TxUtils = createL1TxUtils(l1Client); const waitL1Block = async () => { await l1TxUtils.sendAndMonitorTransaction({ diff --git a/yarn-project/node-lib/src/factories/l1_tx_utils.ts b/yarn-project/node-lib/src/factories/l1_tx_utils.ts index 0f35f25c11d4..2f436d0a37e2 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils.ts +++ b/yarn-project/node-lib/src/factories/l1_tx_utils.ts @@ -1,18 +1,11 @@ +import type { BlobKzgInstance } from '@aztec/blob-lib/types'; import type { EthSigner } from '@aztec/ethereum/eth-signer'; -import { - createDelayer, - createL1TxUtilsFromEthSigner as createL1TxUtilsFromEthSignerBase, - createL1TxUtilsFromViemWallet as createL1TxUtilsFromViemWalletBase, -} from '@aztec/ethereum/l1-tx-utils'; +import { createDelayer, 
createL1TxUtils as createL1TxUtilsBase } from '@aztec/ethereum/l1-tx-utils'; import type { L1TxUtilsConfig } from '@aztec/ethereum/l1-tx-utils'; -import { - createForwarderL1TxUtilsFromEthSigner as createForwarderL1TxUtilsFromEthSignerBase, - createForwarderL1TxUtilsFromViemWallet as createForwarderL1TxUtilsFromViemWalletBase, - createL1TxUtilsWithBlobsFromEthSigner as createL1TxUtilsWithBlobsFromEthSignerBase, - createL1TxUtilsWithBlobsFromViemWallet as createL1TxUtilsWithBlobsFromViemWalletBase, -} from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import { createForwarderL1TxUtils as createForwarderL1TxUtilsBase } from '@aztec/ethereum/l1-tx-utils-with-blobs'; import type { ExtendedViemWalletClient, ViemClient } from '@aztec/ethereum/types'; import { omit } from '@aztec/foundation/collection'; +import type { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import type { DateProvider } from '@aztec/foundation/timer'; import type { DataStoreConfig } from '@aztec/kv-store/config'; @@ -30,7 +23,7 @@ const L1_TX_STORE_NAME = 'l1-tx-utils'; * When enableDelayer is set in config, a single shared delayer is created and passed to all instances. */ async function createSharedDeps( - config: DataStoreConfig & Partial & { scope?: L1TxScope; ethereumSlotDuration?: number }, + config: DataStoreConfig & Partial & { scope?: L1TxScope }, deps: { telemetry: TelemetryClient; logger?: ReturnType; @@ -58,42 +51,38 @@ async function createSharedDeps( } /** - * Creates L1TxUtils with blobs from multiple Viem wallets, sharing store, metrics, and delayer. + * Creates L1TxUtils from multiple Viem wallet clients, sharing store, metrics, and delayer. + * When kzg is provided in deps, blob support is enabled. 
*/ -export async function createL1TxUtilsWithBlobsFromViemWallet( +export async function createL1TxUtilsFromWallets( clients: ExtendedViemWalletClient[], - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, + config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, deps: { telemetry: TelemetryClient; logger?: ReturnType; dateProvider?: DateProvider; + kzg?: BlobKzgInstance; }, ) { const sharedDeps = await createSharedDeps(config, deps); - return clients.map(client => - createL1TxUtilsWithBlobsFromViemWalletBase( - client, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - config.debugMaxGasLimit, - ), - ); + return clients.map(client => createL1TxUtilsBase(client, { ...sharedDeps, kzg: deps.kzg }, config)); } /** - * Creates L1TxUtils with blobs from multiple EthSigners, sharing store, metrics, and delayer. Removes duplicates. + * Creates L1TxUtils from multiple EthSigners, sharing store, metrics, and delayer. + * When kzg is provided in deps, blob support is enabled. + * Deduplicates signers by address to avoid creating multiple instances for the same publisher. 
*/ -export async function createL1TxUtilsWithBlobsFromEthSigner( +export async function createL1TxUtilsFromSigners( client: ViemClient, signers: EthSigner[], - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, + config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, deps: { telemetry: TelemetryClient; logger?: ReturnType; dateProvider?: DateProvider; + kzg?: BlobKzgInstance; }, ) { const sharedDeps = await createSharedDeps(config, deps); @@ -116,141 +105,52 @@ export async function createL1TxUtilsWithBlobsFromEthSigner( ); } - return uniqueSigners.map(signer => - createL1TxUtilsWithBlobsFromEthSignerBase( - client, - signer, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - config.debugMaxGasLimit, - ), - ); -} - -/** - * Creates L1TxUtils (without blobs) from multiple Viem wallets, sharing store, metrics, and delayer. - */ -export async function createL1TxUtilsFromViemWalletWithStore( - clients: ExtendedViemWalletClient[], - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, - deps: { - telemetry: TelemetryClient; - logger?: ReturnType; - dateProvider?: DateProvider; - scope?: L1TxScope; - }, -) { - const sharedDeps = await createSharedDeps(config, deps); - - return clients.map(client => - createL1TxUtilsFromViemWalletBase( - client, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - ), - ); -} - -/** - * Creates L1TxUtils (without blobs) from multiple EthSigners, sharing store, metrics, and delayer. Removes duplicates. 
- */ -export async function createL1TxUtilsFromEthSignerWithStore( - client: ViemClient, - signers: EthSigner[], - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, - deps: { - telemetry: TelemetryClient; - logger?: ReturnType; - dateProvider?: DateProvider; - scope?: L1TxScope; - }, -) { - const sharedDeps = await createSharedDeps(config, deps); - - // Deduplicate signers by address to avoid creating multiple L1TxUtils instances - // for the same publisher address (e.g., when multiple attesters share the same publisher key) - const signersByAddress = new Map(); - for (const signer of signers) { - const addressKey = signer.address.toString().toLowerCase(); - if (!signersByAddress.has(addressKey)) { - signersByAddress.set(addressKey, signer); - } - } - - const uniqueSigners = Array.from(signersByAddress.values()); - - if (uniqueSigners.length < signers.length) { - sharedDeps.logger.info( - `Deduplicated ${signers.length} signers to ${uniqueSigners.length} unique publisher addresses`, - ); - } - - return uniqueSigners.map(signer => - createL1TxUtilsFromEthSignerBase( - client, - signer, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - ), - ); + return uniqueSigners.map(signer => createL1TxUtilsBase({ client, signer }, { ...sharedDeps, kzg: deps.kzg }, config)); } /** - * Creates ForwarderL1TxUtils from multiple Viem wallets, sharing store, metrics, and delayer. - * This wraps all transactions through a forwarder contract for testing purposes. + * Creates ForwarderL1TxUtils from multiple Viem wallet clients, sharing store, metrics, and delayer. + * Wraps all transactions through a forwarder contract for testing purposes. + * When kzg is provided in deps, blob support is enabled. 
*/ -export async function createForwarderL1TxUtilsFromViemWallet( +export async function createForwarderL1TxUtilsFromWallets( clients: ExtendedViemWalletClient[], - forwarderAddress: import('@aztec/foundation/eth-address').EthAddress, - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, + forwarderAddress: EthAddress, + config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, deps: { telemetry: TelemetryClient; logger?: ReturnType; dateProvider?: DateProvider; + kzg?: BlobKzgInstance; }, ) { const sharedDeps = await createSharedDeps(config, deps); return clients.map(client => - createForwarderL1TxUtilsFromViemWalletBase( - client, - forwarderAddress, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - config.debugMaxGasLimit, - ), + createForwarderL1TxUtilsBase(client, forwarderAddress, { ...sharedDeps, kzg: deps.kzg }, config), ); } /** * Creates ForwarderL1TxUtils from multiple EthSigners, sharing store, metrics, and delayer. - * This wraps all transactions through a forwarder contract for testing purposes. + * Wraps all transactions through a forwarder contract for testing purposes. + * When kzg is provided in deps, blob support is enabled. 
*/ -export async function createForwarderL1TxUtilsFromEthSigner( +export async function createForwarderL1TxUtilsFromSigners( client: ViemClient, signers: EthSigner[], - forwarderAddress: import('@aztec/foundation/eth-address').EthAddress, - config: DataStoreConfig & - Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope; ethereumSlotDuration?: number }, + forwarderAddress: EthAddress, + config: DataStoreConfig & Partial & { debugMaxGasLimit?: boolean; scope?: L1TxScope }, deps: { telemetry: TelemetryClient; logger?: ReturnType; dateProvider?: DateProvider; + kzg?: BlobKzgInstance; }, ) { const sharedDeps = await createSharedDeps(config, deps); return signers.map(signer => - createForwarderL1TxUtilsFromEthSignerBase( - client, - signer, - forwarderAddress, - { ...sharedDeps, ethereumSlotDuration: config.ethereumSlotDuration }, - config, - config.debugMaxGasLimit, - ), + createForwarderL1TxUtilsBase({ client, signer }, forwarderAddress, { ...sharedDeps, kzg: deps.kzg }, config), ); } diff --git a/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts b/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts index 1d68d0b35aae..2d049d12f488 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts +++ b/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts @@ -1,3 +1,4 @@ +import { Blob } from '@aztec/blob-lib'; import { getAddressFromPrivateKey } from '@aztec/ethereum/account'; import type { ViemClient } from '@aztec/ethereum/types'; import { times } from '@aztec/foundation/collection'; @@ -11,7 +12,7 @@ import type { TelemetryClient } from '@aztec/telemetry-client'; import { generatePrivateKey } from 'viem/accounts'; -import { createL1TxUtilsWithBlobsFromEthSigner } from './l1_tx_utils.js'; +import { createL1TxUtilsFromSigners } from './l1_tx_utils.js'; describe('L1TxUtils Integration - Publisher Deduplication', () => { let kvStore: AztecAsyncKVStore; @@ -80,8 +81,9 @@ describe('L1TxUtils Integration 
- Publisher Deduplication', () => { // we should have publishers for each validator expect(allPublisherSigners).toHaveLength(keystore.validators!.length); - const l1TxUtils = await createL1TxUtilsWithBlobsFromEthSigner(mockClient, allPublisherSigners, mockConfig, { + const l1TxUtils = await createL1TxUtilsFromSigners(mockClient, allPublisherSigners, mockConfig, { telemetry: mockTelemetry, + kzg: Blob.getViemKzgInstance(), }); // all of the publisherSigners should deduplicate to one L1TxUtils instance @@ -139,8 +141,9 @@ describe('L1TxUtils Integration - Publisher Deduplication', () => { expect(allPublisherSigners).toHaveLength(keystore.validators!.length); - const l1TxUtils = await createL1TxUtilsWithBlobsFromEthSigner(mockClient, allPublisherSigners, mockConfig, { + const l1TxUtils = await createL1TxUtilsFromSigners(mockClient, allPublisherSigners, mockConfig, { telemetry: mockTelemetry, + kzg: Blob.getViemKzgInstance(), }); expect(l1TxUtils).toHaveLength(3); diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index dd8b755985f1..6d61df3dc101 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,6 +1,7 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; import { createBlobClientWithFileStores } from '@aztec/blob-client/client'; +import { Blob } from '@aztec/blob-lib'; import { EpochCache } from '@aztec/epoch-cache'; import { createEthereumChain } from '@aztec/ethereum/chain'; import { RollupContract } from '@aztec/ethereum/contracts'; @@ -12,10 +13,7 @@ import { DateProvider } from '@aztec/foundation/timer'; import type { DataStoreConfig } from '@aztec/kv-store/config'; import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore'; import { trySnapshotSync } from '@aztec/node-lib/actions'; -import { - 
createForwarderL1TxUtilsFromEthSigner, - createL1TxUtilsFromEthSignerWithStore, -} from '@aztec/node-lib/factories'; +import { createForwarderL1TxUtilsFromSigners, createL1TxUtilsFromSigners } from '@aztec/node-lib/factories'; import { NodeRpcTxSource, type P2PClientDeps, createP2PClient } from '@aztec/p2p'; import { type ProverClientConfig, createProverClient } from '@aztec/prover-client'; import { createAndStartProvingBroker } from '@aztec/prover-client/broker'; @@ -136,14 +134,14 @@ export async function createProverNode( const l1TxUtils = deps.l1TxUtils ? [deps.l1TxUtils] : config.publisherForwarderAddress - ? await createForwarderL1TxUtilsFromEthSigner( + ? await createForwarderL1TxUtilsFromSigners( publicClient, proverSigners.signers, config.publisherForwarderAddress, { ...config, scope: 'prover' }, - { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider }, + { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider, kzg: Blob.getViemKzgInstance() }, ) - : await createL1TxUtilsFromEthSignerWithStore( + : await createL1TxUtilsFromSigners( publicClient, proverSigners.signers, { ...config, scope: 'prover' }, diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index d500d33fd4e7..7b3135501ce3 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -3,8 +3,7 @@ import { EpochCache } from '@aztec/epoch-cache'; import { isAnvilTestChain } from '@aztec/ethereum/chain'; import { getPublicClient } from '@aztec/ethereum/client'; import { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; -import type { Delayer } from '@aztec/ethereum/l1-tx-utils'; -import { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import { type Delayer, L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { PublisherManager } from 
'@aztec/ethereum/publisher-manager'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; @@ -29,7 +28,7 @@ import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; */ export class SequencerClient { constructor( - protected publisherManager: PublisherManager, + protected publisherManager: PublisherManager, protected sequencer: Sequencer, protected checkpointsBuilder: FullNodeCheckpointsBuilder, protected validatorClient?: ValidatorClient, @@ -64,7 +63,7 @@ export class SequencerClient { blobClient: BlobClientInterface; dateProvider: DateProvider; epochCache?: EpochCache; - l1TxUtils: L1TxUtilsWithBlobs[]; + l1TxUtils: L1TxUtils[]; nodeKeyStore: KeystoreManager; }, ) { diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts index 0f960c656d32..178c90f24127 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts @@ -1,7 +1,7 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; import type { EpochCache } from '@aztec/epoch-cache'; import type { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherManager } from '@aztec/ethereum/publisher-manager'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { DateProvider } from '@aztec/foundation/timer'; @@ -17,7 +17,7 @@ import { SequencerPublisherFactory } from './sequencer-publisher-factory.js'; describe('SequencerPublisherFactory', () => { let factory: SequencerPublisherFactory; let mockConfig: SequencerClientConfig; - let mockPublisherManager: MockProxy>; + let 
mockPublisherManager: MockProxy>; let mockBlobClient: MockProxy; let mockDateProvider: MockProxy; let mockEpochCache: MockProxy; @@ -25,7 +25,7 @@ describe('SequencerPublisherFactory', () => { let mockGovernanceProposerContract: MockProxy; let mockSlashFactoryContract: MockProxy; let mockNodeKeyStore: MockProxy; - let mockL1TxUtils: MockProxy; + let mockL1TxUtils: MockProxy; const validatorAddress = EthAddress.random(); const publisherAddress = EthAddress.random(); @@ -35,12 +35,12 @@ describe('SequencerPublisherFactory', () => { mockConfig = { ethereumSlotDuration: 12, } as SequencerClientConfig; - mockPublisherManager = mock>(); + mockPublisherManager = mock>(); mockBlobClient = mock(); mockDateProvider = mock(); mockEpochCache = mock(); mockNodeKeyStore = mock(); - mockL1TxUtils = mock(); + mockL1TxUtils = mock(); mockRollupContract = mock(); mockGovernanceProposerContract = mock(); mockSlashFactoryContract = mock(); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts index 57d74d12bde2..3d79eea915b7 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts @@ -3,7 +3,7 @@ import { type Logger, createLogger } from '@aztec/aztec.js/log'; import type { BlobClientInterface } from '@aztec/blob-client/client'; import type { EpochCache } from '@aztec/epoch-cache'; import type { GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherFilter, PublisherManager } from '@aztec/ethereum/publisher-manager'; import { SlotNumber } from '@aztec/foundation/branded-types'; import type { DateProvider } from '@aztec/foundation/timer'; @@ -32,7 +32,7 @@ export class 
SequencerPublisherFactory { private sequencerConfig: SequencerClientConfig, private deps: { telemetry: TelemetryClient; - publisherManager: PublisherManager; + publisherManager: PublisherManager; blobClient: BlobClientInterface; dateProvider: DateProvider; epochCache: EpochCache; @@ -55,9 +55,9 @@ export class SequencerPublisherFactory { // If we have been given an attestor address we must only allow publishers permitted for that attestor const allowedPublishers = !validatorAddress ? [] : this.deps.nodeKeyStore.getPublisherAddresses(validatorAddress); - const filter: PublisherFilter = !validatorAddress + const filter: PublisherFilter = !validatorAddress ? () => true - : (utils: L1TxUtilsWithBlobs) => { + : (utils: L1TxUtils) => { const publisherAddress = utils.getSenderAddress(); return allowedPublishers.some(allowedPublisher => allowedPublisher.equals(publisherAddress)); }; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 75796e050874..29f39e0c2581 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -1,15 +1,19 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; import { getBlobsPerL1Block, getPrefixedEthBlobCommitments } from '@aztec/blob-lib'; import type { EpochCache } from '@aztec/epoch-cache'; -import { DefaultL1ContractsConfig, type L1ContractsConfig } from '@aztec/ethereum/config'; +import type { L1ContractsConfig } from '@aztec/ethereum/config'; import { type EmpireSlashingProposerContract, type GovernanceProposerContract, Multicall3, type RollupContract, } from '@aztec/ethereum/contracts'; -import { type GasPrice, type L1TxUtilsConfig, defaultL1TxUtilsConfig } from '@aztec/ethereum/l1-tx-utils'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import { + type GasPrice, + 
type L1TxUtils, + type L1TxUtilsConfig, + defaultL1TxUtilsConfig, +} from '@aztec/ethereum/l1-tx-utils'; import { FormattedViemError } from '@aztec/ethereum/utils'; import { BlockNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -58,7 +62,7 @@ describe('SequencerPublisher', () => { let slashingProposerContract: MockProxy; let governanceProposerContract: MockProxy; let slashFactoryContract: MockProxy; - let l1TxUtils: MockProxy; + let l1TxUtils: MockProxy; let l1Metrics: MockProxy; let forwardSpy: jest.SpiedFunction; @@ -101,7 +105,7 @@ describe('SequencerPublisher', () => { testHarnessAttesterAccount = privateKeyToAccount( '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80', ); - l1TxUtils = mock(); + l1TxUtils = mock(); l1TxUtils.getBlock.mockResolvedValue({ timestamp: 12n } as any); l1TxUtils.getBlockNumber.mockResolvedValue(1n); l1TxUtils.getSenderAddress.mockReturnValue(EthAddress.fromString(testHarnessAttesterAccount.address)); @@ -113,7 +117,6 @@ describe('SequencerPublisher', () => { rollupAddress: EthAddress.ZERO.toString(), governanceProposerAddress: mockGovernanceProposerAddress, }, - ethereumSlotDuration: DefaultL1ContractsConfig.ethereumSlotDuration, ...defaultL1TxUtilsConfig, } as unknown as TxSenderConfig & diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index 892ad2081c0f..d5df5fdbf3c6 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -18,11 +18,11 @@ import { type L1BlobInputs, type L1TxConfig, type L1TxRequest, + type L1TxUtils, MAX_L1_TX_LIMIT, type TransactionStats, WEI_CONST, } from '@aztec/ethereum/l1-tx-utils'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; import { FormattedViemError, formatViemError, 
mergeAbis, tryExtractEvent } from '@aztec/ethereum/utils'; import { sumBigint } from '@aztec/foundation/bigint'; import { toHex as toPaddedHex } from '@aztec/foundation/bigint-buffer'; @@ -129,7 +129,7 @@ export class SequencerPublisher { // Gas report for VotingWithSigTest shows a max gas of 100k, but we've seen it cost 700k+ in testnet public static VOTE_GAS_GUESS: bigint = 800_000n; - public l1TxUtils: L1TxUtilsWithBlobs; + public l1TxUtils: L1TxUtils; public rollupContract: RollupContract; public govProposerContract: GovernanceProposerContract; public slashingProposerContract: EmpireSlashingProposerContract | TallySlashingProposerContract | undefined; @@ -144,7 +144,7 @@ export class SequencerPublisher { deps: { telemetry?: TelemetryClient; blobClient: BlobClientInterface; - l1TxUtils: L1TxUtilsWithBlobs; + l1TxUtils: L1TxUtils; rollupContract: RollupContract; slashingProposerContract: EmpireSlashingProposerContract | TallySlashingProposerContract | undefined; governanceProposerContract: GovernanceProposerContract; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts index 91e778c05105..65ed41a5ae48 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts @@ -14,7 +14,7 @@ import type { RollupContract, } from '@aztec/ethereum/contracts'; import { Multicall3 } from '@aztec/ethereum/contracts'; -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -53,7 +53,7 @@ describe('CheckpointVoter HA Integration', () => { let 
rollupContract: MockProxy; let governanceProposerContract: MockProxy; let slashingProposerContract: MockProxy; - let l1TxUtils: MockProxy; + let l1TxUtils: MockProxy; let dateProvider: TestDateProvider; let sequencerMetrics: MockProxy; let publisherMetrics: MockProxy; @@ -147,8 +147,8 @@ describe('CheckpointVoter HA Integration', () => { /** * Helper to create mock L1 tx utils */ - function createMockL1TxUtils(validatorAccount: PrivateKeyAccount): MockProxy { - const txUtils = mock(); + function createMockL1TxUtils(validatorAccount: PrivateKeyAccount): MockProxy { + const txUtils = mock(); txUtils.client = { account: validatorAccount, getCode: () => Promise.resolve('0x1234' as `0x${string}`), diff --git a/yarn-project/sequencer-client/src/test/index.ts b/yarn-project/sequencer-client/src/test/index.ts index c42e11bfb457..87b9ed8c1a31 100644 --- a/yarn-project/sequencer-client/src/test/index.ts +++ b/yarn-project/sequencer-client/src/test/index.ts @@ -1,4 +1,4 @@ -import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; +import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherManager } from '@aztec/ethereum/publisher-manager'; import type { PublicProcessorFactory } from '@aztec/simulator/server'; import type { FullNodeCheckpointsBuilder, ValidatorClient } from '@aztec/validator-client'; @@ -20,7 +20,7 @@ export type TestSequencer = TestSequencer_; class TestSequencerClient_ extends SequencerClient { declare public sequencer: TestSequencer; - declare public publisherManager: PublisherManager; + declare public publisherManager: PublisherManager; } export type TestSequencerClient = TestSequencerClient_; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index d72ee1067484..f3795668b2cb 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -802,6 +802,7 @@ __metadata: "@aztec/archiver": "workspace:^" "@aztec/bb-prover": "workspace:^" "@aztec/blob-client": "workspace:^" + "@aztec/blob-lib": 
"workspace:^" "@aztec/constants": "workspace:^" "@aztec/epoch-cache": "workspace:^" "@aztec/ethereum": "workspace:^" From 5af0a01e832e88e94cbc81c01fbcc0206d9fb702 Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Wed, 11 Feb 2026 20:11:52 +0000 Subject: [PATCH 06/62] Initial commit --- .../mem_pools/tx_pool_v2/instrumentation.ts | 58 ++++++++ .../src/mem_pools/tx_pool_v2/interfaces.ts | 3 + .../mem_pools/tx_pool_v2/tx_pool_v2.test.ts | 138 ++++++++++++++++++ .../src/mem_pools/tx_pool_v2/tx_pool_v2.ts | 4 +- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 96 ++++++++++-- yarn-project/telemetry-client/src/metrics.ts | 31 ++++ 6 files changed, 314 insertions(+), 16 deletions(-) create mode 100644 yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts new file mode 100644 index 000000000000..a6f39f037a72 --- /dev/null +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts @@ -0,0 +1,58 @@ +import { + type Meter, + Metrics, + type TelemetryClient, + type UpDownCounter, + createUpDownCounterWithDefault, +} from '@aztec/telemetry-client'; + +/** Instrumentation for TxPoolV2Impl internal operations. 
*/ +export class TxPoolV2Instrumentation { + #evictedCounter: UpDownCounter; + #ignoredCounter: UpDownCounter; + #rejectedCounter: UpDownCounter; + #softDeletedHitsCounter: UpDownCounter; + #missingOnProtectCounter: UpDownCounter; + #missingPreviouslyEvictedCounter: UpDownCounter; + + constructor(telemetry: TelemetryClient) { + const meter: Meter = telemetry.getMeter('TxPoolV2Impl'); + + this.#evictedCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_EVICTED_COUNT); + this.#ignoredCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_IGNORED_COUNT); + this.#rejectedCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_REJECTED_COUNT); + this.#softDeletedHitsCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_SOFT_DELETED_HITS); + this.#missingOnProtectCounter = createUpDownCounterWithDefault( + meter, + Metrics.MEMPOOL_TX_POOL_V2_MISSING_ON_PROTECT, + ); + this.#missingPreviouslyEvictedCounter = createUpDownCounterWithDefault( + meter, + Metrics.MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED, + ); + } + + recordEvictions(count: number) { + this.#evictedCounter.add(count); + } + + recordIgnored(count: number) { + this.#ignoredCounter.add(count); + } + + recordRejected(count: number) { + this.#rejectedCounter.add(count); + } + + recordSoftDeletedHits(count: number) { + this.#softDeletedHitsCounter.add(count); + } + + recordMissingOnProtect(count: number) { + this.#missingOnProtectCounter.add(count); + } + + recordMissingPreviouslyEvicted(count: number) { + this.#missingPreviouslyEvictedCounter.add(count); + } +} diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 1057f57e3954..8636961f123f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -39,6 +39,8 @@ export type TxPoolV2Config = { archivedTxLimit: number; /** 
Minimum age (ms) a transaction must have been in the pool before it's eligible for block building */ minTxPoolAgeMs: number; + /** Maximum number of evicted tx hashes to remember for metrics tracking */ + evictedTxCacheSize: number; }; /** @@ -48,6 +50,7 @@ export const DEFAULT_TX_POOL_V2_CONFIG: TxPoolV2Config = { maxPendingTxCount: 0, // 0 = disabled archivedTxLimit: 0, // 0 = disabled minTxPoolAgeMs: 2_000, + evictedTxCacheSize: 1_000, }; /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts index 278cc846f162..f9f5e2334941 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts @@ -1158,6 +1158,144 @@ describe('TxPoolV2', () => { }); }); }); + + describe('soft-deleted tx resurrection', () => { + let mockValidator: MockProxy>; + let poolWithValidator: AztecKVTxPoolV2; + let validatorStore: Awaited>; + let validatorArchiveStore: Awaited>; + + beforeEach(async () => { + mockValidator = mock>(); + mockValidator.validateTx.mockResolvedValue({ result: 'valid' }); + + validatorStore = await openTmpStore('p2p-protect-soft-delete'); + validatorArchiveStore = await openTmpStore('archive-protect-soft-delete'); + poolWithValidator = new AztecKVTxPoolV2(validatorStore, validatorArchiveStore, { + l2BlockSource: mockL2BlockSource, + worldStateSynchronizer: mockWorldState, + createTxValidator: () => Promise.resolve(mockValidator), + }); + await poolWithValidator.start(); + }); + + afterEach(async () => { + await poolWithValidator.stop(); + await validatorStore.delete(); + await validatorArchiveStore.delete(); + }); + + /** Helper: add tx, mine it, prune it, fail validation -> soft-deleted */ + const softDeleteTx = async (tx: Tx) => { + await poolWithValidator.addPendingTxs([tx]); + await poolWithValidator.handleMinedBlock(makeBlock([tx], slot1Header)); + expect(await 
poolWithValidator.getTxStatus(tx.getTxHash())).toBe('mined'); + + // Make validator reject so tx is soft-deleted on prune + mockValidator.validateTx.mockResolvedValue({ + result: 'invalid', + reason: ['timestamp expired'], + }); + await poolWithValidator.handlePrunedBlocks(block0Id); + + // Verify soft-deleted + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('deleted'); + expect(await poolWithValidator.getTxByHash(tx.getTxHash())).toBeDefined(); + + // Restore validator for subsequent operations + mockValidator.validateTx.mockResolvedValue({ result: 'valid' }); + }; + + it('resurrects a soft-deleted tx as protected instead of reporting it missing', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + // protectTxs should find the soft-deleted tx and resurrect it + const missing = await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + + expect(missing).toHaveLength(0); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + }); + + it('resurrected soft-deleted tx is retrievable and in indices', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + + // Should be retrievable + const retrieved = await poolWithValidator.getTxByHash(tx.getTxHash()); + expect(retrieved).toBeDefined(); + expect(retrieved!.getTxHash().toString()).toEqual(tx.getTxHash().toString()); + + // hasTxs should return true (in indices, not just soft-deleted) + const [hasTx] = await poolWithValidator.hasTxs([tx.getTxHash()]); + expect(hasTx).toBe(true); + }); + + it('resurrected tx is unprotected on the next slot', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + await poolWithValidator.protectTxs([tx.getTxHash()], slot1Header); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Advance to slot 2 — protection from slot 1 expires + await 
poolWithValidator.prepareForSlot(SlotNumber(2)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('pending'); + }); + + it('mix of existing, soft-deleted, and truly missing txs', async () => { + const txExisting = await mockTx(1); + const txSoftDeleted = await mockTx(2); + const txMissing = await mockTx(3); + + // Add txExisting as a regular pending tx + await poolWithValidator.addPendingTxs([txExisting]); + expect(await poolWithValidator.getTxStatus(txExisting.getTxHash())).toBe('pending'); + + // Soft-delete txSoftDeleted + await softDeleteTx(txSoftDeleted); + + // Protect all three + const missing = await poolWithValidator.protectTxs( + [txExisting.getTxHash(), txSoftDeleted.getTxHash(), txMissing.getTxHash()], + slot2Header, + ); + + // Only txMissing should be reported as missing + expect(toStrings(missing)).toEqual([hashOf(txMissing)]); + + // txExisting: protected (was pending, now protected) + expect(await poolWithValidator.getTxStatus(txExisting.getTxHash())).toBe('protected'); + // txSoftDeleted: protected (resurrected from soft-deleted) + expect(await poolWithValidator.getTxStatus(txSoftDeleted.getTxHash())).toBe('protected'); + // txMissing: pre-recorded protection, not in pool yet + expect(await poolWithValidator.getTxStatus(txMissing.getTxHash())).toBeUndefined(); + }); + + it('resurrected tx survives a second protectTxs call', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + // Resurrect via protectTxs at slot 1 + await poolWithValidator.protectTxs([tx.getTxHash()], slot1Header); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Re-protect at slot 2 — should update slot, not report missing + const missing = await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + expect(missing).toHaveLength(0); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Should survive prepareForSlot(2) + await 
poolWithValidator.prepareForSlot(SlotNumber(2)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Should unprotect at slot 3 + await poolWithValidator.prepareForSlot(SlotNumber(3)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('pending'); + }); + }); }); describe('handleMinedBlock', () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts index c37702c77a8f..e60e562702e0 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts @@ -61,7 +61,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte }; // Create the implementation - this.#impl = new TxPoolV2Impl(store, archiveStore, deps, callbacks, config, dateProvider, log); + this.#impl = new TxPoolV2Impl(store, archiveStore, deps, callbacks, telemetry, config, dateProvider, log); } // ============================================================================ @@ -83,7 +83,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte } protectTxs(txHashes: TxHash[], block: BlockHeader): Promise { - return this.#queue.put(() => Promise.resolve(this.#impl.protectTxs(txHashes, block))); + return this.#queue.put(() => this.#impl.protectTxs(txHashes, block)); } addMinedTxs(txs: Tx[], block: BlockHeader, opts: { source?: string } = {}): Promise { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index bc07801d8aaa..965bfc5f1a22 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -9,6 +9,7 @@ import type { L2Block, L2BlockId, L2BlockSource } from '@aztec/stdlib/block'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { DatabasePublicStateSource } 
from '@aztec/stdlib/trees'; import { BlockHeader, Tx, TxHash, type TxValidator } from '@aztec/stdlib/tx'; +import type { TelemetryClient } from '@aztec/telemetry-client'; import { TxArchive } from './archive/index.js'; import { DeletedPool } from './deleted_pool.js'; @@ -24,6 +25,7 @@ import { type PoolOperations, type PreAddPoolAccess, } from './eviction/index.js'; +import { TxPoolV2Instrumentation } from './instrumentation.js'; import { type AddTxsResult, DEFAULT_TX_POOL_V2_CONFIG, @@ -66,6 +68,8 @@ export class TxPoolV2Impl { #deletedPool: DeletedPool; #evictionManager: EvictionManager; #dateProvider: DateProvider; + #instrumentation: TxPoolV2Instrumentation; + #evictedTxHashes: Set = new Set(); #log: Logger; #callbacks: TxPoolV2Callbacks; @@ -74,6 +78,7 @@ export class TxPoolV2Impl { archiveStore: AztecAsyncKVStore, deps: TxPoolV2Dependencies, callbacks: TxPoolV2Callbacks, + telemetry: TelemetryClient, config: Partial = {}, dateProvider: DateProvider, log: Logger, @@ -89,6 +94,7 @@ export class TxPoolV2Impl { this.#archive = new TxArchive(archiveStore, this.#config.archivedTxLimit, log); this.#deletedPool = new DeletedPool(store, this.#txsDB, log); this.#dateProvider = dateProvider; + this.#instrumentation = new TxPoolV2Instrumentation(telemetry); this.#log = log; this.#callbacks = callbacks; @@ -221,6 +227,14 @@ export class TxPoolV2Impl { accepted.push(TxHash.fromString(txHashStr)); } + // Record metrics + if (ignored.length > 0) { + this.#instrumentation.recordIgnored(ignored.length); + } + if (rejected.length > 0) { + this.#instrumentation.recordRejected(rejected.length); + } + // Run post-add eviction rules for pending txs if (acceptedPending.size > 0) { const feePayers = Array.from(acceptedPending).map(txHash => this.#indices.getMetadata(txHash)!.feePayer); @@ -257,13 +271,15 @@ export class TxPoolV2Impl { } // Evict conflicts - for (const evictHashStr of preAddResult.txHashesToEvict) { - await this.#deleteTx(evictHashStr); - this.#log.debug(`Evicted tx 
${evictHashStr} due to higher-fee tx ${txHashStr}`); - if (acceptedPending.has(evictHashStr)) { - // Evicted tx was from this batch - mark as ignored in result - acceptedPending.delete(evictHashStr); - ignored.push(TxHash.fromString(evictHashStr)); + if (preAddResult.txHashesToEvict.length > 0) { + await this.#evictTxs(preAddResult.txHashesToEvict); + for (const evictHashStr of preAddResult.txHashesToEvict) { + this.#log.debug(`Evicted tx ${evictHashStr} due to higher-fee tx ${txHashStr}`); + if (acceptedPending.has(evictHashStr)) { + // Evicted tx was from this batch - mark as ignored in result + acceptedPending.delete(evictHashStr); + ignored.push(TxHash.fromString(evictHashStr)); + } } } @@ -324,9 +340,11 @@ export class TxPoolV2Impl { }); } - protectTxs(txHashes: TxHash[], block: BlockHeader): TxHash[] { + async protectTxs(txHashes: TxHash[], block: BlockHeader): Promise { const slotNumber = block.globalVariables.slotNumber; const missing: TxHash[] = []; + let softDeletedHits = 0; + let missingPreviouslyEvicted = 0; for (const txHash of txHashes) { const txHashStr = txHash.toString(); @@ -334,13 +352,39 @@ export class TxPoolV2Impl { if (this.#indices.has(txHashStr)) { // Update protection for existing tx this.#indices.updateProtection(txHashStr, slotNumber); + } else if (this.#deletedPool.isSoftDeleted(txHashStr)) { + // Resurrect soft-deleted tx as protected + const buffer = await this.#txsDB.getAsync(txHashStr); + if (buffer) { + const tx = Tx.fromBuffer(buffer); + await this.#addTx(tx, { protected: slotNumber }); + softDeletedHits++; + } else { + // Data missing despite soft-delete flag — treat as truly missing + this.#indices.setProtection(txHashStr, slotNumber); + missing.push(txHash); + } } else { - // Pre-record protection for tx we don't have yet + // Truly missing — pre-record protection for tx we don't have yet this.#indices.setProtection(txHashStr, slotNumber); missing.push(txHash); + if (this.#evictedTxHashes.has(txHashStr)) { + 
missingPreviouslyEvicted++; + } } } + // Record metrics + if (softDeletedHits > 0) { + this.#instrumentation.recordSoftDeletedHits(softDeletedHits); + } + if (missing.length > 0) { + this.#instrumentation.recordMissingOnProtect(missing.length); + } + if (missingPreviouslyEvicted > 0) { + this.#instrumentation.recordMissingPreviouslyEvicted(missingPreviouslyEvicted); + } + return missing; } @@ -420,8 +464,9 @@ export class TxPoolV2Impl { // Step 5: Resolve nullifier conflicts and add winners to pending indices const { added, toEvict } = this.#applyNullifierConflictResolution(valid); - // Step 6: Delete invalid and evicted txs - await this.#deleteTxsBatch([...invalid, ...toEvict]); + // Step 6: Delete invalid txs and evict conflict losers + await this.#deleteTxsBatch(invalid); + await this.#evictTxs(toEvict); // Step 7: Run eviction rules (enforce pool size limit) if (added.length > 0) { @@ -468,8 +513,9 @@ export class TxPoolV2Impl { // Step 6: Resolve nullifier conflicts and add winners to pending indices const { toEvict } = this.#applyNullifierConflictResolution(valid); - // Step 7: Delete invalid and evicted txs - await this.#deleteTxsBatch([...invalid, ...toEvict]); + // Step 7: Delete invalid txs and evict conflict losers + await this.#deleteTxsBatch(invalid); + await this.#evictTxs(toEvict); // Step 8: Run eviction rules for ALL pending txs (not just restored ones) // This handles cases like existing pending txs with invalid fee payer balances @@ -692,6 +738,28 @@ export class TxPoolV2Impl { } } + /** Evicts transactions: records eviction metric, caches hashes, then deletes. */ + async #evictTxs(txHashes: string[]): Promise { + if (txHashes.length === 0) { + return; + } + this.#instrumentation.recordEvictions(txHashes.length); + for (const txHashStr of txHashes) { + this.#addToEvictedCache(txHashStr); + } + await this.#deleteTxsBatch(txHashes); + } + + /** Adds a tx hash to the bounded evicted cache, evicting the oldest entry if at capacity. 
*/ + #addToEvictedCache(txHashStr: string): void { + if (this.#evictedTxHashes.size >= this.#config.evictedTxCacheSize) { + // FIFO eviction: remove the first (oldest) entry + const oldest = this.#evictedTxHashes.values().next().value!; + this.#evictedTxHashes.delete(oldest); + } + this.#evictedTxHashes.add(txHashStr); + } + // ============================================================================ // PRIVATE HELPERS - Validation & Conflict Resolution // ============================================================================ @@ -883,7 +951,7 @@ export class TxPoolV2Impl { getFeePayerPendingTxs: (feePayer: string) => this.#indices.getFeePayerPendingTxs(feePayer), getPendingTxCount: () => this.#indices.getPendingTxCount(), getLowestPriorityPending: (limit: number) => this.#indices.getLowestPriorityPending(limit), - deleteTxs: (txHashes: string[]) => this.#deleteTxsBatch(txHashes), + deleteTxs: (txHashes: string[]) => this.#evictTxs(txHashes), }; } diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index eaaf772e54e5..b4a3da7a8943 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -167,6 +167,37 @@ export const MEMPOOL_TX_MINED_DELAY: MetricDefinition = { valueType: ValueType.INT, }; +export const MEMPOOL_TX_POOL_V2_EVICTED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.evicted_count', + description: 'The number of transactions evicted from the tx pool', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_IGNORED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.ignored_count', + description: 'The number of transactions ignored in addPendingTxs', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_REJECTED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.rejected_count', + description: 'The number of transactions rejected in addPendingTxs', + valueType: ValueType.INT, +}; +export 
const MEMPOOL_TX_POOL_V2_SOFT_DELETED_HITS: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.soft_deleted_hits', + description: 'The number of transactions found in the soft-deleted pool', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_MISSING_ON_PROTECT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.missing_on_protect', + description: 'The number of truly missing transactions in protectTxs', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.missing_previously_evicted', + description: 'The number of truly missing transactions in protectTxs that were previously evicted', + valueType: ValueType.INT, +}; + export const DB_NUM_ITEMS: MetricDefinition = { name: 'aztec.db.num_items', description: 'LMDB Num Items', From 38e7ef0ae6f8878f566d860659ec6a587db7cbbc Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Wed, 11 Feb 2026 21:12:07 +0000 Subject: [PATCH 07/62] Add memory usage metric --- .../eviction/eviction_manager.test.ts | 2 ++ .../fee_payer_balance_eviction_rule.test.ts | 1 + .../fee_payer_balance_pre_add_rule.test.ts | 1 + .../invalid_txs_after_mining_rule.test.ts | 1 + .../invalid_txs_after_reorg_rule.test.ts | 1 + .../low_priority_pre_add_rule.test.ts | 1 + .../eviction/nullifier_conflict_rule.test.ts | 1 + .../mem_pools/tx_pool_v2/instrumentation.ts | 12 ++++++++- .../mem_pools/tx_pool_v2/tx_metadata.test.ts | 2 ++ .../src/mem_pools/tx_pool_v2/tx_metadata.ts | 27 +++++++++++++++++++ .../mem_pools/tx_pool_v2/tx_pool_indices.ts | 17 +++++++++--- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 4 +-- yarn-project/telemetry-client/src/metrics.ts | 6 +++++ 13 files changed, 70 insertions(+), 6 deletions(-) diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts index bb134e70df2a..5064159a151e 100644 --- 
a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts @@ -183,6 +183,7 @@ describe('EvictionManager', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -318,6 +319,7 @@ describe('EvictionManager', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts index de7c1d2bf76f..7e8ff3ae4909 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts @@ -43,6 +43,7 @@ describe('FeePayerBalanceEvictionRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts index 2a9ca2ea552e..1c7898a3491e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts @@ -24,6 +24,7 @@ describe('FeePayerBalancePreAddRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts 
b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts index 51fa1fd3b3cb..bf1211d7ea6e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts @@ -37,6 +37,7 @@ describe('InvalidTxsAfterMiningRule', () => { nullifiers, includeByTimestamp, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData({ includeByTimestamp }), }; }; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts index b07974d893b0..dfdc7a0f29e3 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts @@ -31,6 +31,7 @@ describe('InvalidTxsAfterReorgRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index c4ff83c31aab..cfce1d74dd73 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -16,6 +16,7 @@ describe('LowPriorityPreAddRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts index 507f2718c678..4a3b0d6297b7 100644 --- 
a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts @@ -21,6 +21,7 @@ describe('NullifierConflictRule', () => { nullifiers, includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts index a6f39f037a72..0d39be259d7e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts @@ -1,11 +1,16 @@ import { type Meter, Metrics, + type ObservableGauge, + type ObservableResult, type TelemetryClient, type UpDownCounter, createUpDownCounterWithDefault, } from '@aztec/telemetry-client'; +/** Callback that returns the current estimated metadata memory in bytes. */ +export type MetadataMemoryCallback = () => number; + /** Instrumentation for TxPoolV2Impl internal operations. 
*/ export class TxPoolV2Instrumentation { #evictedCounter: UpDownCounter; @@ -14,8 +19,9 @@ export class TxPoolV2Instrumentation { #softDeletedHitsCounter: UpDownCounter; #missingOnProtectCounter: UpDownCounter; #missingPreviouslyEvictedCounter: UpDownCounter; + #metadataMemoryGauge: ObservableGauge; - constructor(telemetry: TelemetryClient) { + constructor(telemetry: TelemetryClient, metadataMemoryCallback: MetadataMemoryCallback) { const meter: Meter = telemetry.getMeter('TxPoolV2Impl'); this.#evictedCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_EVICTED_COUNT); @@ -30,6 +36,10 @@ export class TxPoolV2Instrumentation { meter, Metrics.MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED, ); + this.#metadataMemoryGauge = meter.createObservableGauge(Metrics.MEMPOOL_TX_POOL_V2_METADATA_MEMORY); + this.#metadataMemoryGauge.addCallback((result: ObservableResult) => { + result.observe(metadataMemoryCallback()); + }); } recordEvictions(count: number) { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts index 287883580a1b..54123c98a798 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts @@ -48,6 +48,7 @@ describe('TxMetaData', () => { nullifiers: [], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -81,6 +82,7 @@ describe('TxMetaData', () => { nullifiers, includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts index 529484c84b6f..64b35b374401 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts @@ -63,6 +63,9 @@ export type TxMetaData = { /** Timestamp (ms) when the tx 
was received into the pool. 0 for hydrated txs (always eligible). */ receivedAt: number; + + /** Estimated memory footprint of this metadata object in bytes */ + readonly estimatedSizeBytes: number; }; /** Transaction state derived from TxMetaData fields and pool protection status */ @@ -86,6 +89,8 @@ export async function buildTxMetaData(tx: Tx): Promise { const { feeLimit, claimAmount } = await getFeePayerBalanceDelta(tx, ProtocolContractAddress.FeeJuice); + const estimatedSizeBytes = estimateTxMetaDataSize(nullifiers.length); + return { txHash, anchorBlockHeaderHash, @@ -96,6 +101,7 @@ export async function buildTxMetaData(tx: Tx): Promise { nullifiers, includeByTimestamp, receivedAt: 0, + estimatedSizeBytes, data: { getNonEmptyNullifiers: () => nullifierFrs, includeByTimestamp, @@ -109,6 +115,27 @@ export async function buildTxMetaData(tx: Tx): Promise { }; } +// V8 JS object overhead (~64 bytes for a plain object with hidden class). +// String overhead: ~32 bytes header + 1 byte per ASCII char (V8 one-byte strings). +// Hex string (0x + 64 hex chars = 66 chars): ~98 bytes per string. +// bigint: ~32 bytes. number: 8 bytes. Fr: ~80 bytes (32 data + object overhead). +const OBJECT_OVERHEAD = 64; +const HEX_STRING_BYTES = 98; +const BIGINT_BYTES = 32; +const FR_BYTES = 80; +// Fixed cost: object shell + txHash + anchorBlockHeaderHash + feePayer (3 hex strings) +// + priorityFee + claimAmount + feeLimit + includeByTimestamp (4 bigints) +// + receivedAt (number, 8 bytes) + estimatedSizeBytes (number, 8 bytes) +// + data closure object (~OBJECT_OVERHEAD + anchorBlockHeaderHashFr Fr + anchorBlockNumber number) +const FIXED_METADATA_BYTES = + OBJECT_OVERHEAD + 3 * HEX_STRING_BYTES + 4 * BIGINT_BYTES + 8 + 8 + OBJECT_OVERHEAD + FR_BYTES + 8; + +/** Estimates the in-memory size of a TxMetaData object based on the number of nullifiers. 
*/ +function estimateTxMetaDataSize(nullifierCount: number): number { + // Per nullifier: one hex string in nullifiers[] + one Fr in the captured nullifierFrs[] + return FIXED_METADATA_BYTES + nullifierCount * (HEX_STRING_BYTES + FR_BYTES); +} + /** Minimal fields required for priority comparison. */ type PriorityComparable = Pick; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts index cf8291a17bad..a9a368dce37c 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts @@ -348,13 +348,15 @@ export class TxPoolIndices { // METRICS // ============================================================================ - /** Counts transactions by state */ - countTxs(): { pending: number; protected: number; mined: number } { + /** Counts transactions by state and estimates total metadata memory usage */ + countTxs(): { pending: number; protected: number; mined: number; totalMetadataBytes: number } { let pending = 0; let protected_ = 0; let mined = 0; + let totalMetadataBytes = 0; for (const meta of this.#metadata.values()) { + totalMetadataBytes += meta.estimatedSizeBytes; const state = this.getTxState(meta); if (state === 'pending') { pending++; @@ -365,7 +367,16 @@ export class TxPoolIndices { } } - return { pending, protected: protected_, mined }; + return { pending, protected: protected_, mined, totalMetadataBytes }; + } + + /** Returns the estimated total memory consumed by all metadata objects */ + getTotalMetadataBytes(): number { + let total = 0; + for (const meta of this.#metadata.values()) { + total += meta.estimatedSizeBytes; + } + return total; } /** Gets all mined transactions with their block IDs */ diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 965bfc5f1a22..d19fb9faf3eb 100644 --- 
a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -94,7 +94,7 @@ export class TxPoolV2Impl { this.#archive = new TxArchive(archiveStore, this.#config.archivedTxLimit, log); this.#deletedPool = new DeletedPool(store, this.#txsDB, log); this.#dateProvider = dateProvider; - this.#instrumentation = new TxPoolV2Instrumentation(telemetry); + this.#instrumentation = new TxPoolV2Instrumentation(telemetry, () => this.#indices.getTotalMetadataBytes()); this.#log = log; this.#callbacks = callbacks; @@ -673,7 +673,7 @@ export class TxPoolV2Impl { // === Metrics === - countTxs(): { pending: number; protected: number; mined: number } { + countTxs(): { pending: number; protected: number; mined: number; totalMetadataBytes: number } { return this.#indices.countTxs(); } diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index b4a3da7a8943..7a3e269de18c 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -197,6 +197,12 @@ export const MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED: MetricDefinition = { description: 'The number of truly missing transactions in protectTxs that were previously evicted', valueType: ValueType.INT, }; +export const MEMPOOL_TX_POOL_V2_METADATA_MEMORY: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.metadata_memory', + description: 'Estimated total memory consumed by in-memory transaction metadata', + unit: 'By', + valueType: ValueType.INT, +}; export const DB_NUM_ITEMS: MetricDefinition = { name: 'aztec.db.num_items', From 7517cdbf23a9fe6f03079e7c82534e48155478b3 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 11 Feb 2026 18:18:21 -0300 Subject: [PATCH 08/62] fix(node-lib): add missing dateProvider to L1TxUtils integration test The createL1TxUtilsFromSigners factory now requires a dateProvider for the config-driven delayer, but the integration 
test was not providing it. Co-Authored-By: Claude Opus 4.6 --- .../epochs_proof_fails.parallel.test.ts | 4 +- .../end-to-end/src/e2e_epochs/epochs_test.ts | 4 +- yarn-project/end-to-end/src/fixtures/setup.ts | 3 +- .../ethereum/src/l1_tx_utils/factory.ts | 2 +- .../src/l1_tx_utils/forwarder_l1_tx_utils.ts | 2 +- .../ethereum/src/l1_tx_utils/l1_tx_utils.ts | 22 ++-- .../ethereum/src/l1_tx_utils/tx_delayer.ts | 109 ++++++++++-------- .../ethereum/src/test/tx_delayer.test.ts | 4 +- .../node-lib/src/factories/l1_tx_utils.ts | 14 +-- .../factories/l1_tx_utils_integration.test.ts | 7 ++ yarn-project/prover-node/src/factory.ts | 4 + yarn-project/prover-node/src/prover-node.ts | 7 ++ 12 files changed, 107 insertions(+), 75 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts index ec62cb814a5a..c228d7694280 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts @@ -72,7 +72,7 @@ describe('e2e_epochs/epochs_proof_fails', () => { context.proverNode = proverNode; // Get the prover delayer from the newly created prover node - proverDelayer = (proverNode as TestProverNode).publisher.l1TxUtils.delayer!; + proverDelayer = proverNode.getDelayer()!; // Hold off prover tx until end epoch 1 const [epoch2Start] = getTimestampRangeForEpoch(EpochNumber(2), constants); @@ -113,7 +113,7 @@ describe('e2e_epochs/epochs_proof_fails', () => { const proverNode = await test.createProverNode({ cancelTxOnTimeout: false, maxSpeedUpAttempts: 0 }); // Get the prover delayer from the newly created prover node - proverDelayer = (proverNode as TestProverNode).publisher.l1TxUtils.delayer!; + proverDelayer = proverNode.getDelayer()!; // Inject a delay in prover node proving equal to the length of an epoch, to make sure deadline will be hit const epochProverManager = 
(proverNode as TestProverNode).prover; diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index a1cbb145722a..951e686db373 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -9,7 +9,7 @@ import { EpochCache } from '@aztec/epoch-cache'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { DefaultL1ContractsConfig } from '@aztec/ethereum/config'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { type Delayer, createDelayer, waitUntilL1Timestamp, wrapClientWithDelayer } from '@aztec/ethereum/l1-tx-utils'; +import { Delayer, createDelayer, waitUntilL1Timestamp, wrapClientWithDelayer } from '@aztec/ethereum/l1-tx-utils'; import { ChainMonitor } from '@aztec/ethereum/test'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; @@ -376,7 +376,7 @@ export class EpochsTestContext { privateKeyToAccount(this.getNextPrivateKey()), this.l1Client.chain, ); - const delayer = createDelayer(this.context.dateProvider, { ethereumSlotDuration: this.L1_BLOCK_TIME_IN_S }); + const delayer = createDelayer(this.context.dateProvider, { ethereumSlotDuration: this.L1_BLOCK_TIME_IN_S }, {}); const client = wrapClientWithDelayer(rawClient, delayer); expect(await client.getBalance({ address: client.account.address })).toBeGreaterThan(0n); return { client, delayer }; diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index 01f7d83149a1..4c6f0fa62e5e 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ -47,7 +47,6 @@ import type { P2PClientDeps } from '@aztec/p2p'; import { MockGossipSubNetwork, getMockPubSubP2PServiceFactory } from '@aztec/p2p/test-helpers'; import { 
protocolContractsHash } from '@aztec/protocol-contracts'; import { type ProverNode, type ProverNodeConfig, type ProverNodeDeps, createProverNode } from '@aztec/prover-node'; -import type { TestProverNode } from '@aztec/prover-node/test'; import { type PXEConfig, getPXEConfig } from '@aztec/pxe/server'; import type { SequencerClient } from '@aztec/sequencer-client'; import { type ContractInstanceWithAddress, getContractInstanceFromInstantiationParams } from '@aztec/stdlib/contract'; @@ -518,7 +517,7 @@ export async function setup( } const sequencerDelayer = sequencerClient?.getDelayer(); - const proverDelayer = proverNode ? (proverNode as TestProverNode).publisher.l1TxUtils.delayer : undefined; + const proverDelayer = proverNode?.getDelayer(); logger.verbose('Creating a pxe...'); const pxeConfig = { ...getPXEConfig(), ...pxeOpts }; diff --git a/yarn-project/ethereum/src/l1_tx_utils/factory.ts b/yarn-project/ethereum/src/l1_tx_utils/factory.ts index 970398f7badb..9ac1560b7aed 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/factory.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/factory.ts @@ -9,7 +9,7 @@ import type { L1TxUtilsConfig } from './config.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtils } from './l1_tx_utils.js'; import { createViemSigner } from './signer.js'; -import type { Delayer } from './tx_delayer.js'; +import { Delayer } from './tx_delayer.js'; import type { SigningCallback } from './types.js'; /** Source of signing capability: either a wallet client or a separate client + signer. 
*/ diff --git a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts index f9549fdd9772..da2f96ae1c63 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/forwarder_l1_tx_utils.ts @@ -12,7 +12,7 @@ import type { L1SignerSource } from './factory.js'; import { resolveSignerSource } from './factory.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { L1TxUtils } from './l1_tx_utils.js'; -import type { Delayer } from './tx_delayer.js'; +import { Delayer } from './tx_delayer.js'; import type { L1BlobInputs, L1TxConfig, L1TxRequest, SigningCallback } from './types.js'; /** diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts index b0c7de888cf7..f91539bfb946 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts @@ -3,7 +3,7 @@ import { maxBigint } from '@aztec/foundation/bigint'; import { merge, pick } from '@aztec/foundation/collection'; import { InterruptError, TimeoutError } from '@aztec/foundation/error'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; import { DateProvider } from '@aztec/foundation/timer'; @@ -31,7 +31,7 @@ import { type L1TxUtilsConfig, l1TxUtilsConfigMappings } from './config.js'; import { MAX_L1_TX_LIMIT } from './constants.js'; import type { IL1TxMetrics, IL1TxStore } from './interfaces.js'; import { ReadOnlyL1TxUtils } from './readonly_l1_tx_utils.js'; -import { type Delayer, createDelayer, wrapClientWithDelayer } from './tx_delayer.js'; +import { Delayer, createDelayer, 
wrapClientWithDelayer } from './tx_delayer.js'; import { DroppedTransactionError, type L1BlobInputs, @@ -50,7 +50,7 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { protected nonceManager: NonceManager; protected txs: L1TxState[] = []; /** Tx delayer for testing. Only set when enableDelayer config is true. */ - public delayer: Delayer | undefined; + public delayer?: Delayer; /** KZG instance for blob operations. */ protected kzg?: BlobKzgInstance; @@ -73,14 +73,15 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { // Set up delayer: use provided one or create new if (config?.enableDelayer && config?.ethereumSlotDuration) { - this.delayer = delayer ?? this.createDelayer({ ethereumSlotDuration: config.ethereumSlotDuration }); - this.client = this.wrapClientWithDelayer(this.client, this.delayer); + this.delayer = + delayer ?? this.createDelayer({ ethereumSlotDuration: config.ethereumSlotDuration }, logger.getBindings()); + this.client = wrapClientWithDelayer(this.client, this.delayer); if (config.txDelayerMaxInclusionTimeIntoSlot !== undefined) { this.delayer.setMaxInclusionTimeIntoSlot(config.txDelayerMaxInclusionTimeIntoSlot); } } else if (delayer) { // Delayer provided but enableDelayer not set — just store it without wrapping - // This shouldn't normally happen but handle gracefully + logger.warn('Delayer provided but enableDelayer config is not set; delayer will not be used'); this.delayer = delayer; } } @@ -763,12 +764,7 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { } /** Creates a new delayer instance. */ - protected createDelayer(opts: { ethereumSlotDuration: bigint | number }): Delayer { - return createDelayer(this.dateProvider, opts); - } - - /** Wraps the client with delayer logic. 
*/ - protected wrapClientWithDelayer(client: T, delayer: Delayer): T { - return wrapClientWithDelayer(client, delayer); + protected createDelayer(opts: { ethereumSlotDuration: bigint | number }, bindings: LoggerBindings): Delayer { + return createDelayer(this.dateProvider, opts, bindings); } } diff --git a/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts index b4a637b3308e..a5bb36d63c39 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/tx_delayer.ts @@ -1,5 +1,5 @@ import { omit } from '@aztec/foundation/collection'; -import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { retryUntil } from '@aztec/foundation/retry'; import type { DateProvider } from '@aztec/foundation/timer'; @@ -9,9 +9,11 @@ import { type Hex, type PublicClient, type TransactionSerializableEIP4844, + type TransactionSerialized, keccak256, parseTransaction, publicActions, + recoverTransactionAddress, serializeTransaction, walletActions, } from 'viem'; @@ -73,71 +75,86 @@ export function waitUntilL1Timestamp( ); } -export interface Delayer { - /** Returns the hashes of all effectively sent txs. */ - getSentTxHashes(): Hex[]; - /** Returns the raw hex for all cancelled txs. */ - getCancelledTxs(): Hex[]; - /** Delays the next tx to be sent so it lands on the given L1 block number. */ - pauseNextTxUntilBlock(l1BlockNumber: number | bigint | undefined): void; - /** Delays the next tx to be sent so it lands on the given timestamp. */ - pauseNextTxUntilTimestamp(l1Timestamp: number | bigint | undefined): void; - /** Delays the next tx to be sent indefinitely. */ - cancelNextTx(): void; - /** - * Sets max inclusion time into slot. 
If more than this many seconds have passed - * since the last L1 block was mined, then any tx will not be mined in the current - * L1 slot but will be deferred for the next one. - */ - setMaxInclusionTimeIntoSlot(seconds: number | bigint | undefined): void; -} +/** Manages tx delaying for testing, intercepting sendRawTransaction calls to delay or cancel them. */ +export class Delayer { + private logger: Logger; + + public maxInclusionTimeIntoSlot: number | undefined = undefined; + public ethereumSlotDuration: bigint; + public nextWait: { l1Timestamp: bigint } | { l1BlockNumber: bigint } | { indefinitely: true } | undefined = undefined; + public sentTxHashes: Hex[] = []; + public cancelledTxs: Hex[] = []; -class DelayerImpl implements Delayer { - private logger = createLogger('ethereum:tx_delayer'); constructor( public dateProvider: DateProvider, opts: { ethereumSlotDuration: bigint | number }, + bindings: LoggerBindings, ) { this.ethereumSlotDuration = BigInt(opts.ethereumSlotDuration); + this.logger = createLogger('ethereum:tx_delayer', bindings); } - public maxInclusionTimeIntoSlot: number | undefined = undefined; - public ethereumSlotDuration: bigint; - public nextWait: { l1Timestamp: bigint } | { l1BlockNumber: bigint } | { indefinitely: true } | undefined = undefined; - public sentTxHashes: Hex[] = []; - public cancelledTxs: Hex[] = []; + /** Returns the logger instance used by this delayer. */ + getLogger(): Logger { + return this.logger; + } + /** Returns the hashes of all effectively sent txs. */ getSentTxHashes() { return this.sentTxHashes; } + /** Returns the raw hex for all cancelled txs. */ getCancelledTxs(): Hex[] { return this.cancelledTxs; } + /** Delays the next tx to be sent so it lands on the given L1 block number. */ pauseNextTxUntilBlock(l1BlockNumber: number | bigint) { this.nextWait = { l1BlockNumber: BigInt(l1BlockNumber) }; } + /** Delays the next tx to be sent so it lands on the given timestamp. 
*/ pauseNextTxUntilTimestamp(l1Timestamp: number | bigint) { this.nextWait = { l1Timestamp: BigInt(l1Timestamp) }; } + /** Delays the next tx to be sent indefinitely. */ cancelNextTx() { this.nextWait = { indefinitely: true }; } + /** + * Sets max inclusion time into slot. If more than this many seconds have passed + * since the last L1 block was mined, then any tx will not be mined in the current + * L1 slot but will be deferred for the next one. + */ setMaxInclusionTimeIntoSlot(seconds: number | undefined) { this.maxInclusionTimeIntoSlot = seconds; } } /** - * Creates a new DelayerImpl instance. Exposed so callers can create a single shared delayer + * Creates a new Delayer instance. Exposed so callers can create a single shared delayer * and pass it to multiple `wrapClientWithDelayer` calls. */ -export function createDelayer(dateProvider: DateProvider, opts: { ethereumSlotDuration: bigint | number }): Delayer { - return new DelayerImpl(dateProvider, opts); +export function createDelayer( + dateProvider: DateProvider, + opts: { ethereumSlotDuration: bigint | number }, + bindings: LoggerBindings, +): Delayer { + return new Delayer(dateProvider, opts, bindings); +} + +/** Tries to recover the sender address from a serialized signed transaction. */ +async function tryRecoverSender(serializedTransaction: Hex): Promise { + try { + return await recoverTransactionAddress({ + serializedTransaction: serializedTransaction as TransactionSerialized, + }); + } catch { + return undefined; + } } /** @@ -145,8 +162,7 @@ export function createDelayer(dateProvider: DateProvider, opts: { ethereumSlotDu * The delayer intercepts sendRawTransaction calls and delays them based on the delayer's state. 
*/ export function wrapClientWithDelayer(client: T, delayer: Delayer): T { - const logger = createLogger('ethereum:tx_delayer'); - const delayerImpl = delayer as DelayerImpl; + const logger = delayer.getLogger(); // Cast to ExtendedViemWalletClient for the extend chain since it has sendRawTransaction. // The sendRawTransaction override is applied to all clients regardless of type. @@ -162,19 +178,20 @@ export function wrapClientWithDelayer(client: T, delayer: const { serializedTransaction } = args[0]; const publicClient = client as unknown as PublicClient; + const sender = await tryRecoverSender(serializedTransaction); - if (delayerImpl.nextWait !== undefined) { + if (delayer.nextWait !== undefined) { // Check if we have been instructed to delay the next tx. - const waitUntil = delayerImpl.nextWait; - delayerImpl.nextWait = undefined; + const waitUntil = delayer.nextWait; + delayer.nextWait = undefined; // Compute the tx hash manually so we emulate sendRawTransaction response txHash = computeTxHash(serializedTransaction); // Cancel tx outright if instructed if ('indefinitely' in waitUntil && waitUntil.indefinitely) { - logger.info(`Cancelling tx ${txHash}`); - delayerImpl.cancelledTxs.push(serializedTransaction); + logger.info(`Cancelling tx ${txHash}`, { sender }); + delayer.cancelledTxs.push(serializedTransaction); return Promise.resolve(txHash); } @@ -183,28 +200,30 @@ export function wrapClientWithDelayer(client: T, delayer: 'l1BlockNumber' in waitUntil ? waitUntilBlock(publicClient, waitUntil.l1BlockNumber - 1n, logger) : 'l1Timestamp' in waitUntil - ? waitUntilL1Timestamp(publicClient, waitUntil.l1Timestamp - delayerImpl.ethereumSlotDuration, logger) + ? 
waitUntilL1Timestamp(publicClient, waitUntil.l1Timestamp - delayer.ethereumSlotDuration, logger) : undefined; logger.info(`Delaying tx ${txHash} until ${inspect(waitUntil)}`, { + sender, argsLen: args.length, ...omit(parseTransaction(serializedTransaction), 'data', 'sidecars'), }); - } else if (delayerImpl.maxInclusionTimeIntoSlot !== undefined) { + } else if (delayer.maxInclusionTimeIntoSlot !== undefined) { // Check if we need to delay txs sent too close to the end of the slot. const currentBlock = await publicClient.getBlock({ includeTransactions: false }); const { timestamp: lastBlockTimestamp, number } = currentBlock; - const now = delayerImpl.dateProvider.now(); + const now = delayer.dateProvider.now(); txHash = computeTxHash(serializedTransaction); const logData = { + sender, ...omit(parseTransaction(serializedTransaction), 'data', 'sidecars'), lastBlockTimestamp, now, - maxInclusionTimeIntoSlot: delayerImpl.maxInclusionTimeIntoSlot, + maxInclusionTimeIntoSlot: delayer.maxInclusionTimeIntoSlot, }; - if (now / 1000 - Number(lastBlockTimestamp) > delayerImpl.maxInclusionTimeIntoSlot) { + if (now / 1000 - Number(lastBlockTimestamp) > delayer.maxInclusionTimeIntoSlot) { // If the last block was mined more than `maxInclusionTimeIntoSlot` seconds ago, then we cannot include // any txs in the current slot, so we delay the tx until the next slot. 
logger.info(`Delaying inclusion of tx ${txHash} until the next slot since it was sent too late`, logData); @@ -227,15 +246,15 @@ export function wrapClientWithDelayer(client: T, delayer: computedTxHash: txHash, }); } - logger.info(`Sent previously delayed tx ${clientTxHash}`); - delayerImpl.sentTxHashes.push(clientTxHash); + logger.info(`Sent previously delayed tx ${clientTxHash}`, { sender }); + delayer.sentTxHashes.push(clientTxHash); }) .catch(err => logger.error(`Error sending tx after delay`, err)); return Promise.resolve(txHash!); } else { const txHash = await client.sendRawTransaction(...args); - logger.debug(`Sent tx immediately ${txHash}`); - delayerImpl.sentTxHashes.push(txHash); + logger.debug(`Sent tx immediately ${txHash}`, { sender }); + delayer.sentTxHashes.push(txHash); return txHash; } }, diff --git a/yarn-project/ethereum/src/test/tx_delayer.test.ts b/yarn-project/ethereum/src/test/tx_delayer.test.ts index 055a3e77fa0b..00abbb4c0cff 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.test.ts +++ b/yarn-project/ethereum/src/test/tx_delayer.test.ts @@ -10,7 +10,7 @@ import { type PrivateKeyAccount, createWalletClient, fallback, getContract, http import { privateKeyToAccount } from 'viem/accounts'; import { foundry } from 'viem/chains'; -import { type Delayer, createDelayer, waitUntilBlock, wrapClientWithDelayer } from '../l1_tx_utils/tx_delayer.js'; +import { Delayer, createDelayer, waitUntilBlock, wrapClientWithDelayer } from '../l1_tx_utils/tx_delayer.js'; import type { ExtendedViemWalletClient } from '../types.js'; import { EthCheatCodes } from './eth_cheat_codes.js'; import { startAnvil } from './start_anvil.js'; @@ -41,7 +41,7 @@ describe('tx_delayer', () => { chain: foundry, account, }).extend(publicActions); - delayer = createDelayer(dateProvider, { ethereumSlotDuration: ETHEREUM_SLOT_DURATION }); + delayer = createDelayer(dateProvider, { ethereumSlotDuration: ETHEREUM_SLOT_DURATION }, {}); client = wrapClientWithDelayer(_client, delayer); 
}); diff --git a/yarn-project/node-lib/src/factories/l1_tx_utils.ts b/yarn-project/node-lib/src/factories/l1_tx_utils.ts index 2f436d0a37e2..d3a441c66bb9 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils.ts +++ b/yarn-project/node-lib/src/factories/l1_tx_utils.ts @@ -27,7 +27,7 @@ async function createSharedDeps( deps: { telemetry: TelemetryClient; logger?: ReturnType; - dateProvider?: DateProvider; + dateProvider: DateProvider; }, ) { const logger = deps.logger ?? createLogger('l1-tx-utils'); @@ -43,8 +43,8 @@ async function createSharedDeps( // Create a single shared delayer for all L1TxUtils instances in this group const delayer = - config.enableDelayer && config.ethereumSlotDuration !== undefined && deps.dateProvider - ? createDelayer(deps.dateProvider, { ethereumSlotDuration: config.ethereumSlotDuration }) + config.enableDelayer && config.ethereumSlotDuration !== undefined + ? createDelayer(deps.dateProvider, { ethereumSlotDuration: config.ethereumSlotDuration }, logger.getBindings()) : undefined; return { logger, store, metrics, dateProvider: deps.dateProvider, delayer }; @@ -60,7 +60,7 @@ export async function createL1TxUtilsFromWallets( deps: { telemetry: TelemetryClient; logger?: ReturnType; - dateProvider?: DateProvider; + dateProvider: DateProvider; kzg?: BlobKzgInstance; }, ) { @@ -81,7 +81,7 @@ export async function createL1TxUtilsFromSigners( deps: { telemetry: TelemetryClient; logger?: ReturnType; - dateProvider?: DateProvider; + dateProvider: DateProvider; kzg?: BlobKzgInstance; }, ) { @@ -120,7 +120,7 @@ export async function createForwarderL1TxUtilsFromWallets( deps: { telemetry: TelemetryClient; logger?: ReturnType; - dateProvider?: DateProvider; + dateProvider: DateProvider; kzg?: BlobKzgInstance; }, ) { @@ -144,7 +144,7 @@ export async function createForwarderL1TxUtilsFromSigners( deps: { telemetry: TelemetryClient; logger?: ReturnType; - dateProvider?: DateProvider; + dateProvider: DateProvider; kzg?: BlobKzgInstance; }, ) { diff 
--git a/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts b/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts index 2d049d12f488..abbf3259ef21 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts +++ b/yarn-project/node-lib/src/factories/l1_tx_utils_integration.test.ts @@ -3,6 +3,7 @@ import { getAddressFromPrivateKey } from '@aztec/ethereum/account'; import type { ViemClient } from '@aztec/ethereum/types'; import { times } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; +import type { DateProvider } from '@aztec/foundation/timer'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { KeystoreManager } from '@aztec/node-keystore'; @@ -18,6 +19,7 @@ describe('L1TxUtils Integration - Publisher Deduplication', () => { let kvStore: AztecAsyncKVStore; let mockClient: ViemClient; let mockTelemetry: TelemetryClient; + let mockDateProvider: DateProvider; let count = 0; const mockConfig = { @@ -45,6 +47,9 @@ describe('L1TxUtils Integration - Publisher Deduplication', () => { createUpDownCounter: () => ({ add: () => {} }), }), } as any; + + // Mock DateProvider + mockDateProvider = { now: () => Date.now(), nowInSeconds: () => Math.floor(Date.now() / 1000) } as DateProvider; }); afterEach(async () => { @@ -83,6 +88,7 @@ describe('L1TxUtils Integration - Publisher Deduplication', () => { const l1TxUtils = await createL1TxUtilsFromSigners(mockClient, allPublisherSigners, mockConfig, { telemetry: mockTelemetry, + dateProvider: mockDateProvider, kzg: Blob.getViemKzgInstance(), }); @@ -143,6 +149,7 @@ describe('L1TxUtils Integration - Publisher Deduplication', () => { const l1TxUtils = await createL1TxUtilsFromSigners(mockClient, allPublisherSigners, mockConfig, { telemetry: mockTelemetry, + dateProvider: mockDateProvider, kzg: Blob.getViemKzgInstance(), }); diff --git 
a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 6d61df3dc101..ac79e73e22ff 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -214,6 +214,9 @@ export async function createProverNode( l1TxUtils.map(utils => utils.getSenderAddress()), ); + // Extract the shared delayer from the first L1TxUtils instance (all instances share the same delayer) + const delayer = l1TxUtils[0]?.delayer; + return new ProverNode( prover, publisherFactory, @@ -227,5 +230,6 @@ export async function createProverNode( l1Metrics, proverNodeConfig, telemetry, + delayer, ); } diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 06d61cb3f080..47f704581168 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -1,5 +1,6 @@ import type { Archiver } from '@aztec/archiver'; import type { RollupContract } from '@aztec/ethereum/contracts'; +import type { Delayer } from '@aztec/ethereum/l1-tx-utils'; import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { assertRequired, compact, pick, sum } from '@aztec/foundation/collection'; import type { Fr } from '@aztec/foundation/curves/bn254'; @@ -79,6 +80,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable protected readonly l1Metrics: L1Metrics, config: Partial = {}, protected readonly telemetryClient: TelemetryClient = getTelemetryClient(), + private delayer?: Delayer, ) { this.config = { proverNodePollingIntervalMs: 1_000, @@ -111,6 +113,11 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable return this.p2pClient; } + /** Returns the shared tx delayer for prover L1 txs, if enabled. Test-only. 
*/ + public getDelayer(): Delayer | undefined { + return this.delayer; + } + /** * Handles an epoch being completed by starting a proof for it if there are no active jobs for it. * @param epochNumber - The epoch number that was just completed. From ddc5e2f54fe7c5f6cc7465f4e8c3384de7155444 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 12 Feb 2026 13:03:37 +0000 Subject: [PATCH 09/62] fix(end-to-end): force-enable tx delayer in e2e setup The default config maps `enableDelayer` to `false`. The previous code used `config.enableDelayer ?? true`, but `??` only triggers on null/undefined, not on `false`, so the delayer was never enabled in tests. This caused all e2e_epochs tests to fail with "Could not find prover or sequencer delayer". Co-Authored-By: Claude Opus 4.6 --- yarn-project/end-to-end/src/fixtures/setup.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index 4c6f0fa62e5e..f4beb8213752 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ -285,8 +285,8 @@ export async function setup( config.realProofs = !!opts.realProofs; // Only enforce the time table if requested config.enforceTimeTable = !!opts.enforceTimeTable; - // Default to enabling the tx delayer unless explicitly disabled - config.enableDelayer = config.enableDelayer ?? 
true; + // Enable the tx delayer for tests (default config has it disabled, so we force-enable it here) + config.enableDelayer = true; config.listenAddress = '127.0.0.1'; const logger = getLogger(); From 5e9908ef09e0a8b1240d3274f90f7ac5e22a5932 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 12 Feb 2026 12:21:52 -0300 Subject: [PATCH 10/62] test(archiver): add missing reorg and prune unit tests Add four new tests covering previously untested archiver reorg/prune scenarios: - Upcoming L2 prune (handleEpochPrune removes unproven checkpoints) - Lost proof (proven checkpoint rolls back to zero) - Re-proof after prune (pruned blocks get re-proposed and proven) - New checkpoint behind L1 syncpoint (L1 reorg adds checkpoint in already-scanned range) Also adds canPruneAtTime mock support to FakeL1State and removes two xit placeholders. Co-Authored-By: Claude Opus 4.6 --- .../archiver/src/archiver-sync.test.ts | 248 +++++++++++++++++- .../archiver/src/test/fake_l1_state.ts | 8 + 2 files changed, 254 insertions(+), 2 deletions(-) diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index ab5ae6f9d2bd..11cd821b297d 100644 --- a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -970,9 +970,253 @@ describe('Archiver Sync', () => { expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(3)); }); - xit('handles an upcoming L2 prune', () => {}); + it('handles an upcoming L2 prune', async () => { + const pruneSpy = jest.fn(); + archiver.events.on(L2BlockSourceEvents.L2PruneUnproven, pruneSpy); - xit('does not attempt to download data for a checkpoint that has been pruned', () => {}); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Add and sync checkpoints 1, 2, 3 + const { checkpoint: cp1 } = await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 50n, + 
numL1ToL2Messages: 3, + }); + + await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 80n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + await fake.addCheckpoint(CheckpointNumber(3), { + l1BlockNumber: 90n, + messagesL1BlockNumber: 66n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.syncImmediate(); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(3)); + + // Mark checkpoint 1 as proven + fake.markCheckpointAsProven(CheckpointNumber(1)); + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Enable pruning (simulate proof window about to expire) + fake.setCanPrune(true); + + // Sync again — handleEpochPrune should remove checkpoints 2 and 3 + fake.setL1BlockNumber(101n); + await archiver.syncImmediate(); + + // Proven checkpoint should advance to 1 since we synced it + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Checkpoints 2 and 3 should be removed, archiver at checkpoint 1 + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // L2PruneUnproven event should have been emitted with the correct epoch + // CP2 is at L1 block 80 → slot = (80 * 12) / 24 = 40 → epoch = 40 / 4 = 10 + expect(pruneSpy).toHaveBeenCalledWith( + expect.objectContaining({ + type: L2BlockSourceEvents.L2PruneUnproven, + epochNumber: EpochNumber(10), + }), + ); + + // L2Tips should reflect rollback to checkpoint 1 + const lastBlockInCheckpoint1 = cp1.blocks[cp1.blocks.length - 1].number; + const tips = await archiver.getL2Tips(); + expect(tips.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); + expect(tips.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); + + // Data from checkpoints 2 and 3 should be removed + expect(await archiver.getCheckpoints(CheckpointNumber(2), 1)).toEqual([]); + expect(await archiver.getCheckpoints(CheckpointNumber(3), 1)).toEqual([]); + + 
archiver.events.off(L2BlockSourceEvents.L2PruneUnproven, pruneSpy); + }, 15_000); + + it('lost a proof (proven checkpoint rolls back to zero)', async () => { + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Add and sync checkpoints 1 and 2 + await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 50n, + numL1ToL2Messages: 3, + }); + + await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 80n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(90n); + await archiver.syncImmediate(); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2)); + + // Mark checkpoint 1 as proven, sync + fake.markCheckpointAsProven(CheckpointNumber(1)); + fake.setL1BlockNumber(91n); + await archiver.syncImmediate(); + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Reset proven to 0 (simulate lost proof due to L1 reorg) + fake.markCheckpointAsProven(CheckpointNumber(0)); + fake.setL1BlockNumber(92n); + await archiver.syncImmediate(); + + // Proven checkpoint should be back at 0 + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Pending/checkpointed chain should still be at checkpoint 2 + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2)); + + // L2Tips proven tip should reflect rollback + const tips = await archiver.getL2Tips(); + expect(tips.proven.block.number).toEqual(0); + }, 10_000); + + it('new proof appeared for previously pruned blocks', async () => { + const provenSpy = jest.fn(); + archiver.events.on(L2BlockSourceEvents.L2BlockProven, provenSpy); + + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Add and sync checkpoints 1, 2, 3 + const cp1NumMessages = 3; + const { checkpoint: cp1 } = await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 50n, + numL1ToL2Messages: 
cp1NumMessages, + }); + const cp1Archive = cp1.blocks[cp1.blocks.length - 1].archive; + + await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 80n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + await fake.addCheckpoint(CheckpointNumber(3), { + l1BlockNumber: 90n, + messagesL1BlockNumber: 66n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.syncImmediate(); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(3)); + + // Mark checkpoint 1 as proven so epoch prune only removes 2 and 3 + fake.markCheckpointAsProven(CheckpointNumber(1)); + + // Enable pruning to trigger epoch prune (unwind checkpoints 2 and 3) + fake.setCanPrune(true); + fake.setL1BlockNumber(101n); + await archiver.syncImmediate(); + + // Verify checkpoints 2 and 3 are pruned (only proven checkpoint 1 remains) + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(1)); + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Disable pruning + fake.setCanPrune(false); + + // Re-add checkpoints 2 and 3 on L1 (new epoch proposal). + // Remove old checkpoint events and their messages from the fake. + // The message removal triggers rolling hash recalculation, and on next sync + // handleL1ToL2Messages detects the mismatch and clears the archiver's message store. 
+ fake.removeCheckpoint(CheckpointNumber(2)); + fake.removeCheckpoint(CheckpointNumber(3)); + fake.removeMessagesAfter(cp1NumMessages); + + await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 110n, + numL1ToL2Messages: 0, + previousArchive: cp1Archive, + }); + + await fake.addCheckpoint(CheckpointNumber(3), { + l1BlockNumber: 120n, + numL1ToL2Messages: 0, + }); + + // Mark checkpoint 2 as proven + fake.markCheckpointAsProven(CheckpointNumber(2)); + + // Sync + fake.setL1BlockNumber(130n); + await archiver.syncImmediate(); + + // Archiver should re-sync checkpoints 2 and 3 + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(3)); + + // Proven checkpoint should advance to 2 + expect(await archiver.getProvenCheckpointNumber()).toEqual(CheckpointNumber(2)); + + // L2BlockProven event should have been emitted + expect(provenSpy).toHaveBeenCalledWith( + expect.objectContaining({ + type: L2BlockSourceEvents.L2BlockProven, + }), + ); + + archiver.events.off(L2BlockSourceEvents.L2BlockProven, provenSpy); + }, 15_000); + + it('detects new checkpoint behind L1 syncpoint due to L1 reorg', async () => { + const loggerSpy = jest.spyOn(syncLogger, 'warn'); + + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); + + // Sync checkpoint 1 from L1 to establish baseline (sync point = 70) + await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 50n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.syncImmediate(); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Manually advance the sync point past where the new checkpoint will appear. + // This simulates a scenario where the sync point was advanced (e.g., via invalid + // attestation handling at line 204), placing it ahead of a new checkpoint. 
+ await archiverStore.setCheckpointSynchedL1BlockNumber(200n); + // checkForNewCheckpointsBeforeL1SyncPoint requires validationResult?.valid to be true + await archiverStore.setPendingChainValidationStatus({ valid: true }); + + // Add checkpoint 2 at L1 block 150 (behind the manual sync point of 200). + // This simulates an L1 reorg that added a new checkpoint in a range already scanned. + await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 150n, + messagesL1BlockNumber: 130n, + numL1ToL2Messages: 3, + }); + + // Sync: searches from 201 onward, doesn't find CP2 at 150. + // checkForNewCheckpointsBeforeL1SyncPoint detects latestLocal(1) < pending(2) + // and rolls back the sync point to CP1's L1 block (70). + // The rollback does NOT re-fetch in the same iteration. + fake.setL1BlockNumber(201n); + await archiver.syncImmediate(); + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(1)); + expect(loggerSpy).toHaveBeenCalledWith( + expect.stringMatching(/Failed to reach checkpoint 2.*Rolling back/), + expect.anything(), + ); + + // Second sync: fetches from the rolled-back sync point (70) and finds CP2 at L1 block 150 + fake.setL1BlockNumber(202n); + await archiver.syncImmediate(); + + expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2)); + }, 15_000); }); describe('checkpointing local proposed blocks', () => { diff --git a/yarn-project/archiver/src/test/fake_l1_state.ts b/yarn-project/archiver/src/test/fake_l1_state.ts index 6ff0a0d3dcb9..4afb84902926 100644 --- a/yarn-project/archiver/src/test/fake_l1_state.ts +++ b/yarn-project/archiver/src/test/fake_l1_state.ts @@ -131,6 +131,7 @@ export class FakeL1State { private provenCheckpointNumber: CheckpointNumber = CheckpointNumber(0); private targetCommitteeSize: number = 0; private version: bigint = 1n; + private canPruneResult: boolean = false; // Computed from checkpoints based on L1 block visibility private pendingCheckpointNumber: CheckpointNumber = 
CheckpointNumber(0); @@ -276,6 +277,11 @@ export class FakeL1State { this.targetCommitteeSize = size; } + /** Sets whether the rollup contract would allow pruning at the next block. */ + setCanPrune(value: boolean): void { + this.canPruneResult = value; + } + /** * Removes all entries for a checkpoint number (simulates L1 reorg or prune). * Note: Does NOT remove messages for this checkpoint (use numL1ToL2Messages: 0 when re-adding). @@ -384,6 +390,8 @@ export class FakeL1State { }); }); + mockRollup.canPruneAtTime.mockImplementation(() => Promise.resolve(this.canPruneResult)); + // Mock the wrapper method for fetching checkpoint events mockRollup.getCheckpointProposedEvents.mockImplementation((fromBlock: bigint, toBlock: bigint) => Promise.resolve(this.getCheckpointProposedLogs(fromBlock, toBlock)), From 62e99e0f22e7fb4b8e35b874c12b299346292762 Mon Sep 17 00:00:00 2001 From: spypsy <6403450+spypsy@users.noreply.github.com> Date: Thu, 12 Feb 2026 17:11:29 +0000 Subject: [PATCH 11/62] chore: ensure consistent HA DB timestamps Fixes [A-543](https://linear.app/aztec-labs/issue/A-543/ensure-no-timezone-issues-when-cleaning-up-old-duties) Also add note to docs for users to know that they can't use the same DB for nodes running on different rollup versions Follow-up from comments on #20060 --- .../src/composed/ha/e2e_ha_full.test.ts | 229 +++++++++++++++++- yarn-project/validator-ha-signer/README.md | 10 + .../src/db/postgres.test.ts | 14 +- .../validator-ha-signer/src/db/postgres.ts | 6 +- .../validator-ha-signer/src/db/schema.ts | 7 +- 5 files changed, 250 insertions(+), 16 deletions(-) diff --git a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts index 9b6426aa45dd..be6446fa4b9b 100644 --- a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts +++ b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts @@ -14,14 +14,17 @@ import type { Logger } from '@aztec/aztec.js/log'; import type 
{ AztecNode } from '@aztec/aztec.js/node'; import { GovernanceProposerContract } from '@aztec/ethereum/contracts'; import type { DeployAztecL1ContractsReturnType } from '@aztec/ethereum/deploy-aztec-l1-contracts'; -import { SlotNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { SecretValue } from '@aztec/foundation/config'; import { withLoggerBindings } from '@aztec/foundation/log/server'; import { retryUntil } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; import type { TestDateProvider } from '@aztec/foundation/timer'; import { StatefulTestContractArtifact } from '@aztec/noir-test-contracts.js/StatefulTest'; import { type AttestationInfo, getAttestationInfoFromPublishedCheckpoint } from '@aztec/stdlib/block'; -import { type DutyRow, DutyStatus } from '@aztec/validator-ha-signer/types'; +import { PostgresSlashingProtectionDatabase } from '@aztec/validator-ha-signer/db'; +import { type DutyRow, DutyStatus, DutyType } from '@aztec/validator-ha-signer/types'; import { jest } from '@jest/globals'; import { Pool } from 'pg'; @@ -261,6 +264,9 @@ describe('HA Full Setup', () => { }); afterEach(async () => { + // Restore any mocked functions + jest.restoreAllMocks(); + // Clean up database state between tests try { await mainPool.query('DELETE FROM validator_duties'); @@ -671,4 +677,223 @@ describe('HA Full Setup', () => { } } }); + + describe('Clock Skew and Timezone Safety', () => { + const rollupAddress = EthAddress.random(); + const validatorAddress = EthAddress.random(); + it('should not be affected by process.env.TZ changes', async () => { + const spDb = new PostgresSlashingProtectionDatabase(mainPool); + const originalTZ = process.env.TZ; + + try { + // Node 1 in UTC creates and signs a duty + process.env.TZ = 'UTC'; + const duty1 = await spDb.tryInsertOrGetExisting({ + rollupAddress, + 
validatorAddress, + slot: SlotNumber(100), + blockNumber: BlockNumber(100), + dutyType: DutyType.ATTESTATION, + messageHash: Buffer32.random().toString(), + nodeId: 'node-utc', + }); + expect(duty1.isNew).toBe(true); + await spDb.updateDutySigned( + rollupAddress, + validatorAddress, + SlotNumber(100), + DutyType.ATTESTATION, + '0xsig', + duty1.record.lockToken, + -1, + ); + + await sleep(100); + + // Node 2 in Tokyo creates and signs a duty at approximately the same time + process.env.TZ = 'Asia/Tokyo'; + const duty2 = await spDb.tryInsertOrGetExisting({ + rollupAddress, + validatorAddress, + slot: SlotNumber(101), + blockNumber: BlockNumber(101), + dutyType: DutyType.ATTESTATION, + messageHash: Buffer32.random().toString(), + nodeId: 'node-tokyo', + }); + expect(duty2.isNew).toBe(true); + await spDb.updateDutySigned( + rollupAddress, + validatorAddress, + SlotNumber(101), + DutyType.ATTESTATION, + '0xsig', + duty2.record.lockToken, + -1, + ); + + // Verify both duties were stored at correct absolute times (seconds apart, not hours) + const result = await mainPool.query<{ slot: string; unix_timestamp: string }>( + `SELECT slot, EXTRACT(EPOCH FROM started_at) as unix_timestamp + FROM validator_duties + WHERE slot IN ('100', '101') + ORDER BY slot DESC`, + ); + + const timestamp1 = parseFloat(result.rows[0].unix_timestamp); + const timestamp2 = parseFloat(result.rows[1].unix_timestamp); + const diffSeconds = Math.abs(timestamp1 - timestamp2); + + // Should be less than 10 seconds apart (not hours due to timezone interpretation) + expect(diffSeconds).toBeLessThan(10); + } finally { + process.env.TZ = originalTZ; + } + }); + + it('should not delete recent duties when node clock is ahead (using cleanupOldDuties)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(mainPool); + + // Ensure clean slate for this test + await mainPool.query('DELETE FROM validator_duties WHERE slot = $1', ['200']); + + // Create and sign a duty using our actual methods + 
const duty = await spDb.tryInsertOrGetExisting({ + rollupAddress, + validatorAddress, + slot: SlotNumber(200), + blockNumber: BlockNumber(200), + dutyType: DutyType.ATTESTATION, + messageHash: Buffer32.random().toString(), + nodeId: 'test-node', + }); + expect(duty.isNew).toBe(true); + + await spDb.updateDutySigned( + rollupAddress, + validatorAddress, + SlotNumber(200), + DutyType.ATTESTATION, + '0xsig', + duty.record.lockToken, + -1, + ); + + // Verify duty exists before cleanup + const beforeCleanup = await mainPool.query( + `SELECT * FROM validator_duties WHERE slot = $1 AND validator_address = $2`, + ['200', validatorAddress.toString().toLowerCase()], + ); + expect(beforeCleanup.rows.length).toBe(1); + expect(beforeCleanup.rows[0].status).toBe('signed'); + + // Simulate node with clock 2 hours ahead + const realNow = Date.now; + jest.spyOn(Date, 'now').mockImplementation(() => realNow() + 2 * 60 * 60 * 1000); + + // Use our actual cleanupOldDuties method + const numCleaned = await spDb.cleanupOldDuties(60 * 60 * 1000); // 1 hour + + // Should NOT delete the duty we just created (it uses DB's clock, not node's) + expect(numCleaned).toBe(0); + + // Verify duty still exists + const result = await mainPool.query( + `SELECT * FROM validator_duties WHERE slot = $1 AND validator_address = $2`, + ['200', validatorAddress.toString().toLowerCase()], + ); + expect(result.rows.length).toBe(1); + }); + + it('should delete old duties based on DB time, not node time (using cleanupOldDuties)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(mainPool); + + // Ensure clean slate for this test + await mainPool.query('DELETE FROM validator_duties WHERE slot = $1', ['300']); + + // Create and sign a duty using our actual methods + const duty = await spDb.tryInsertOrGetExisting({ + rollupAddress, + validatorAddress, + slot: SlotNumber(300), + blockNumber: BlockNumber(300), + dutyType: DutyType.ATTESTATION, + messageHash: Buffer32.random().toString(), + nodeId: 
'test-node', + }); + expect(duty.isNew).toBe(true); + + await spDb.updateDutySigned( + rollupAddress, + validatorAddress, + SlotNumber(300), + DutyType.ATTESTATION, + '0xsig', + duty.record.lockToken, + -1, + ); + + // Manually backdate the duty to 2 hours old (simulating an old duty from DB's perspective) + const updateResult = await mainPool.query( + `UPDATE validator_duties + SET started_at = CURRENT_TIMESTAMP - INTERVAL '2 hours', + completed_at = CURRENT_TIMESTAMP - INTERVAL '2 hours' + WHERE slot = $1 AND validator_address = $2`, + ['300', validatorAddress.toString().toLowerCase()], + ); + expect(updateResult.rowCount).toBe(1); + + // Verify duty is backdated (should be ~2 hours old) + const beforeCleanup = await mainPool.query( + `SELECT *, EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - started_at)) as age_seconds + FROM validator_duties WHERE slot = $1`, + ['300'], + ); + expect(beforeCleanup.rows.length).toBe(1); + expect(beforeCleanup.rows[0].status).toBe('signed'); + expect(parseFloat(beforeCleanup.rows[0].age_seconds)).toBeGreaterThan(7000); // ~2 hours in seconds + + // Simulate node with clock 1 hour behind + const realNow = Date.now; + jest.spyOn(Date, 'now').mockImplementation(() => realNow() - 1 * 60 * 60 * 1000); + + // Use our actual cleanupOldDuties method - should delete based on DB time + const numCleaned = await spDb.cleanupOldDuties(60 * 60 * 1000); // 1 hour + expect(numCleaned).toBeGreaterThanOrEqual(1); + + // Verify duty was deleted + const result = await mainPool.query( + `SELECT * FROM validator_duties WHERE slot = $1 AND validator_address = $2`, + ['300', validatorAddress.toString().toLowerCase()], + ); + expect(result.rows.length).toBe(0); + }); + + it('should not delete recent stuck duties when node clock is ahead (using cleanupOwnStuckDuties)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(mainPool); + + // Create a signing duty (stuck, not completed) using our actual method + const duty = await 
spDb.tryInsertOrGetExisting({ + rollupAddress, + validatorAddress, + slot: SlotNumber(400), + blockNumber: BlockNumber(400), + dutyType: DutyType.ATTESTATION, + messageHash: Buffer32.random().toString(), + nodeId: 'stuck-node', + }); + expect(duty.isNew).toBe(true); + // Don't call updateDutySigned - leave it in 'signing' state (stuck) + + // Simulate node with clock 3 hours ahead + const realNow = Date.now; + jest.spyOn(Date, 'now').mockImplementation(() => realNow() + 3 * 60 * 60 * 1000); + + // Use our actual cleanupOwnStuckDuties method + const numCleaned = await spDb.cleanupOwnStuckDuties('stuck-node', 60 * 60 * 1000); // 1 hour + + // Should NOT delete the duty (it uses DB's clock, not node's) + expect(numCleaned).toBe(0); + }); + }); }); diff --git a/yarn-project/validator-ha-signer/README.md b/yarn-project/validator-ha-signer/README.md index fa69ded55bc5..3969f709e34f 100644 --- a/yarn-project/validator-ha-signer/README.md +++ b/yarn-project/validator-ha-signer/README.md @@ -178,6 +178,16 @@ All signing operations require a `SigningContext` that includes: Note: `AUTH_REQUEST` duties bypass HA protection since signing multiple times is safe for authentication requests. +## Important Limitations + +### Database Isolation Per Rollup Version + +**You cannot use the same database to provide slashing protection for validator nodes running on different rollup versions** (e.g., current rollup and old rollup simultaneously). + +When the HA signer performs background cleanup via `cleanupOutdatedRollupDuties()`, it removes all duties where the rollup address doesn't match the current rollup address. If two validators running on different rollup versions share the same database, they will delete each other's duties during cleanup. + +**Solution**: Use separate databases for validators running on different rollup versions. Each rollup version requires its own isolated slashing protection database. 
+ ## Development ```bash diff --git a/yarn-project/validator-ha-signer/src/db/postgres.test.ts b/yarn-project/validator-ha-signer/src/db/postgres.test.ts index 125f15bb593c..92758dde54ef 100644 --- a/yarn-project/validator-ha-signer/src/db/postgres.test.ts +++ b/yarn-project/validator-ha-signer/src/db/postgres.test.ts @@ -1476,15 +1476,16 @@ describe('PostgresSlashingProtectionDatabase', () => { it('should only clean up old signed duties, not signing or recent duties', async () => { const spDb = new PostgresSlashingProtectionDatabase(pool); - const oldTimestamp = new Date(Date.now() - 2 * 60 * 60 * 1000); // 2 hours ago - // Insert old signed duties (should be cleaned up) + // Insert old signed duties (should be cleaned up) - 2 hours old for (let i = 0; i < 2; i++) { await pglite.query( `INSERT INTO validator_duties ( rollup_address, validator_address, slot, block_number, block_index_within_checkpoint, duty_type, status, message_hash, signature, node_id, lock_token, started_at, completed_at - ) VALUES ($1, $2, $3, $4, $5, $6, 'signed', $7, '0xsignature', $8, 'token', $9, $9)`, + ) VALUES ($1, $2, $3, $4, $5, $6, 'signed', $7, '0xsignature', $8, 'token', + CURRENT_TIMESTAMP - INTERVAL '2 hours', + CURRENT_TIMESTAMP - INTERVAL '2 hours')`, [ ROLLUP_ADDRESS.toString(), VALIDATOR_ADDRESS.toString(), @@ -1494,18 +1495,18 @@ describe('PostgresSlashingProtectionDatabase', () => { DutyType.BLOCK_PROPOSAL, Buffer32.random().toString(), NODE_ID, - oldTimestamp, ], ); } - // Insert old signing duties (should NOT be cleaned up) + // Insert old signing duties (should NOT be cleaned up) - 2 hours old but still signing for (let i = 0; i < 2; i++) { await pglite.query( `INSERT INTO validator_duties ( rollup_address, validator_address, slot, block_number, block_index_within_checkpoint, duty_type, status, message_hash, node_id, lock_token, started_at - ) VALUES ($1, $2, $3, $4, $5, $6, 'signing', $7, $8, 'token', $9)`, + ) VALUES ($1, $2, $3, $4, $5, $6, 'signing', $7, $8, 'token', 
+ CURRENT_TIMESTAMP - INTERVAL '2 hours')`, [ ROLLUP_ADDRESS.toString(), VALIDATOR_ADDRESS.toString(), @@ -1515,7 +1516,6 @@ describe('PostgresSlashingProtectionDatabase', () => { DutyType.BLOCK_PROPOSAL, Buffer32.random().toString(), NODE_ID, - oldTimestamp, ], ); } diff --git a/yarn-project/validator-ha-signer/src/db/postgres.ts b/yarn-project/validator-ha-signer/src/db/postgres.ts index 73aad3799b65..8c80f22ddd7b 100644 --- a/yarn-project/validator-ha-signer/src/db/postgres.ts +++ b/yarn-project/validator-ha-signer/src/db/postgres.ts @@ -254,8 +254,7 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat * @returns the number of duties cleaned up */ async cleanupOwnStuckDuties(nodeId: string, maxAgeMs: number): Promise { - const cutoff = new Date(Date.now() - maxAgeMs); - const result = await this.pool.query(CLEANUP_OWN_STUCK_DUTIES, [nodeId, cutoff]); + const result = await this.pool.query(CLEANUP_OWN_STUCK_DUTIES, [nodeId, maxAgeMs]); return result.rowCount ?? 0; } @@ -277,8 +276,7 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat * @returns the number of duties cleaned up */ async cleanupOldDuties(maxAgeMs: number): Promise { - const cutoff = new Date(Date.now() - maxAgeMs); - const result = await this.pool.query(CLEANUP_OLD_DUTIES, [cutoff]); + const result = await this.pool.query(CLEANUP_OLD_DUTIES, [maxAgeMs]); return result.rowCount ?? 
0; } } diff --git a/yarn-project/validator-ha-signer/src/db/schema.ts b/yarn-project/validator-ha-signer/src/db/schema.ts index 92cd57c5e618..8b06a2c812a4 100644 --- a/yarn-project/validator-ha-signer/src/db/schema.ts +++ b/yarn-project/validator-ha-signer/src/db/schema.ts @@ -203,23 +203,24 @@ WHERE status = 'signed' /** * Query to clean up old duties (for maintenance) - * Removes SIGNED duties older than a specified timestamp + * Removes SIGNED duties older than a specified age (in milliseconds) */ export const CLEANUP_OLD_DUTIES = ` DELETE FROM validator_duties WHERE status = 'signed' - AND started_at < $1; + AND started_at < CURRENT_TIMESTAMP - ($1 || ' milliseconds')::INTERVAL; `; /** * Query to cleanup own stuck duties * Removes duties in 'signing' status for a specific node that are older than maxAgeMs + * Uses DB's CURRENT_TIMESTAMP to avoid clock skew issues between nodes */ export const CLEANUP_OWN_STUCK_DUTIES = ` DELETE FROM validator_duties WHERE node_id = $1 AND status = 'signing' - AND started_at < $2; + AND started_at < CURRENT_TIMESTAMP - ($2 || ' milliseconds')::INTERVAL; `; /** From 79294850dc9fc99f4042185074947c535f2d8a2e Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Thu, 12 Feb 2026 20:58:46 +0000 Subject: [PATCH 12/62] More metrics --- .../values/network-requirements.yaml | 2 +- spartan/environments/five-tps-long-epoch.env | 2 +- spartan/environments/five-tps-short-epoch.env | 4 +- spartan/environments/ten-tps-long-epoch.env | 2 +- spartan/environments/ten-tps-short-epoch.env | 2 +- spartan/environments/tps-scenario.env | 2 +- .../end-to-end/src/spartan/n_tps.test.ts | 3 +- .../src/mem_pools/tx_pool_v2/deleted_pool.ts | 11 ++++ .../tx_pool_v2/eviction/eviction_manager.ts | 14 +++-- .../fee_payer_balance_eviction_rule.test.ts | 4 +- .../fee_payer_balance_eviction_rule.ts | 2 +- .../mem_pools/tx_pool_v2/eviction/index.ts | 1 + .../tx_pool_v2/eviction/interfaces.ts | 12 ++++- .../invalid_txs_after_mining_rule.test.ts | 8 +-- 
.../eviction/invalid_txs_after_mining_rule.ts | 2 +- .../invalid_txs_after_reorg_rule.test.ts | 2 +- .../eviction/invalid_txs_after_reorg_rule.ts | 2 +- .../low_priority_eviction_rule.test.ts | 4 +- .../eviction/low_priority_eviction_rule.ts | 2 +- .../src/mem_pools/tx_pool_v2/interfaces.ts | 2 +- .../src/mem_pools/tx_pool_v2/tx_pool_v2.ts | 7 ++- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 53 ++++++++++++++----- .../telemetry-client/src/attributes.ts | 3 ++ yarn-project/telemetry-client/src/metrics.ts | 12 +++++ 24 files changed, 116 insertions(+), 42 deletions(-) diff --git a/spartan/aztec-chaos-scenarios/values/network-requirements.yaml b/spartan/aztec-chaos-scenarios/values/network-requirements.yaml index a56dbe192d68..f300e19f1880 100644 --- a/spartan/aztec-chaos-scenarios/values/network-requirements.yaml +++ b/spartan/aztec-chaos-scenarios/values/network-requirements.yaml @@ -9,7 +9,7 @@ networkShaping: correlation: "75" bandwidth: enabled: true - rate: 25mbps + rate: 200mbps packetLoss: enabled: true loss: diff --git a/spartan/environments/five-tps-long-epoch.env b/spartan/environments/five-tps-long-epoch.env index 84bd8ee6e591..ff87b7ef63a2 100644 --- a/spartan/environments/five-tps-long-epoch.env +++ b/spartan/environments/five-tps-long-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=32 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=64 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/five-tps-short-epoch.env b/spartan/environments/five-tps-short-epoch.env index 56141ee724c4..b6601d0ad866 100644 --- a/spartan/environments/five-tps-short-epoch.env +++ b/spartan/environments/five-tps-short-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true 
CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=10 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" @@ -55,7 +55,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=180 +SEQ_MAX_TX_PER_BLOCK=36 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/ten-tps-long-epoch.env b/spartan/environments/ten-tps-long-epoch.env index 39ea3d75e197..e3fefc644364 100644 --- a/spartan/environments/ten-tps-long-epoch.env +++ b/spartan/environments/ten-tps-long-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=32 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=64 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/ten-tps-short-epoch.env b/spartan/environments/ten-tps-short-epoch.env index 35868695e0f6..90f16277c385 100644 --- a/spartan/environments/ten-tps-short-epoch.env +++ b/spartan/environments/ten-tps-short-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/tps-scenario.env b/spartan/environments/tps-scenario.env 
index 18ecd87bc070..b7548574fd39 100644 --- a/spartan/environments/tps-scenario.env +++ b/spartan/environments/tps-scenario.env @@ -4,7 +4,7 @@ GCP_REGION=us-west1-a AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=72 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 AZTEC_LAG_IN_EPOCHS=1 CREATE_ETH_DEVNET=false diff --git a/yarn-project/end-to-end/src/spartan/n_tps.test.ts b/yarn-project/end-to-end/src/spartan/n_tps.test.ts index 8e8e022171de..e52495e4cc4d 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps.test.ts @@ -345,7 +345,8 @@ describe('sustained N TPS test', () => { let lowValueTxs = 0; const lowValueSendTx = async (wallet: TestWallet) => { lowValueTxs++; - const feeAmount = Number(randomBigInt(10n)) + 1; + //const feeAmount = Number(randomBigInt(10n)) + 1; + const feeAmount = 1; const fee = new GasFees(0, feeAmount); logger.info('Sending low value tx ' + lowValueTxs + ' with fee ' + feeAmount); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts index e4eea8793967..b5621d8e85ab 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts @@ -298,6 +298,17 @@ export class DeletedPool { return this.#state.size; } + /** Gets the count of soft-deleted transactions (both prune-based and slot-based). */ + getSoftDeletedCount(): number { + let count = this.#slotDeletedTxs.size; + for (const state of this.#state.values()) { + if (state.softDeleted) { + count++; + } + } + return count; + } + /** * Gets all transaction hashes from pruned blocks. 
*/ diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts index 476f40eb0be7..9c7d01c9d941 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts @@ -12,6 +12,7 @@ import { type PreAddPoolAccess, type PreAddResult, type PreAddRule, + type TaggedEviction, } from './interfaces.js'; /** @@ -48,7 +49,8 @@ export class EvictionManager { * Returns combined result of all rules. */ async runPreAddRules(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { - const allTxHashesToEvict: string[] = []; + const evictions: TaggedEviction[] = []; + const seen = new Set(); for (const rule of this.preAddRules) { try { @@ -58,10 +60,11 @@ export class EvictionManager { return result; } - // Collect txs to evict from all rules + // Collect txs to evict from all rules, tagged with the rule name for (const txHash of result.txHashesToEvict) { - if (!allTxHashesToEvict.includes(txHash)) { - allTxHashesToEvict.push(txHash); + if (!seen.has(txHash)) { + seen.add(txHash); + evictions.push({ txHash, reason: rule.name }); } } } catch (err) { @@ -77,7 +80,8 @@ export class EvictionManager { return { shouldIgnore: false, - txHashesToEvict: allTxHashesToEvict, + txHashesToEvict: evictions.map(e => e.txHash), + evictions, }; } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts index 7e8ff3ae4909..b36f4f506795 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts @@ -145,7 +145,7 @@ describe('FeePayerBalanceEvictionRule', () => { expect(result.success).toBe(true); 
expect(result.txsEvicted).toEqual(['0x1111']); // Low priority evicted - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'FeePayerBalanceEviction'); }); it('evicts multiple low-priority txs when balance is insufficient', async () => { @@ -194,7 +194,7 @@ describe('FeePayerBalanceEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0xaaaa']); // Only lowest priority evicted - expect(deleteTxsMock).toHaveBeenCalledWith(['0xaaaa']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0xaaaa'], 'FeePayerBalanceEviction'); }); it('considers claim amount when calculating available balance', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts index 7cd1b48a4a08..e01469bf28f8 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts @@ -67,7 +67,7 @@ export class FeePayerBalanceEvictionRule implements EvictionRule { ).flat(); if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); + await pool.deleteTxs(txsToEvict, this.name); } return { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts index faf1cc5b9615..c0f4b573f362 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts @@ -9,6 +9,7 @@ export { type PreAddPoolAccess, type PreAddResult, type PreAddRule, + type TaggedEviction, } from './interfaces.js'; // Pre-add rules diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index 1d0ba416013c..905203b1157c 100644 --- 
a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -67,6 +67,12 @@ export interface PreAddPoolAccess { getLowestPriorityPendingTx(): TxMetaData | undefined; } +/** A single eviction tagged with the rule that caused it. */ +export interface TaggedEviction { + readonly txHash: string; + readonly reason: string; +} + /** * Result of a pre-add check for a single transaction. */ @@ -75,6 +81,8 @@ export interface PreAddResult { readonly shouldIgnore: boolean; /** Tx hashes (as strings) that should be evicted if this tx is added */ readonly txHashesToEvict: string[]; + /** Evictions tagged with the rule name that produced them. Populated by EvictionManager. */ + readonly evictions?: TaggedEviction[]; /** Optional reason for ignoring */ readonly reason?: string; } @@ -120,8 +128,8 @@ export interface PoolOperations { /** Get the N lowest priority pending tx hashes */ getLowestPriorityPending(limit: number): string[]; - /** Delete transactions by hash */ - deleteTxs(txHashes: string[]): Promise; + /** Delete transactions by hash, with an optional reason for metrics */ + deleteTxs(txHashes: string[], reason?: string): Promise; } /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts index bf1211d7ea6e..fc5acf684f1f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts @@ -123,7 +123,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // Only tx1 has duplicate nullifier - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('evicts 
transactions with expired timestamps', async () => { @@ -143,7 +143,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // Only tx1 is expired - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('evicts transactions with timestamp equal to block timestamp', async () => { @@ -163,7 +163,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // tx1 has timestamp <= block timestamp - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('handles transactions with both duplicate nullifiers and expired timestamps', async () => { @@ -183,7 +183,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('handles empty pending transactions list', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts index 593898ef9182..340c5953dc69 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts @@ -51,7 +51,7 @@ export class InvalidTxsAfterMiningRule implements EvictionRule { } if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); + await pool.deleteTxs(txsToEvict, this.name); } this.log.debug(`Evicted ${txsToEvict.length} invalid txs after block mined`); diff --git 
a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts index dfdc7a0f29e3..378431d2d7c2 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts @@ -161,7 +161,7 @@ describe('InvalidTxsAfterReorgRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted.length).toBe(pendingTxs.length); - expect(deleteTxsMock).toHaveBeenCalledWith(result.txsEvicted); + expect(deleteTxsMock).toHaveBeenCalledWith(result.txsEvicted, 'InvalidTxsAfterReorg'); }); it('handles error from deleteTxs operation', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts index 7e4630e55269..d6b1f381fd48 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts @@ -73,7 +73,7 @@ export class InvalidTxsAfterReorgRule implements EvictionRule { if (txsToEvict.length > 0) { this.log.verbose(`Evicting ${txsToEvict.length} txs from pool due to referencing pruned blocks`); - await pool.deleteTxs(txsToEvict); + await pool.deleteTxs(txsToEvict, this.name); } const keptCount = pendingTxs.length - txsToEvict.length; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts index 75aabcd10439..93744abc603b 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts @@ -132,7 +132,7 @@ describe('LowPriorityEvictionRule', 
() => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x3333', '0x4444']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x4444']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x4444'], 'LowPriorityEviction'); }); it('tracks newly added transactions that were evicted', async () => { @@ -148,7 +148,7 @@ describe('LowPriorityEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x3333', '0x1111']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x1111'], 'LowPriorityEviction'); }); it('handles all transactions being non-evictable', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts index c9a015abe3ad..6c5b545709cb 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts @@ -55,7 +55,7 @@ export class LowPriorityEvictionRule implements EvictionRule { const txsToEvict = pool.getLowestPriorityPending(numberToEvict); if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); + await pool.deleteTxs(txsToEvict, this.name); } const numNewTxsEvicted = context.newTxHashes.filter(newTxHash => txsToEvict.includes(newTxHash)).length; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 8636961f123f..9cb5bd836eaf 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -50,7 +50,7 @@ export const DEFAULT_TX_POOL_V2_CONFIG: TxPoolV2Config = { maxPendingTxCount: 0, // 0 = disabled archivedTxLimit: 0, // 0 = disabled minTxPoolAgeMs: 2_000, - evictedTxCacheSize: 1_000, + evictedTxCacheSize: 10_000, }; 
/** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts index e60e562702e0..a885529a99da 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts @@ -195,7 +195,12 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte this.#queue.put(() => { const counts = this.#impl.countTxs(); return Promise.resolve({ - itemCount: { pending: counts.pending, protected: counts.protected, mined: counts.mined }, + itemCount: { + pending: counts.pending, + protected: counts.protected, + mined: counts.mined, + softDeleted: counts.softDeleted, + }, }); }), () => this.#store.estimateSize(), diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index d19fb9faf3eb..5a8c041637c4 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -270,9 +270,20 @@ export class TxPoolV2Impl { return { status: 'ignored' }; } - // Evict conflicts - if (preAddResult.txHashesToEvict.length > 0) { - await this.#evictTxs(preAddResult.txHashesToEvict); + // Evict conflicts, grouped by rule name for metrics + if (preAddResult.evictions && preAddResult.evictions.length > 0) { + const byReason = new Map(); + for (const { txHash: evictHash, reason } of preAddResult.evictions) { + const group = byReason.get(reason); + if (group) { + group.push(evictHash); + } else { + byReason.set(reason, [evictHash]); + } + } + for (const [reason, hashes] of byReason) { + await this.#evictTxs(hashes, reason); + } for (const evictHashStr of preAddResult.txHashesToEvict) { this.#log.debug(`Evicted tx ${evictHashStr} due to higher-fee tx ${txHashStr}`); if (acceptedPending.has(evictHashStr)) { @@ -379,12 +390,17 @@ export class TxPoolV2Impl { 
this.#instrumentation.recordSoftDeletedHits(softDeletedHits); } if (missing.length > 0) { + this.#log.debug(`protectTxs missing tx hashes: ${missing.map(h => h.toString()).join(', ')}`); this.#instrumentation.recordMissingOnProtect(missing.length); } if (missingPreviouslyEvicted > 0) { this.#instrumentation.recordMissingPreviouslyEvicted(missingPreviouslyEvicted); } + this.#log.info( + `Protected ${txHashes.length} txs, missing: ${missing.length}, soft-deleted hits: ${softDeletedHits}`, + ); + return missing; } @@ -453,6 +469,7 @@ export class TxPoolV2Impl { // Step 3: Filter to only txs that have metadata and are not mined const txsToRestore = this.#indices.filterRestorable(expiredProtected); if (txsToRestore.length === 0) { + this.#log.debug(`Preparing for slot ${slotNumber}, no txs to unprotect`); return; } @@ -466,7 +483,7 @@ export class TxPoolV2Impl { // Step 6: Delete invalid txs and evict conflict losers await this.#deleteTxsBatch(invalid); - await this.#evictTxs(toEvict); + await this.#evictTxs(toEvict, 'NullifierConflict'); // Step 7: Run eviction rules (enforce pool size limit) if (added.length > 0) { @@ -515,7 +532,7 @@ export class TxPoolV2Impl { // Step 7: Delete invalid txs and evict conflict losers await this.#deleteTxsBatch(invalid); - await this.#evictTxs(toEvict); + await this.#evictTxs(toEvict, 'NullifierConflict'); // Step 8: Run eviction rules for ALL pending txs (not just restored ones) // This handles cases like existing pending txs with invalid fee payer balances @@ -673,8 +690,17 @@ export class TxPoolV2Impl { // === Metrics === - countTxs(): { pending: number; protected: number; mined: number; totalMetadataBytes: number } { - return this.#indices.countTxs(); + countTxs(): { + pending: number; + protected: number; + mined: number; + softDeleted: number; + totalMetadataBytes: number; + } { + return { + ...this.#indices.countTxs(), + softDeleted: this.#deletedPool.getSoftDeletedCount(), + }; } // 
============================================================================ @@ -708,9 +734,11 @@ export class TxPoolV2Impl { } const stateStr = typeof state === 'string' ? state : Object.keys(state)[0]; - this.#log.verbose(`Added ${stateStr} tx ${txHashStr}`, { + this.#log.debug(`Added tx ${txHashStr} as ${stateStr}`, { eventName: 'tx-added-to-pool', + txHash: txHashStr, state: stateStr, + source: opts.source, }); return meta; @@ -738,13 +766,14 @@ export class TxPoolV2Impl { } } - /** Evicts transactions: records eviction metric, caches hashes, then deletes. */ - async #evictTxs(txHashes: string[]): Promise { + /** Evicts transactions: records eviction metric with reason, caches hashes, then deletes. */ + async #evictTxs(txHashes: string[], reason: string): Promise { if (txHashes.length === 0) { return; } - this.#instrumentation.recordEvictions(txHashes.length); + this.#instrumentation.recordEvictions(txHashes.length, reason); for (const txHashStr of txHashes) { + this.#log.debug(`Evicting tx ${txHashStr}`, { txHash: txHashStr, reason }); this.#addToEvictedCache(txHashStr); } await this.#deleteTxsBatch(txHashes); @@ -951,7 +980,7 @@ export class TxPoolV2Impl { getFeePayerPendingTxs: (feePayer: string) => this.#indices.getFeePayerPendingTxs(feePayer), getPendingTxCount: () => this.#indices.getPendingTxCount(), getLowestPriorityPending: (limit: number) => this.#indices.getLowestPriorityPending(limit), - deleteTxs: (txHashes: string[]) => this.#evictTxs(txHashes), + deleteTxs: (txHashes: string[], reason?: string) => this.#evictTxs(txHashes, reason ?? 
'unknown'), }; } diff --git a/yarn-project/telemetry-client/src/attributes.ts b/yarn-project/telemetry-client/src/attributes.ts index 4e9a843ba6e7..06bf938dbb66 100644 --- a/yarn-project/telemetry-client/src/attributes.ts +++ b/yarn-project/telemetry-client/src/attributes.ts @@ -128,6 +128,9 @@ export const NODEJS_EVENT_LOOP_STATE = 'nodejs.eventloop.state'; export const TOPIC_NAME = 'aztec.gossip.topic_name'; +/** The reason a transaction was evicted from the tx pool */ +export const TX_POOL_EVICTION_REASON = 'aztec.mempool.eviction_reason'; + export const TX_COLLECTION_METHOD = 'aztec.tx_collection.method'; /** Scope of L1 transaction (sequencer, prover, or other) */ diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 7a3e269de18c..583815d082c7 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -204,6 +204,18 @@ export const MEMPOOL_TX_POOL_V2_METADATA_MEMORY: MetricDefinition = { valueType: ValueType.INT, }; +export const MEMPOOL_TX_POOL_V2_DUPLICATE_ADD: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.duplicate_add', + description: 'Transactions received via addPendingTxs that were already in the pool', + valueType: ValueType.INT, +}; + +export const MEMPOOL_TX_POOL_V2_ALREADY_PROTECTED_ADD: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.already_protected_add', + description: 'Transactions received via addPendingTxs that were already pre-protected', + valueType: ValueType.INT, +}; + export const DB_NUM_ITEMS: MetricDefinition = { name: 'aztec.db.num_items', description: 'LMDB Num Items', From c9c15bffa869fa9e350af195c130823be1655bf5 Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Thu, 12 Feb 2026 22:52:57 +0000 Subject: [PATCH 13/62] Fee only priority checks --- yarn-project/p2p/src/client/p2p_client.ts | 4 +- .../eviction/eviction_manager.test.ts | 17 ++- .../tx_pool_v2/eviction/eviction_manager.ts | 9 +- 
.../fee_payer_balance_pre_add_rule.ts | 4 +- .../mem_pools/tx_pool_v2/eviction/index.ts | 1 + .../tx_pool_v2/eviction/interfaces.ts | 9 +- .../low_priority_pre_add_rule.test.ts | 72 +++++++++++- .../eviction/low_priority_pre_add_rule.ts | 16 ++- .../eviction/nullifier_conflict_rule.ts | 4 +- .../mem_pools/tx_pool_v2/instrumentation.ts | 5 +- .../src/mem_pools/tx_pool_v2/interfaces.ts | 2 +- .../mem_pools/tx_pool_v2/tx_pool_v2.test.ts | 105 ++++++++++++++++++ .../src/mem_pools/tx_pool_v2/tx_pool_v2.ts | 2 +- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 16 ++- .../p2p/src/test-helpers/testbench-utils.ts | 2 +- 15 files changed, 243 insertions(+), 25 deletions(-) diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 588eccbc2269..8857575d8667 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -582,7 +582,7 @@ export class P2PClient **/ public async sendTx(tx: Tx): Promise { this.#assertIsReady(); - const result = await this.txPool.addPendingTxs([tx]); + const result = await this.txPool.addPendingTxs([tx], { feeOnly: true }); if (result.accepted.length === 1) { await this.p2pService.propagate(tx); } else { @@ -598,7 +598,7 @@ export class P2PClient **/ public async addTxsToPool(txs: Tx[]): Promise { this.#assertIsReady(); - return (await this.txPool.addPendingTxs(txs)).accepted.length; + return (await this.txPool.addPendingTxs(txs, { feeOnly: true })).accepted.length; } /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts index 5064159a151e..1419fd85e0be 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts @@ -205,7 +205,22 @@ describe('EvictionManager', () => { expect(result.shouldIgnore).toBe(false); 
expect(result.txHashesToEvict).toContain('0x2222'); - expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess); + expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess, undefined); + }); + + it('forwards PreAddContext to rules', async () => { + preAddRule.check.mockResolvedValue({ + shouldIgnore: false, + txHashesToEvict: [], + }); + + evictionManager.registerPreAddRule(preAddRule); + const incomingMeta = createMeta('0x1111', 100n); + const context = { feeOnly: true }; + + await evictionManager.runPreAddRules(incomingMeta, poolAccess, context); + + expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess, context); }); it('returns ignore result immediately when a rule says to ignore', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts index 9c7d01c9d941..b2a8ad122011 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts @@ -9,6 +9,7 @@ import { EvictionEvent, type EvictionRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, type PreAddResult, type PreAddRule, @@ -48,13 +49,17 @@ export class EvictionManager { * Runs all pre-add rules for an incoming transaction. * Returns combined result of all rules. 
*/ - async runPreAddRules(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + async runPreAddRules( + incomingMeta: TxMetaData, + poolAccess: PreAddPoolAccess, + context?: PreAddContext, + ): Promise { const evictions: TaggedEviction[] = []; const seen = new Set(); for (const rule of this.preAddRules) { try { - const result = await rule.check(incomingMeta, poolAccess); + const result = await rule.check(incomingMeta, poolAccess, context); if (result.shouldIgnore) { return result; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts index ae2ba0006058..daa5ce665cfc 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts @@ -1,7 +1,7 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, comparePriority } from '../tx_metadata.js'; -import type { PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import type { PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks if a fee payer has sufficient balance to cover the incoming transaction. 
@@ -19,7 +19,7 @@ export class FeePayerBalancePreAddRule implements PreAddRule { private log = createLogger('p2p:tx_pool_v2:fee_payer_balance_pre_add_rule'); - async check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + async check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, _context?: PreAddContext): Promise { // Get fee payer's on-chain balance const initialBalance = await poolAccess.getFeePayerBalance(incomingMeta.feePayer); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts index c0f4b573f362..e084e02039d8 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts @@ -6,6 +6,7 @@ export { type EvictionResult, type EvictionRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, type PreAddResult, type PreAddRule, diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index 905203b1157c..81d4bb659014 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -87,6 +87,12 @@ export interface PreAddResult { readonly reason?: string; } +/** Context passed to pre-add rules from addPendingTxs. */ +export interface PreAddContext { + /** If true, compare priority fee only (no tx hash tiebreaker). Used for RPC submissions. */ + feeOnly?: boolean; +} + /** * Pre-add rule interface. Rules check incoming txs before they're added to the pool. * All methods work with TxMetaData for efficiency. @@ -98,9 +104,10 @@ export interface PreAddRule { * Check if incoming tx should be added and which existing txs to evict. 
* @param incomingMeta - Metadata for the incoming transaction * @param poolAccess - Read-only access to current pool state + * @param context - Optional context from addPendingTxs caller * @returns Result indicating whether to ignore and what to evict */ - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise; + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, context?: PreAddContext): Promise; /** * Updates the configuration for this rule. diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index cfce1d74dd73..21a782a5cbfb 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -1,5 +1,5 @@ -import { type TxMetaData, stubTxMetaValidationData } from '../tx_metadata.js'; -import type { PreAddPoolAccess } from './interfaces.js'; +import { type TxMetaData, comparePriority, stubTxMetaValidationData } from '../tx_metadata.js'; +import type { PreAddContext, PreAddPoolAccess } from './interfaces.js'; import { LowPriorityPreAddRule } from './low_priority_pre_add_rule.js'; describe('LowPriorityPreAddRule', () => { @@ -148,5 +148,73 @@ describe('LowPriorityPreAddRule', () => { expect(result.txHashesToEvict).toHaveLength(0); }); }); + + describe('feeOnly context', () => { + it('uses comparePriority (default): same fee, higher-priority hash evicts existing', async () => { + // Pick two hashes with the same fee, where incoming has higher priority by hash tiebreaker + const existing = createMeta('0x1111', 100n); + const incoming = createMeta('0x2222', 100n); + + // Determine which direction the tiebreaker goes and swap if needed + const cmp = comparePriority(incoming, existing); + const [incomingMeta, lowestPriorityMeta] = cmp > 0 ? 
[incoming, existing] : [existing, incoming]; + + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + + // Default context (no feeOnly) — uses full comparePriority + const result = await rule.check(incomingMeta, poolAccess); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); + }); + + it('uses feeOnly: same fee, incoming is ignored even if it wins hash tiebreaker', async () => { + const existing = createMeta('0x1111', 100n); + const incoming = createMeta('0x2222', 100n); + + // Determine which has higher hash priority and use that as incoming + const cmp = comparePriority(incoming, existing); + const [incomingMeta, lowestPriorityMeta] = cmp > 0 ? [incoming, existing] : [existing, incoming]; + + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const context: PreAddContext = { feeOnly: true }; + + // feeOnly mode: same fee means ignored (no hash tiebreaker) + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toHaveLength(0); + }); + + it('higher fee evicts regardless of feeOnly flag', async () => { + const lowestPriorityMeta = createMeta('0x2222', 50n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 100n); + + // Without feeOnly + const result1 = await rule.check(incomingMeta, poolAccess); + expect(result1.shouldIgnore).toBe(false); + expect(result1.txHashesToEvict).toContain('0x2222'); + + // With feeOnly + const result2 = await rule.check(incomingMeta, poolAccess, { feeOnly: true }); + expect(result2.shouldIgnore).toBe(false); + expect(result2.txHashesToEvict).toContain('0x2222'); + }); + + it('lower fee is always ignored regardless of feeOnly flag', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = 
createMeta('0x1111', 50n); + + // Without feeOnly + const result1 = await rule.check(incomingMeta, poolAccess); + expect(result1.shouldIgnore).toBe(true); + + // With feeOnly + const result2 = await rule.check(incomingMeta, poolAccess, { feeOnly: true }); + expect(result2.shouldIgnore).toBe(true); + }); + }); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts index a086cd64fc85..82b874303a63 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts @@ -1,7 +1,7 @@ import { createLogger } from '@aztec/foundation/log'; -import type { TxMetaData } from '../tx_metadata.js'; -import type { EvictionConfig, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import { type TxMetaData, comparePriority } from '../tx_metadata.js'; +import type { EvictionConfig, PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks if the pool is at capacity and handles low-priority eviction. 
@@ -20,7 +20,7 @@ export class LowPriorityPreAddRule implements PreAddRule { this.maxPoolSize = config.maxPoolSize; } - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, context?: PreAddContext): Promise { // Skip if max pool size is disabled (0 = unlimited) if (this.maxPoolSize === 0) { return Promise.resolve({ shouldIgnore: false, txHashesToEvict: [] }); @@ -40,8 +40,14 @@ export class LowPriorityPreAddRule implements PreAddRule { return Promise.resolve({ shouldIgnore: false, txHashesToEvict: [] }); } - // If incoming tx has strictly higher priority, evict the lowest priority tx - if (incomingMeta.priorityFee > lowestPriorityMeta.priorityFee) { + // Compare incoming tx against lowest priority tx. + // feeOnly mode (RPC): use strict fee comparison only — avoids churn from hash ordering + // Default (gossip): use full comparePriority (fee + tx hash tiebreaker) for determinism + const isHigherPriority = context?.feeOnly + ? 
incomingMeta.priorityFee > lowestPriorityMeta.priorityFee + : comparePriority(incomingMeta, lowestPriorityMeta) > 0; + + if (isHigherPriority) { this.log.debug( `Pool at capacity (${currentCount}/${this.maxPoolSize}), evicting ${lowestPriorityMeta.txHash} ` + `(priority ${lowestPriorityMeta.priorityFee}) for ${incomingMeta.txHash} (priority ${incomingMeta.priorityFee})`, diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts index 6eecac930709..05378999f704 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts @@ -1,7 +1,7 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, checkNullifierConflict } from '../tx_metadata.js'; -import type { PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import type { PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks for nullifier conflicts between incoming and existing transactions. 
@@ -15,7 +15,7 @@ export class NullifierConflictRule implements PreAddRule { private log = createLogger('p2p:tx_pool_v2:nullifier_conflict_rule'); - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, _context?: PreAddContext): Promise { const result = checkNullifierConflict( incomingMeta, nullifier => poolAccess.getTxHashByNullifier(nullifier), diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts index 0d39be259d7e..6ec711bb826c 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts @@ -1,4 +1,5 @@ import { + Attributes, type Meter, Metrics, type ObservableGauge, @@ -42,8 +43,8 @@ export class TxPoolV2Instrumentation { }); } - recordEvictions(count: number) { - this.#evictedCounter.add(count); + recordEvictions(count: number, reason: string) { + this.#evictedCounter.add(count, { [Attributes.TX_POOL_EVICTION_REASON]: reason }); } recordIgnored(count: number) { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 9cb5bd836eaf..84ae4097e7c8 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -101,7 +101,7 @@ export interface TxPoolV2 extends TypedEventEmitter { * @param opts - Optional metadata (e.g., source for logging) * @returns Result categorizing each transaction as accepted, rejected, or ignored */ - addPendingTxs(txs: Tx[], opts?: { source?: string }): Promise; + addPendingTxs(txs: Tx[], opts?: { source?: string; feeOnly?: boolean }): Promise; /** * Checks if a transaction can be added without modifying the pool. 
diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts index f9f5e2334941..c2c4eb933b36 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts @@ -3464,6 +3464,111 @@ describe('TxPoolV2', () => { }); }); + describe('feeOnly priority comparison', () => { + it('default (gossip): same-fee tx can evict via hash tiebreaker at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + // Create a tx with the same fee as the lowest (tx1, fee=10). + // Without feeOnly, comparePriority uses hash tiebreaker and may evict. + const tx3 = await mockTxWithFee(3, 10); + + // Determine tiebreaker direction + const tx3HashFr = Fr.fromHexString(tx3.getTxHash().toString()); + const tx1HashFr = Fr.fromHexString(tx1.getTxHash().toString()); + const tx3WinsTiebreaker = tx3HashFr.cmp(tx1HashFr) > 0; + + // Default: no feeOnly flag (gossip path) + const result = await pool.addPendingTxs([tx3]); + + if (tx3WinsTiebreaker) { + expect(toStrings(result.accepted)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('deleted'); + expect(await pool.getTxStatus(tx3.getTxHash())).toBe('pending'); + } else { + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('pending'); + } + }); + + it('feeOnly (RPC): same-fee tx is ignored at capacity regardless of hash', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await 
pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + // Same fee as the lowest — with feeOnly, no hash tiebreaker, always ignored + const tx3 = await mockTxWithFee(3, 10); + const result = await pool.addPendingTxs([tx3], { feeOnly: true }); + + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(result.accepted).toHaveLength(0); + expect(await pool.getPendingTxCount()).toBe(2); + expectNoCallbacks(); + }); + + it('feeOnly (RPC): higher-fee tx still evicts at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + const tx3 = await mockTxWithFee(3, 15); + const result = await pool.addPendingTxs([tx3], { feeOnly: true }); + + expect(toStrings(result.accepted)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('deleted'); // fee=10 evicted + expect(await pool.getTxStatus(tx3.getTxHash())).toBe('pending'); + }); + + it('feeOnly (RPC): lower-fee tx is ignored at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + const tx3 = await mockTxWithFee(3, 5); + const result = await pool.addPendingTxs([tx3], { feeOnly: true }); + + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expectNoCallbacks(); + }); + + it('feeOnly has no effect when pool is not at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 10 }); + + const tx1 = await mockTxWithFee(1, 10); + + // Both modes accept when below 
capacity + const result1 = await pool.addPendingTxs([tx1], { feeOnly: true }); + expect(result1.accepted).toHaveLength(1); + + const tx2 = await mockTxWithFee(2, 10); + const result2 = await pool.addPendingTxs([tx2]); + expect(result2.accepted).toHaveLength(1); + + expect(await pool.getPendingTxCount()).toBe(2); + }); + }); + describe('multiple nullifier conflicts', () => { it('handles tx with multiple nullifiers conflicting with different txs', async () => { const tx1 = await mockPublicTx(1, 5); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts index a885529a99da..6fc3b915df9f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts @@ -70,7 +70,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte // === Core Operations === - addPendingTxs(txs: Tx[], opts: { source?: string } = {}): Promise { + addPendingTxs(txs: Tx[], opts: { source?: string; feeOnly?: boolean } = {}): Promise { return this.#queue.put(() => this.#impl.addPendingTxs(txs, opts)); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 5a8c041637c4..07d6b1dcc794 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -23,6 +23,7 @@ import { LowPriorityPreAddRule, NullifierConflictRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, } from './eviction/index.js'; import { TxPoolV2Instrumentation } from './instrumentation.js'; @@ -177,13 +178,14 @@ export class TxPoolV2Impl { this.#log.info(`Deleted ${toDelete.length} invalid/rejected transactions on startup`); } - async addPendingTxs(txs: Tx[], opts: { source?: string }): Promise { + async addPendingTxs(txs: Tx[], opts: { source?: string; feeOnly?: boolean }): Promise { const 
accepted: TxHash[] = []; const ignored: TxHash[] = []; const rejected: TxHash[] = []; const acceptedPending = new Set(); const poolAccess = this.#createPreAddPoolAccess(); + const preAddContext: PreAddContext | undefined = opts.feeOnly !== undefined ? { feeOnly: opts.feeOnly } : undefined; await this.#store.transactionAsync(async () => { for (const tx of txs) { @@ -210,7 +212,14 @@ export class TxPoolV2Impl { accepted.push(txHash); } else { // Regular pending tx - validate and run pre-add rules - const result = await this.#tryAddRegularPendingTx(tx, opts, poolAccess, acceptedPending, ignored); + const result = await this.#tryAddRegularPendingTx( + tx, + opts, + poolAccess, + acceptedPending, + ignored, + preAddContext, + ); if (result.status === 'accepted') { acceptedPending.add(txHashStr); } else if (result.status === 'rejected') { @@ -252,6 +261,7 @@ export class TxPoolV2Impl { poolAccess: PreAddPoolAccess, acceptedPending: Set, ignored: TxHash[], + preAddContext?: PreAddContext, ): Promise<{ status: 'accepted' | 'ignored' | 'rejected' }> { const txHash = tx.getTxHash(); const txHashStr = txHash.toString(); @@ -263,7 +273,7 @@ export class TxPoolV2Impl { } // Run pre-add rules - const preAddResult = await this.#evictionManager.runPreAddRules(meta, poolAccess); + const preAddResult = await this.#evictionManager.runPreAddRules(meta, poolAccess, preAddContext); if (preAddResult.shouldIgnore) { this.#log.debug(`Ignoring tx ${txHashStr}: ${preAddResult.reason}`); diff --git a/yarn-project/p2p/src/test-helpers/testbench-utils.ts b/yarn-project/p2p/src/test-helpers/testbench-utils.ts index d0b8ce352fde..d556dbdcc9bf 100644 --- a/yarn-project/p2p/src/test-helpers/testbench-utils.ts +++ b/yarn-project/p2p/src/test-helpers/testbench-utils.ts @@ -59,7 +59,7 @@ export class InMemoryTxPool extends EventEmitter implements TxPoolV2 { // === Core Operations (TxPoolV2) === - addPendingTxs(txs: Tx[], opts?: { source?: string }): Promise { + addPendingTxs(txs: Tx[], opts?: { 
source?: string; feeOnly?: boolean }): Promise { const accepted: TxHash[] = []; const newTxs: Tx[] = []; for (const tx of txs) { From 02a460644726dcd079da424c5b6f26205aeff59e Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 13 Feb 2026 11:37:53 +0000 Subject: [PATCH 14/62] feat(ci.aztec-labs.com): CI cost and metrics tracking (#20100) --- .github/workflows/ci3.yml | 1 + ci3/aws_request_instance_type | 15 + ci3/bootstrap_ec2 | 15 +- ci3/ci-metrics/Dockerfile | 11 + ci3/ci-metrics/app.py | 848 ++++++ ci3/ci-metrics/billing/__init__.py | 14 + ci3/ci-metrics/billing/aws.py | 347 +++ ci3/ci-metrics/billing/billing-dashboard.html | 415 +++ ci3/ci-metrics/billing/explore.py | 619 ++++ ci3/ci-metrics/billing/fetch_billing.py | 262 ++ ci3/ci-metrics/billing/gcp.py | 289 ++ ci3/ci-metrics/ci-run-seed.json.gz | Bin 0 -> 357714 bytes ci3/ci-metrics/db.py | 107 + ci3/ci-metrics/ec2_pricing.py | 232 ++ ci3/ci-metrics/github_data.py | 666 +++++ ci3/ci-metrics/merge-queue-backfill.json | 2564 +++++++++++++++++ ci3/ci-metrics/metrics.py | 602 ++++ ci3/ci-metrics/requirements.txt | 8 + ci3/ci-metrics/sync_to_sqlite.py | 60 + ci3/ci-metrics/views/ci-insights.html | 658 +++++ ci3/ci-metrics/views/cost-overview.html | 905 ++++++ ci3/ci-metrics/views/test-timings.html | 289 ++ ci3/dashboard/Dockerfile | 7 +- ci3/dashboard/deploy.sh | 8 +- ci3/dashboard/rk.py | 88 +- ci3/log_ci_run | 16 +- ci3/run_test_cmd | 14 +- 27 files changed, 9041 insertions(+), 19 deletions(-) create mode 100644 ci3/ci-metrics/Dockerfile create mode 100644 ci3/ci-metrics/app.py create mode 100644 ci3/ci-metrics/billing/__init__.py create mode 100644 ci3/ci-metrics/billing/aws.py create mode 100644 ci3/ci-metrics/billing/billing-dashboard.html create mode 100644 ci3/ci-metrics/billing/explore.py create mode 100644 ci3/ci-metrics/billing/fetch_billing.py create mode 100644 ci3/ci-metrics/billing/gcp.py create mode 100644 ci3/ci-metrics/ci-run-seed.json.gz create mode 100644 ci3/ci-metrics/db.py create mode 100644 
ci3/ci-metrics/ec2_pricing.py create mode 100644 ci3/ci-metrics/github_data.py create mode 100644 ci3/ci-metrics/merge-queue-backfill.json create mode 100644 ci3/ci-metrics/metrics.py create mode 100644 ci3/ci-metrics/requirements.txt create mode 100755 ci3/ci-metrics/sync_to_sqlite.py create mode 100644 ci3/ci-metrics/views/ci-insights.html create mode 100644 ci3/ci-metrics/views/cost-overview.html create mode 100644 ci3/ci-metrics/views/test-timings.html diff --git a/.github/workflows/ci3.yml b/.github/workflows/ci3.yml index d2b6d6be837c..ec0a4c81b234 100644 --- a/.github/workflows/ci3.yml +++ b/.github/workflows/ci3.yml @@ -94,6 +94,7 @@ jobs: PR_COMMITS: ${{ github.event.pull_request.commits }} PR_NUMBER: ${{ github.event.pull_request.number }} GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_ACTOR: ${{ github.actor }} # NOTE: $CI_MODE is set in the Determine CI Mode step. run: ./.github/ci3.sh $CI_MODE diff --git a/ci3/aws_request_instance_type b/ci3/aws_request_instance_type index 5f6aafbe4dd7..a48c5b58c241 100755 --- a/ci3/aws_request_instance_type +++ b/ci3/aws_request_instance_type @@ -86,6 +86,21 @@ if [ -z "${iid:-}" -o "${iid:-}" == "None" ]; then echo $iid > $iid_path fi +tags="Key=Name,Value=$name Key=Group,Value=build-instance" +[ -n "${GITHUB_ACTOR:-}" ] && tags+=" Key=GithubActor,Value=$GITHUB_ACTOR" +[ -n "${CI_MODE:-}" ] && tags+=" Key=CICommand,Value=$CI_MODE" +[ -n "${CI_DASHBOARD:-}" ] && tags+=" Key=Dashboard,Value=$CI_DASHBOARD" +if [ "${UNSAFE_AWS_KEEP_ALIVE:-0}" -eq 1 ]; then + echo_stderr "You have set UNSAFE_AWS_KEEP_ALIVE=1, so the instance will not be terminated after 1.5 hours by the reaper script. Make sure you shut the machine down when done." + tags+=" Key=Keep-Alive,Value=true" +fi +aws ec2 create-tags --resources $iid --tags $tags + +# Record the instance type so callers can pass it downstream (e.g. into Docker). +echo $instance_type > $state_dir/instance_type +# Record whether this is spot or on-demand. 
+[ -f "$sir_path" ] && echo spot > $state_dir/spot || echo ondemand > $state_dir/spot + while [ -z "${ip:-}" ]; do sleep 1 ip=$(aws ec2 describe-instances \ diff --git a/ci3/bootstrap_ec2 b/ci3/bootstrap_ec2 index a24f0cfc177b..eeffb180d390 100755 --- a/ci3/bootstrap_ec2 +++ b/ci3/bootstrap_ec2 @@ -89,6 +89,8 @@ if [[ -f "$state_dir/sir" ]]; then sir=$(cat $state_dir/sir) fi iid=$(cat $state_dir/iid) +export EC2_INSTANCE_TYPE=$(cat $state_dir/instance_type 2>/dev/null || echo "unknown") +export EC2_SPOT=$(cat $state_dir/spot 2>/dev/null || echo "unknown") # If AWS credentials are not set, try to load them from ~/.aws/build_instance_credentials. if [ -z "${AWS_ACCESS_KEY_ID:-}" ] || [ -z "${AWS_SECRET_ACCESS_KEY:-}" ]; then @@ -192,16 +194,6 @@ container_script=$( log_ci_run FAILED \$ci_log_id merge_train_failure_slack_notify \$ci_log_id release_canary_slack_notify \$ci_log_id - ci_failed_data=\$(jq -n \\ - --arg status "failed" \\ - --arg log_id "\$ci_log_id" \\ - --arg ref_name "\${TARGET_BRANCH:-\$REF_NAME}" \\ - --arg commit_hash "\$COMMIT_HASH" \\ - --arg commit_author "\$COMMIT_AUTHOR" \\ - --arg commit_msg "\$COMMIT_MSG" \\ - --argjson exit_code "\$code" \\ - '{status: \$status, log_id: \$log_id, ref_name: \$ref_name, commit_hash: \$commit_hash, commit_author: \$commit_author, commit_msg: \$commit_msg, exit_code: \$exit_code, timestamp: now | todate}') - redis_publish "ci:run:failed" "\$ci_failed_data" ;; esac exit \$code @@ -331,6 +323,9 @@ function run { -e AWS_TOKEN=\$aws_token \ -e NAMESPACE=${NAMESPACE:-} \ -e NETWORK=${NETWORK:-} \ + -e GITHUB_ACTOR=${GITHUB_ACTOR:-} \ + -e EC2_INSTANCE_TYPE=${EC2_INSTANCE_TYPE:-unknown} \ + -e EC2_SPOT=${EC2_SPOT:-unknown} \ --pids-limit=65536 \ --shm-size=2g \ aztecprotocol/devbox:3.0 bash -c $(printf '%q' "$container_script") diff --git a/ci3/ci-metrics/Dockerfile b/ci3/ci-metrics/Dockerfile new file mode 100644 index 000000000000..4013545da66d --- /dev/null +++ b/ci3/ci-metrics/Dockerfile @@ -0,0 +1,11 @@ +FROM 
python:3.12 + +RUN apt update && apt install -y jq redis-tools && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY requirements.txt requirements.txt +RUN pip install --no-cache-dir -r requirements.txt gunicorn +RUN git config --global --add safe.directory /aztec-packages +COPY . . +EXPOSE 8081 +CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:8081", "app:app"] diff --git a/ci3/ci-metrics/app.py b/ci3/ci-metrics/app.py new file mode 100644 index 000000000000..c62875e7d19a --- /dev/null +++ b/ci3/ci-metrics/app.py @@ -0,0 +1,848 @@ +from flask import Flask, request, Response, redirect +from flask_compress import Compress +from flask_httpauth import HTTPBasicAuth +from datetime import datetime, timedelta +import json +import os +import re +import redis +import threading +from pathlib import Path + +import db +import metrics +import github_data +import billing.aws as billing_aws +from billing import ( + get_billing_files_in_range, + aggregate_billing_weekly, aggregate_billing_monthly, + serve_billing_dashboard, +) + +REDIS_HOST = os.getenv('REDIS_HOST', 'localhost') +REDIS_PORT = int(os.getenv('REDIS_PORT', '6379')) +LOGS_DISK_PATH = os.getenv('LOGS_DISK_PATH', '/logs-disk') +DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', 'password') + +r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=False) + +app = Flask(__name__) +Compress(app) +auth = HTTPBasicAuth() + + +@auth.verify_password +def verify_password(username, password): + return password == DASHBOARD_PASSWORD + + +def _init(): + """Initialize SQLite and start background threads.""" + try: + db.get_db() + metrics.start_test_listener(r) + metrics.start_ci_run_sync(r) + print("[ci-metrics] Background threads started") + except Exception as e: + print(f"[ci-metrics] Warning: startup failed: {e}") + +threading.Thread(target=_init, daemon=True, name='metrics-init').start() + + +# ---- Helpers ---- + +def _aggregate_dates(by_date_list, granularity, sum_fields, avg_fields=None): + """Aggregate a list of 
{date, ...} dicts by weekly/monthly granularity.""" + if granularity == 'daily' or not by_date_list: + return by_date_list + + buckets = {} + for entry in by_date_list: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + if granularity == 'weekly': + key = (d - timedelta(days=d.weekday())).strftime('%Y-%m-%d') + else: # monthly + key = d.strftime('%Y-%m') + '-01' + + if key not in buckets: + buckets[key] = {'date': key} + for f in sum_fields: + buckets[key][f] = 0 + if avg_fields: + for f in avg_fields: + buckets[key][f'_avg_sum_{f}'] = 0 + buckets[key][f'_avg_cnt_{f}'] = 0 + + for f in sum_fields: + buckets[key][f] += entry.get(f) or 0 + if avg_fields: + for f in avg_fields: + val = entry.get(f) + if val is not None: + buckets[key][f'_avg_sum_{f}'] += val + buckets[key][f'_avg_cnt_{f}'] += 1 + + result = [] + for key in sorted(buckets): + b = buckets[key] + out = {'date': b['date']} + for f in sum_fields: + out[f] = round(b[f], 2) if isinstance(b[f], float) else b[f] + if avg_fields: + for f in avg_fields: + cnt = b[f'_avg_cnt_{f}'] + out[f] = round(b[f'_avg_sum_{f}'] / cnt, 1) if cnt else None + result.append(out) + + return result + + +def _json(data): + return Response(json.dumps(data), mimetype='application/json') + + +# ---- Namespace billing ---- + +@app.route('/namespace-billing') +@auth.login_required +def namespace_billing(): + html = serve_billing_dashboard() + if html: + return html + return "Billing dashboard not found", 404 + + +@app.route('/api/billing/data') +@auth.login_required +def billing_data(): + date_from_str = request.args.get('from') + date_to_str = request.args.get('to') + granularity = request.args.get('granularity', 'daily') + + if not date_from_str or not date_to_str: + return _json({'error': 'from and to date params required (YYYY-MM-DD)'}), 400 + try: + date_from = datetime.strptime(date_from_str, '%Y-%m-%d') + date_to = datetime.strptime(date_to_str, '%Y-%m-%d') + except ValueError: + return _json({'error': 'Invalid date format, use 
YYYY-MM-DD'}), 400 + + daily_data = get_billing_files_in_range(date_from, date_to) + + # Filter out namespaces costing less than $1 total across the range + ns_totals = {} + for entry in daily_data: + for ns, ns_data in entry.get('namespaces', {}).items(): + ns_totals[ns] = ns_totals.get(ns, 0) + ns_data.get('total', 0) + cheap_ns = {ns for ns, total in ns_totals.items() if total < 1.0} + if cheap_ns: + for entry in daily_data: + entry['namespaces'] = {ns: d for ns, d in entry.get('namespaces', {}).items() + if ns not in cheap_ns} + + if granularity == 'weekly': + result = aggregate_billing_weekly(daily_data) + elif granularity == 'monthly': + result = aggregate_billing_monthly(daily_data) + else: + result = daily_data + + return _json(result) + + +# ---- CI runs ---- + +@app.route('/api/ci/runs') +@auth.login_required +def api_ci_runs(): + date_from = request.args.get('from', '') + date_to = request.args.get('to', '') + status_filter = request.args.get('status', '') + author = request.args.get('author', '') + dashboard = request.args.get('dashboard', '') + limit = min(int(request.args.get('limit', 100)), 1000) + offset = int(request.args.get('offset', 0)) + + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) if date_from else None + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) if date_to else None + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + + if status_filter: + runs = [run for run in runs if run.get('status') == status_filter] + if author: + runs = [run for run in runs if run.get('author') == author] + if dashboard: + runs = [run for run in runs if run.get('dashboard') == dashboard] + + runs.sort(key=lambda x: x.get('timestamp', 0), reverse=True) + runs = runs[offset:offset + limit] + + return _json(runs) + + +@app.route('/api/ci/stats') +@auth.login_required +def api_ci_stats(): + ts_from = int((datetime.now() - timedelta(days=7)).timestamp() * 1000) + runs = metrics.get_ci_runs(r, 
ts_from) + + total = len(runs) + passed = sum(1 for run in runs if run.get('status') == 'PASSED') + failed = sum(1 for run in runs if run.get('status') == 'FAILED') + costs = [run['cost_usd'] for run in runs if run.get('cost_usd') is not None] + durations = [] + for run in runs: + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + durations.append((complete - ts) / 60000.0) + + return _json({ + 'total_runs': total, + 'passed': passed, + 'failed': failed, + 'total_cost': round(sum(costs), 2) if costs else None, + 'avg_duration_mins': round(sum(durations) / len(durations), 1) if durations else None, + }) + + +# ---- Cost endpoints ---- + +@app.route('/api/costs/overview') +@auth.login_required +def api_costs_overview(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + granularity = request.args.get('granularity', 'daily') + result = billing_aws.get_costs_overview(date_from, date_to) + if granularity != 'daily' and result.get('by_date'): + buckets = {} + for entry in result['by_date']: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + if granularity == 'weekly': + key = (d - timedelta(days=d.weekday())).strftime('%Y-%m-%d') + else: + key = d.strftime('%Y-%m') + '-01' + if key not in buckets: + buckets[key] = {'date': key, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0} + for cat, amt in entry.get('aws', {}).items(): + buckets[key]['aws'][cat] = buckets[key]['aws'].get(cat, 0) + amt + for cat, amt in entry.get('gcp', {}).items(): + buckets[key]['gcp'][cat] = buckets[key]['gcp'].get(cat, 0) + amt + buckets[key]['aws_total'] += entry.get('aws_total', 0) + buckets[key]['gcp_total'] += entry.get('gcp_total', 0) + result['by_date'] = sorted(buckets.values(), key=lambda x: x['date']) + return _json(result) + + +@app.route('/api/costs/details') +@auth.login_required +def api_costs_details(): + """Per-resource 
(USAGE_TYPE) cost breakdown.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + + rows = billing_aws.get_aws_cost_details(date_from, date_to) + + usage_map = {} + for row in rows: + ut = row['usage_type'] + if ut not in usage_map: + usage_map[ut] = { + 'usage_type': ut, + 'service': row['service'], + 'category': row['category'], + 'total': 0, + 'by_date': {}, + 'is_ri': 'HeavyUsage' in ut, + } + usage_map[ut]['total'] += row['amount_usd'] + d = row['date'] + usage_map[ut]['by_date'][d] = usage_map[ut]['by_date'].get(d, 0) + row['amount_usd'] + + items = sorted(usage_map.values(), key=lambda x: -x['total']) + for item in items: + item['total'] = round(item['total'], 2) + item['by_date'] = {d: round(v, 4) for d, v in sorted(item['by_date'].items())} + + all_dates = sorted({row['date'] for row in rows}) + ri_items = [i for i in items if i['is_ri']] + ri_total = round(sum(i['total'] for i in ri_items), 2) + + return _json({ + 'items': items, + 'dates': all_dates, + 'ri_total': ri_total, + 'grand_total': round(sum(i['total'] for i in items), 2), + }) + + +@app.route('/api/costs/attribution') +@auth.login_required +def api_costs_attribution(): + """CI cost attribution by user, branch, instance.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs_with_cost = [run for run in runs if run.get('cost_usd') is not None] + + # Enrich merge queue runs with PR author from GitHub + pr_numbers = {run.get('pr_number') for run in runs_with_cost if run.get('pr_number')} + pr_authors = 
github_data.batch_get_pr_authors(pr_numbers) + + granularity = request.args.get('granularity', 'daily') + + instances = [] + by_user = {} + by_branch = {} + by_type = {} + by_date_type = {} + + for run in runs_with_cost: + info = billing_aws.decode_branch_info(run) + cost = run['cost_usd'] + date = metrics._ts_to_date(run.get('timestamp', 0)) + + author = info['author'] + prn = info['pr_number'] + if prn and int(prn) in pr_authors: + author = pr_authors[int(prn)]['author'] + + inst_type = run.get('instance_type', 'unknown') + vcpus = run.get('instance_vcpus') + if inst_type == 'unknown' and vcpus: + inst_type = f'{vcpus}vcpu' + + instances.append({ + 'instance_name': info['instance_name'], + 'date': date, + 'cost_usd': cost, + 'author': author, + 'branch': info['branch'], + 'pr_number': prn, + 'type': info['type'], + 'instance_type': inst_type, + 'spot': run.get('spot', False), + 'job_id': run.get('job_id', ''), + 'duration_mins': round((run.get('complete', 0) - run.get('timestamp', 0)) / 60000, 1) if run.get('complete') else None, + }) + + if author not in by_user: + by_user[author] = {'aws_cost': 0, 'gcp_cost': 0, 'runs': 0, 'by_date': {}} + by_user[author]['aws_cost'] += cost + by_user[author]['runs'] += 1 + by_user[author]['by_date'][date] = by_user[author]['by_date'].get(date, 0) + cost + + branch_key = info['branch'] or info['type'] + if branch_key not in by_branch: + by_branch[branch_key] = {'cost': 0, 'runs': 0, 'type': info['type'], 'author': author} + by_branch[branch_key]['cost'] += cost + by_branch[branch_key]['runs'] += 1 + + rt = info['type'] + if rt not in by_type: + by_type[rt] = {'cost': 0, 'runs': 0} + by_type[rt]['cost'] += cost + by_type[rt]['runs'] += 1 + + if date not in by_date_type: + by_date_type[date] = {} + by_date_type[date][rt] = by_date_type[date].get(rt, 0) + cost + + # GCP costs — reported as total, no namespace→user heuristic + gcp_total = 0 + try: + from billing.gcp import get_billing_files_in_range as get_gcp_billing + gcp_data = 
get_gcp_billing( + datetime.strptime(date_from, '%Y-%m-%d'), + datetime.strptime(date_to, '%Y-%m-%d'), + ) + for entry in gcp_data: + for ns, ns_data in entry.get('namespaces', {}).items(): + gcp_total += ns_data.get('total', 0) + except Exception as e: + print(f"[attribution] GKE billing error: {e}") + + # Sort and format + user_list = [{'author': a, 'aws_cost': round(v['aws_cost'], 2), 'gcp_cost': round(v['gcp_cost'], 2), + 'total_cost': round(v['aws_cost'] + v['gcp_cost'], 2), 'runs': v['runs'], + 'by_date': {d: round(c, 2) for d, c in sorted(v['by_date'].items())}} + for a, v in sorted(by_user.items(), key=lambda x: -(x[1]['aws_cost'] + x[1]['gcp_cost']))] + + branch_list = [{'branch': b, 'cost': round(v['cost'], 2), 'runs': v['runs'], + 'type': v['type'], 'author': v['author']} + for b, v in sorted(by_branch.items(), key=lambda x: -x[1]['cost'])[:100]] + + type_list = [{'type': t, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for t, v in sorted(by_type.items(), key=lambda x: -x[1]['cost'])] + + instances.sort(key=lambda x: -(x['cost_usd'] or 0)) + + all_types = sorted(by_type.keys()) + by_date_list = [] + for date in sorted(by_date_type): + entry = {'date': date, 'total': 0, 'runs': 0} + for rt in all_types: + entry[rt] = round(by_date_type[date].get(rt, 0), 2) + entry['total'] += by_date_type[date].get(rt, 0) + entry['total'] = round(entry['total'], 2) + entry['runs'] = sum(1 for inst in instances if inst['date'] == date) + by_date_list.append(entry) + + by_date_list = _aggregate_dates(by_date_list, granularity, + sum_fields=['total', 'runs'] + all_types) + + total_aws = sum(u['aws_cost'] for u in user_list) + + return _json({ + 'by_user': user_list, + 'by_branch': branch_list, + 'by_type': type_list, + 'by_date': by_date_list, + 'run_types': all_types, + 'instances': instances[:500], + 'totals': {'aws': round(total_aws, 2), 'gcp': round(gcp_total, 2), + 'gcp_unattributed': round(gcp_total, 2), + 'combined': round(total_aws + gcp_total, 2)}, + }) + + 
+@app.route('/api/costs/runners') +@auth.login_required +def api_costs_runners(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + granularity = request.args.get('granularity', 'daily') + dashboard = request.args.get('dashboard', '') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs_with_cost = [run for run in runs if run.get('cost_usd') is not None] + if dashboard: + runs_with_cost = [run for run in runs_with_cost if run.get('dashboard') == dashboard] + + by_date_map = {} + for run in runs_with_cost: + date = metrics._ts_to_date(run.get('timestamp', 0)) + if date not in by_date_map: + by_date_map[date] = {'spot_cost': 0, 'ondemand_cost': 0, 'total': 0} + cost = run['cost_usd'] + if run.get('spot'): + by_date_map[date]['spot_cost'] += cost + else: + by_date_map[date]['ondemand_cost'] += cost + by_date_map[date]['total'] += cost + + by_date = [{'date': date, 'spot_cost': round(d['spot_cost'], 2), + 'ondemand_cost': round(d['ondemand_cost'], 2), 'total': round(d['total'], 2), + 'spot_pct': round(100.0 * d['spot_cost'] / max(d['total'], 0.01), 1)} + for date, d in sorted(by_date_map.items())] + + by_date = _aggregate_dates(by_date, granularity, + sum_fields=['spot_cost', 'ondemand_cost', 'total']) + for d in by_date: + d['spot_pct'] = round(100.0 * d['spot_cost'] / max(d['total'], 0.01), 1) + + by_instance_map = {} + for run in runs_with_cost: + inst = run.get('instance_type', 'unknown') + if inst not in by_instance_map: + by_instance_map[inst] = {'cost': 0, 'runs': 0} + by_instance_map[inst]['cost'] += run['cost_usd'] + by_instance_map[inst]['runs'] += 1 + by_instance = [{'instance_type': k, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for k, v in 
sorted(by_instance_map.items(), key=lambda x: -x[1]['cost'])] + + by_dash_map = {} + for run in runs_with_cost: + dash = run.get('dashboard', 'unknown') + if dash not in by_dash_map: + by_dash_map[dash] = {'cost': 0, 'runs': 0} + by_dash_map[dash]['cost'] += run['cost_usd'] + by_dash_map[dash]['runs'] += 1 + by_dashboard = [{'dashboard': k, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for k, v in sorted(by_dash_map.items(), key=lambda x: -x[1]['cost'])] + + total_cost = sum(run['cost_usd'] for run in runs_with_cost) + spot_cost = sum(run['cost_usd'] for run in runs_with_cost if run.get('spot')) + + return _json({ + 'by_date': by_date, + 'by_instance_type': by_instance, + 'by_dashboard': by_dashboard, + 'summary': { + 'total_cost': round(total_cost, 2), + 'spot_pct': round(100.0 * spot_cost / max(total_cost, 0.01), 1), + 'avg_cost_per_run': round(total_cost / max(len(runs_with_cost), 1), 2), + 'total_runs': len(runs_with_cost), + }, + }) + + +# ---- CI Performance ---- + +@app.route('/api/ci/performance') +@auth.login_required +def api_ci_performance(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + granularity = request.args.get('granularity', 'daily') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs = [run for run in runs if run.get('status') in ('PASSED', 'FAILED')] + if dashboard: + runs = [run for run in runs if run.get('dashboard') == dashboard] + + by_date_map = {} + for run in runs: + date = metrics._ts_to_date(run.get('timestamp', 0)) + if date not in by_date_map: + by_date_map[date] = {'total': 0, 'passed': 0, 'failed': 0, 'durations': []} + by_date_map[date]['total'] += 1 + if run.get('status') == 
'PASSED': + by_date_map[date]['passed'] += 1 + else: + by_date_map[date]['failed'] += 1 + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + by_date_map[date]['durations'].append((complete - ts) / 60000.0) + + by_date = [] + for date in sorted(by_date_map): + d = by_date_map[date] + by_date.append({ + 'date': date, + 'total': d['total'], + 'passed': d['passed'], + 'failed': d['failed'], + 'pass_rate': round(100.0 * d['passed'] / max(d['total'], 1), 1), + 'failure_rate': round(100.0 * d['failed'] / max(d['total'], 1), 1), + 'avg_duration_mins': round(sum(d['durations']) / len(d['durations']), 1) if d['durations'] else None, + }) + + by_date = _aggregate_dates(by_date, granularity, + sum_fields=['total', 'passed', 'failed'], + avg_fields=['avg_duration_mins']) + for d in by_date: + d['pass_rate'] = round(100.0 * d['passed'] / max(d['total'], 1), 1) + d['failure_rate'] = round(100.0 * d['failed'] / max(d['total'], 1), 1) + + # Daily flake/failure counts from test_events + if dashboard: + flake_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (dashboard, date_from, date_to + 'T23:59:59')) + fail_test_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'failed' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + flake_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'flaked' + AND timestamp >= ? AND timestamp < ? 
+ GROUP BY substr(timestamp, 1, 10) + ''', (date_from, date_to + 'T23:59:59')) + fail_test_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'failed' + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (date_from, date_to + 'T23:59:59')) + flake_daily_map = {r['date']: r['count'] for r in flake_daily} + fail_test_daily_map = {r['date']: r['count'] for r in fail_test_daily} + for d in by_date: + d['flake_count'] = flake_daily_map.get(d['date'], 0) + d['test_failure_count'] = fail_test_daily_map.get(d['date'], 0) + + # Top flakes/failures + if dashboard: + top_flakes = db.query(''' + SELECT test_cmd, COUNT(*) as count, ref_name + FROM test_events WHERE status='flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (dashboard, date_from, date_to + 'T23:59:59')) + top_failures = db.query(''' + SELECT test_cmd, COUNT(*) as count + FROM test_events WHERE status='failed' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + top_flakes = db.query(''' + SELECT test_cmd, COUNT(*) as count, ref_name + FROM test_events WHERE status='flaked' AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (date_from, date_to + 'T23:59:59')) + top_failures = db.query(''' + SELECT test_cmd, COUNT(*) as count + FROM test_events WHERE status='failed' AND timestamp >= ? AND timestamp <= ? 
+ GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (date_from, date_to + 'T23:59:59')) + + # Summary + total = len(runs) + passed = sum(1 for run in runs if run.get('status') == 'PASSED') + failed = total - passed + durations = [] + for run in runs: + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + durations.append((complete - ts) / 60000.0) + + if dashboard: + flake_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + ''', (dashboard, date_from, date_to + 'T23:59:59')) + total_tests = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status IN ('failed','flaked') AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + ''', (dashboard, date_from, date_to + 'T23:59:59')) + total_failures_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='failed' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + flake_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='flaked' AND timestamp >= ? AND timestamp <= ? + ''', (date_from, date_to + 'T23:59:59')) + total_tests = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status IN ('failed','flaked') AND timestamp >= ? AND timestamp <= ? + ''', (date_from, date_to + 'T23:59:59')) + total_failures_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='failed' AND timestamp >= ? AND timestamp <= ? 
+ ''', (date_from, date_to + 'T23:59:59')) + + fc = flake_count[0]['c'] if flake_count else 0 + tc = total_tests[0]['c'] if total_tests else 0 + tfc = total_failures_count[0]['c'] if total_failures_count else 0 + + return _json({ + 'by_date': by_date, + 'top_flakes': top_flakes, + 'top_failures': top_failures, + 'summary': { + 'total_runs': total, + 'pass_rate': round(100.0 * passed / max(total, 1), 1), + 'failure_rate': round(100.0 * failed / max(total, 1), 1), + 'avg_duration_mins': round(sum(durations) / len(durations), 1) if durations else None, + 'flake_rate': round(100.0 * fc / max(tc, 1), 1) if tc else 0, + 'total_flakes': fc, + 'total_test_failures': tfc, + }, + }) + + +# ---- GitHub integration ---- + +@app.route('/api/deployments/speed') +@auth.login_required +def api_deploy_speed(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + workflow = request.args.get('workflow', '') + granularity = request.args.get('granularity', 'daily') + result = github_data.get_deployment_speed(date_from, date_to, workflow) + if granularity != 'daily' and result.get('by_date'): + result['by_date'] = _aggregate_dates( + result['by_date'], granularity, + sum_fields=['count', 'success', 'failure'], + avg_fields=['median_mins', 'p95_mins']) + return _json(result) + + +@app.route('/api/branches/lag') +@auth.login_required +def api_branch_lag(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + return _json(github_data.get_branch_lag(date_from, date_to)) + + +@app.route('/api/prs/metrics') +@auth.login_required +def api_pr_metrics(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + author = request.args.get('author', 
'') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + ci_runs = metrics.get_ci_runs(r, ts_from, ts_to) + return _json(github_data.get_pr_metrics(date_from, date_to, author, ci_runs)) + + +@app.route('/api/merge-queue/stats') +@auth.login_required +def api_merge_queue_stats(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + return _json(github_data.get_merge_queue_stats(date_from, date_to)) + + +@app.route('/api/ci/flakes-by-command') +@auth.login_required +def api_flakes_by_command(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + metrics.sync_failed_tests_to_sqlite(r) + return _json(metrics.get_flakes_by_command(date_from, date_to, dashboard)) + + +# ---- Test timings ---- + +@app.route('/api/tests/timings') +@auth.login_required +def api_test_timings(): + """Test timing statistics: duration by test command, with trends.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + status = request.args.get('status', '') # filter to specific status + test_cmd = request.args.get('test_cmd', '') # filter to specific test + + conditions = ['duration_secs IS NOT NULL', 'duration_secs > 0', + 'timestamp >= ?', "timestamp < ? 
|| 'T23:59:59'"] + params = [date_from, date_to] + + if dashboard: + conditions.append('dashboard = ?') + params.append(dashboard) + if status: + conditions.append('status = ?') + params.append(status) + if test_cmd: + conditions.append('test_cmd = ?') + params.append(test_cmd) + + where = 'WHERE ' + ' AND '.join(conditions) + + # Per-test stats + by_test = db.query(f''' + SELECT test_cmd, + COUNT(*) as count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MIN(duration_secs), 1) as min_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as flaked, + dashboard + FROM test_events {where} + GROUP BY test_cmd + ORDER BY count DESC + LIMIT 200 + ''', params) + + # Add pass rate + for row in by_test: + total = row['passed'] + row['failed'] + row['flaked'] + row['pass_rate'] = round(100.0 * row['passed'] / max(total, 1), 1) + row['total_time_secs'] = round(row['avg_secs'] * row['count'], 0) + + # Daily time series (aggregate across all tests or filtered test) + by_date = db.query(f''' + SELECT substr(timestamp, 1, 10) as date, + COUNT(*) as count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as flaked + FROM test_events {where} + GROUP BY substr(timestamp, 1, 10) + ORDER BY date + ''', params) + + # Summary + summary_rows = db.query(f''' + SELECT COUNT(*) as count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(duration_secs) as total_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as 
flaked + FROM test_events {where} + ''', params) + s = summary_rows[0] if summary_rows else {} + + # Slowest individual test runs + slowest = db.query(f''' + SELECT test_cmd, status, duration_secs, dashboard, + substr(timestamp, 1, 10) as date, commit_author, log_url + FROM test_events {where} + ORDER BY duration_secs DESC + LIMIT 50 + ''', params) + + return _json({ + 'by_test': by_test, + 'by_date': by_date, + 'slowest': slowest, + 'summary': { + 'total_runs': s.get('count', 0), + 'avg_duration_secs': s.get('avg_secs'), + 'max_duration_secs': s.get('max_secs'), + 'total_compute_secs': round(s.get('total_secs', 0) or 0, 0), + 'passed': s.get('passed', 0), + 'failed': s.get('failed', 0), + 'flaked': s.get('flaked', 0), + }, + }) + + +# ---- Dashboard views ---- + +@app.route('/ci-health') +@auth.login_required +def ci_health(): + return redirect('/ci-insights') + + +@app.route('/ci-insights') +@auth.login_required +def ci_insights(): + path = Path(__file__).parent / 'views' / 'ci-insights.html' + if path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +@app.route('/cost-overview') +@auth.login_required +def cost_overview(): + path = Path(__file__).parent / 'views' / 'cost-overview.html' + if path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +@app.route('/test-timings') +@auth.login_required +def test_timings(): + path = Path(__file__).parent / 'views' / 'test-timings.html' + if path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8081) diff --git a/ci3/ci-metrics/billing/__init__.py b/ci3/ci-metrics/billing/__init__.py new file mode 100644 index 000000000000..e097751047c2 --- /dev/null +++ b/ci3/ci-metrics/billing/__init__.py @@ -0,0 +1,14 @@ +"""Billing package: GKE namespace billing and AWS cost data.""" + +from billing.gcp import ( + get_billing_files_in_range, + aggregate_billing_weekly, + aggregate_billing_monthly, + 
serve_billing_dashboard, +) +from billing.aws import ( + get_costs_overview, + get_aws_cost_details, + decode_branch_info, + decode_instance_name, +) diff --git a/ci3/ci-metrics/billing/aws.py b/ci3/ci-metrics/billing/aws.py new file mode 100644 index 000000000000..481393d74ec3 --- /dev/null +++ b/ci3/ci-metrics/billing/aws.py @@ -0,0 +1,347 @@ +"""AWS Cost Explorer fetch with in-memory cache. + +Fetches on first request, caches for 6 hours. No SQLite, no background threads. +""" +import threading +import time +from datetime import datetime, timedelta, timezone + +SERVICE_CATEGORY_MAP = { + # Compute + 'Amazon Elastic Compute Cloud - Compute': 'ec2', + 'EC2 - Other': 'ec2', # EBS volumes, snapshots, NAT gateways, data transfer + 'Amazon Elastic Container Service': 'ecs', + 'Amazon Elastic Kubernetes Service': 'eks', + 'Amazon EC2 Container Registry (ECR)': 'ecr', + 'AWS Lambda': 'lambda', + 'Amazon Lightsail': 'lightsail', + # Storage + 'Amazon Simple Storage Service': 's3', + 'Amazon Elastic File System': 'efs', + 'Amazon Elastic Block Store': 'ebs', + 'Amazon ElastiCache': 'elasticache', + 'Amazon Relational Database Service': 'rds', + 'Amazon DynamoDB': 'dynamodb', + 'AWS Backup': 'backup', + # Networking + 'Amazon CloudFront': 'cloudfront', + 'CloudFront Flat-Rate Plans': 'cloudfront', + 'Amazon Virtual Private Cloud': 'vpc', + 'Elastic Load Balancing': 'elb', + 'Amazon Elastic Load Balancing': 'elb', + 'Amazon Route 53': 'route53', + 'Amazon API Gateway': 'apigateway', + 'AWS Data Transfer': 'data_transfer', + 'AWS Global Accelerator': 'global_accelerator', + # Monitoring & Security + 'AmazonCloudWatch': 'cloudwatch', + 'AWS CloudTrail': 'cloudtrail', + 'AWS Secrets Manager': 'secrets', + 'AWS Key Management Service': 'kms', + 'AWS WAF': 'waf', + 'AWS Config': 'config', + 'AWS Certificate Manager': 'acm', + # CI/CD & Dev Tools + 'AWS CodeBuild': 'codebuild', + 'AWS CodePipeline': 'codepipeline', + 'AWS CloudFormation': 'cloudformation', + 'AWS Amplify': 
'amplify', + # Data & Analytics + 'AWS Glue': 'glue', + # IoT + 'AWS IoT': 'iot', + 'Amazon Location Service': 'location', + # Messaging + 'Amazon Simple Notification Service': 'sns', + 'Amazon Simple Queue Service': 'sqs', + # Other + 'Tax': 'tax', + 'AWS Support (Business)': 'support', + 'AWS Support (Enterprise)': 'support', + 'AWS Cost Explorer': 'cost_explorer', +} + +import re + +_cache = {'rows': [], 'ts': 0} +_cache_lock = threading.Lock() +_detail_cache = {'rows': [], 'ts': 0} +_detail_cache_lock = threading.Lock() +_CACHE_TTL = 6 * 3600 + +# Known job postfixes from ci.sh (these become INSTANCE_POSTFIX) +_JOB_POSTFIXES = re.compile( + r'_(x[0-9]+-(?:full|fast)|a[0-9]+-(?:full|fast)|n-deploy-[0-9]+|grind-test-[a-f0-9]+)$' +) +_ARCH_SUFFIXES = ('_amd64', '_arm64', '_x86_64', '_aarch64') + + +def decode_instance_name(run: dict) -> str: + """Reconstruct the EC2 instance name from CI run metadata. + + bootstrap_ec2 naming: + merge queue: pr-{number}_{arch}[_{postfix}] + branch: {sanitized_branch}_{arch}[_{postfix}] + """ + name = run.get('name', '') + pr = run.get('pr_number') + arch = run.get('arch', 'amd64') + # Normalize arch names + if arch in ('x86_64', 'amd64'): + arch = 'amd64' + elif arch in ('aarch64', 'arm64'): + arch = 'arm64' + job = run.get('job_id', '') + + if '(queue)' in name and pr: + base = f'pr-{pr}_{arch}' + elif pr: + base = f'pr-{pr}_{arch}' + else: + # Replicate: echo -n "$REF_NAME" | head -c 50 | tr -c 'a-zA-Z0-9-' '_' + sanitized = re.sub(r'[^a-zA-Z0-9-]', '_', name[:50]) + base = f'{sanitized}_{arch}' + if job: + return f'{base}_{job}' + return base + + +def decode_branch_info(run: dict) -> dict: + """Extract branch/PR/user context from a CI run.""" + name = run.get('name', '') + dashboard = run.get('dashboard', '') + pr = run.get('pr_number') + author = run.get('author', 'unknown') + + if '(queue)' in name or dashboard == 'next': + run_type = 'merge-queue' + branch = name.replace(' (queue)', '') + elif dashboard == 'prs': + run_type 
= 'pr'
+        branch = name
+    elif dashboard in ('nightly', 'releases', 'network', 'deflake'):
+        run_type = dashboard
+        branch = name
+    else:
+        run_type = 'other'
+        branch = name
+
+    return {
+        'type': run_type,
+        'branch': branch,
+        'pr_number': pr,
+        'author': author,
+        'instance_name': decode_instance_name(run),
+    }
+
+
+def _fetch_aws_costs(date_from: str, date_to: str) -> list[dict]:
+    """Fetch daily per-service unblended costs from AWS Cost Explorer.
+
+    Best-effort: returns [] (never raises) when boto3 is missing or the
+    API call fails, so callers can keep serving stale/empty data.
+    """
+    try:
+        import boto3
+    except ImportError:
+        print("[rk_aws_costs] boto3 not installed, skipping")
+        return []
+
+    try:
+        client = boto3.client('ce', region_name='us-east-2')
+        rows = []
+        next_token = None
+
+        # Cost Explorer paginates via NextPageToken; loop until exhausted.
+        while True:
+            kwargs = dict(
+                TimePeriod={'Start': date_from, 'End': date_to},
+                Granularity='DAILY',
+                Metrics=['UnblendedCost'],
+                GroupBy=[{'Type': 'DIMENSION', 'Key': 'SERVICE'}],
+            )
+            if next_token:
+                kwargs['NextPageToken'] = next_token
+
+            response = client.get_cost_and_usage(**kwargs)
+
+            for result in response['ResultsByTime']:
+                date = result['TimePeriod']['Start']
+                for group in result['Groups']:
+                    service = group['Keys'][0]
+                    amount = float(group['Metrics']['UnblendedCost']['Amount'])
+                    # Skip zero-cost rows to keep the cache small.
+                    if amount == 0:
+                        continue
+                    category = SERVICE_CATEGORY_MAP.get(service, 'other')
+                    if category == 'other':
+                        print(f"[rk_aws_costs] unmapped service: {service!r} (${amount:.2f})")
+                    rows.append({
+                        'date': date,
+                        'service': service,
+                        'category': category,
+                        'amount_usd': round(amount, 4),
+                    })
+
+            next_token = response.get('NextPageToken')
+            if not next_token:
+                break
+
+        return rows
+    except Exception as e:
+        print(f"[rk_aws_costs] Error: {e}")
+        return []
+
+
+def _ensure_cached():
+    """Refresh the summary cache if stale.
+
+    Non-blocking lock acquire means at most one thread refreshes at a time;
+    concurrent callers simply return and keep using the old data.
+    """
+    now = time.time()
+    if _cache['rows'] and now - _cache['ts'] < _CACHE_TTL:
+        return
+    if not _cache_lock.acquire(blocking=False):
+        return
+    try:
+        today = datetime.now(timezone.utc).date()
+        rows = _fetch_aws_costs(
+            (today - timedelta(days=365)).isoformat(),
+            today.isoformat(),
+        )
+        # Only overwrite on a successful (non-empty) fetch.
+        if rows:
+            _cache['rows'] = rows
+            _cache['ts'] = now
+    finally:
+        _cache_lock.release()
+
+
+def get_aws_costs(date_from: str, date_to: str) -> list[dict]:
+    """Get AWS costs for date range. Blocks on first fetch, async refresh after."""
+    if not _cache['rows']:
+        _ensure_cached()  # block on first load so dashboard isn't empty
+    else:
+        threading.Thread(target=_ensure_cached, daemon=True).start()
+    # ISO dates compare correctly as strings.
+    return [r for r in _cache['rows'] if date_from <= r['date'] <= date_to]
+
+
+def _fetch_aws_cost_details(date_from: str, date_to: str) -> list[dict]:
+    """Fetch per-resource (USAGE_TYPE) cost breakdown from AWS Cost Explorer."""
+    try:
+        import boto3
+    except ImportError:
+        return []
+
+    try:
+        client = boto3.client('ce', region_name='us-east-2')
+        rows = []
+        next_token = None
+
+        while True:
+            kwargs = dict(
+                TimePeriod={'Start': date_from, 'End': date_to},
+                Granularity='DAILY',
+                Metrics=['UnblendedCost'],
+                GroupBy=[
+                    {'Type': 'DIMENSION', 'Key': 'SERVICE'},
+                    {'Type': 'DIMENSION', 'Key': 'USAGE_TYPE'},
+                ],
+            )
+            if next_token:
+                kwargs['NextPageToken'] = next_token
+
+            response = client.get_cost_and_usage(**kwargs)
+
+            for result in response['ResultsByTime']:
+                date = result['TimePeriod']['Start']
+                for group in result['Groups']:
+                    service = group['Keys'][0]
+                    usage_type = group['Keys'][1]
+                    amount = float(group['Metrics']['UnblendedCost']['Amount'])
+                    if amount == 0:
+                        continue
+                    category = SERVICE_CATEGORY_MAP.get(service, 'other')
+                    rows.append({
+                        'date': date,
+                        'service': service,
+                        'usage_type': usage_type,
+                        'category': category,
+                        'amount_usd': round(amount, 4),
+                    })
+
+            next_token = response.get('NextPageToken')
+            if not next_token:
+                break
+
+        return rows
+    except Exception as e:
+        print(f"[rk_aws_costs] Detail fetch error: {e}")
+        return []
+
+
+def _ensure_detail_cached():
+    """Refresh the per-resource cache if stale (same pattern as _ensure_cached)."""
+    now = time.time()
+    if _detail_cache['rows'] and now - _detail_cache['ts'] < _CACHE_TTL:
+        return
+    if not _detail_cache_lock.acquire(blocking=False):
+        return
+    try:
+        today = datetime.now(timezone.utc).date()
+        rows = _fetch_aws_cost_details(
+            (today - timedelta(days=365)).isoformat(),
+            today.isoformat(),
+        )
+        if rows:
+            _detail_cache['rows'] = rows
+            _detail_cache['ts'] = now
+    finally:
+        _detail_cache_lock.release()
+
+
+def get_aws_cost_details(date_from: str, date_to: str) -> list[dict]:
+    """Get per-resource AWS cost details. Blocks on first fetch, async refresh after."""
+    if not _detail_cache['rows']:
+        _ensure_detail_cached()
+    else:
+        threading.Thread(target=_ensure_detail_cached, daemon=True).start()
+    return [r for r in _detail_cache['rows'] if date_from <= r['date'] <= date_to]
+
+
+def get_costs_overview(date_from: str, date_to: str) -> dict:
+    """Combined AWS + GCP cost overview. GCP data comes from billing JSON files."""
+    aws_rows = get_aws_costs(date_from, date_to)
+
+    # GCP data from billing files (already on disk, no SQLite needed)
+    gcp_by_date = {}
+    try:
+        from billing.gcp import get_billing_files_in_range
+        billing_data = get_billing_files_in_range(
+            datetime.strptime(date_from, '%Y-%m-%d'),
+            datetime.strptime(date_to, '%Y-%m-%d'),
+        )
+        # Sum category breakdowns across all namespaces per day.
+        for entry in billing_data:
+            d = entry['date']
+            if d not in gcp_by_date:
+                gcp_by_date[d] = {}
+            for ns_data in entry.get('namespaces', {}).values():
+                for cat, amt in ns_data.get('breakdown', {}).items():
+                    gcp_by_date[d][cat] = gcp_by_date[d].get(cat, 0) + amt
+    except Exception as e:
+        print(f"[rk_aws_costs] GCP billing read failed: {e}")
+
+    # Merge both providers into one per-date structure.
+    by_date = {}
+    for r in aws_rows:
+        d = r['date']
+        if d not in by_date:
+            by_date[d] = {'date': d, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0}
+        cat = r['category']
+        by_date[d]['aws'][cat] = by_date[d]['aws'].get(cat, 0) + r['amount_usd']
+        by_date[d]['aws_total'] += r['amount_usd']
+
+    for d, cats in gcp_by_date.items():
+        if d not in by_date:
+            by_date[d] = {'date': d, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0}
+        by_date[d]['gcp'] = cats
+        by_date[d]['gcp_total'] = sum(cats.values())
+
+    sorted_dates = sorted(by_date.values(), key=lambda x: x['date'])
+    aws_total = sum(d['aws_total'] 
for d in sorted_dates) + gcp_total = sum(d['gcp_total'] for d in sorted_dates) + + return { + 'by_date': sorted_dates, + 'totals': { + 'aws': round(aws_total, 2), + 'gcp': round(gcp_total, 2), + 'combined': round(aws_total + gcp_total, 2), + } + } diff --git a/ci3/ci-metrics/billing/billing-dashboard.html b/ci3/ci-metrics/billing/billing-dashboard.html new file mode 100644 index 000000000000..87193ffae207 --- /dev/null +++ b/ci3/ci-metrics/billing/billing-dashboard.html @@ -0,0 +1,415 @@ + + + + + ACI - Namespace Billing + + + + +

namespace billing

+ +
+ + + + | + + + | + + + + | + + + + + + | + + + +
+ +
+ +
+ +
+
+

cost over time

+
+
+
+

cost by namespace

+
+
+
+

cost by category

+
+
+
+ + + + +
+ + + + + diff --git a/ci3/ci-metrics/billing/explore.py b/ci3/ci-metrics/billing/explore.py new file mode 100644 index 000000000000..c591d8c847ef --- /dev/null +++ b/ci3/ci-metrics/billing/explore.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python3 +"""CLI tool to explore GCP billing data from the Cloud Billing BigQuery export. + +Queries the actual billing export tables (not usage metering) to get real +invoice-level costs. Caches results in SQLite for fast re-queries. + +Usage: + python billing_explore.py discover # find billing export tables + python billing_explore.py fetch [--months N] # fetch & cache billing data + python billing_explore.py monthly # show monthly totals + python billing_explore.py monthly --by service # monthly by service + python billing_explore.py monthly --by sku # monthly by SKU + python billing_explore.py monthly --by project # monthly by project + python billing_explore.py daily [--month 2024-12] # daily for a month + python billing_explore.py top [--month 2024-12] # top costs for a month + python billing_explore.py compare # compare billing export vs usage metering +""" +import argparse +import os +import sqlite3 +import sys +from datetime import datetime, timedelta, timezone + +DB_PATH = os.path.join(os.getenv('LOGS_DISK_PATH', '/tmp'), 'billing_explore.db') + +SCHEMA = """ +CREATE TABLE IF NOT EXISTS gcp_billing ( + date TEXT NOT NULL, + project_id TEXT NOT NULL DEFAULT '', + service TEXT NOT NULL DEFAULT '', + sku TEXT NOT NULL DEFAULT '', + cost REAL NOT NULL DEFAULT 0, + credits REAL NOT NULL DEFAULT 0, + usage_amount REAL NOT NULL DEFAULT 0, + usage_unit TEXT NOT NULL DEFAULT '', + currency TEXT NOT NULL DEFAULT 'USD', + fetched_at TEXT NOT NULL, + PRIMARY KEY (date, project_id, service, sku) +); +CREATE INDEX IF NOT EXISTS idx_gcp_billing_date ON gcp_billing(date); +CREATE INDEX IF NOT EXISTS idx_gcp_billing_service ON gcp_billing(service); + +CREATE TABLE IF NOT EXISTS gcp_billing_meta ( + key TEXT PRIMARY KEY, + value TEXT +); +""" 
+
+
+def get_db():
+    """Open (creating if needed) the local SQLite cache database."""
+    os.makedirs(os.path.dirname(DB_PATH) or '.', exist_ok=True)
+    conn = sqlite3.connect(DB_PATH)
+    conn.execute('PRAGMA busy_timeout = 5000')
+    conn.row_factory = sqlite3.Row
+    conn.executescript(SCHEMA)
+    return conn
+
+
+def fmt_usd(v):
+    """Format a dollar amount with precision scaled to its magnitude."""
+    if v >= 1000:
+        return f'${v:,.0f}'
+    if v >= 1:
+        return f'${v:,.2f}'
+    return f'${v:,.4f}'
+
+
+# ---- BigQuery Discovery ----
+
+def cmd_discover(args):
+    """Find billing export tables in the project."""
+    from google.cloud import bigquery
+    project = args.project
+    client = bigquery.Client(project=project)
+
+    print(f'Listing datasets in project: {project}')
+    datasets = list(client.list_datasets())
+    if not datasets:
+        print(' No datasets found.')
+        return
+
+    for ds in datasets:
+        ds_id = ds.dataset_id
+        tables = list(client.list_tables(ds.reference))
+        # Heuristic match on table name for billing/cost exports.
+        billing_tables = [t for t in tables if 'billing' in t.table_id.lower() or 'cost' in t.table_id.lower()]
+        if billing_tables:
+            print(f'\n Dataset: {ds_id}')
+            for t in billing_tables:
+                full = f'{project}.{ds_id}.{t.table_id}'
+                print(f' {full}')
+                # Show schema for first billing table
+                tbl = client.get_table(t.reference)
+                print(f' rows: {tbl.num_rows}, size: {tbl.num_bytes / 1e6:.1f} MB')
+                print(f' columns: {", ".join(f.name for f in tbl.schema[:15])}')
+        else:
+            # Check for usage metering tables too
+            usage_tables = [t for t in tables if 'gke_cluster' in t.table_id.lower()]
+            if usage_tables:
+                print(f'\n Dataset: {ds_id} (usage metering)')
+                for t in usage_tables:
+                    print(f' {project}.{ds_id}.{t.table_id}')
+
+    # Also try common billing export naming patterns
+    print(f'\n Trying common billing export table patterns...')
+    for ds in datasets:
+        for t in client.list_tables(ds.reference):
+            if t.table_id.startswith('gcp_billing_export'):
+                full = f'{project}.{ds.dataset_id}.{t.table_id}'
+                print(f' FOUND: {full}')
+
+
+# ---- BigQuery Fetch ----
+
+def cmd_fetch(args):
+    """Fetch billing data from BigQuery and cache in SQLite."""
+    from google.cloud import bigquery
+
+    table = args.table
+    project = args.project
+    months = args.months
+
+    if not table:
+        print('ERROR: --table is required. Run "discover" first to find the billing export table.')
+        print(' e.g. --table project.dataset.gcp_billing_export_resource_v1_XXXXXX')
+        sys.exit(1)
+
+    client = bigquery.Client(project=project)
+    end_date = datetime.now(timezone.utc).date()
+    # 31-day months over-covers the window slightly; the query clamps by date.
+    start_date = end_date - timedelta(days=months * 31)
+
+    print(f'Fetching billing data from {start_date} to {end_date}')
+    print(f'Table: {table}')
+
+    # Query the billing export table
+    # The standard billing export has: billing_account_id, service.description,
+    # sku.description, usage_start_time, project.id, cost, credits, usage.amount, usage.unit
+    query = f"""
+    SELECT
+      DATE(usage_start_time) AS date,
+      COALESCE(project.id, '') AS project_id,
+      COALESCE(service.description, '') AS service,
+      COALESCE(sku.description, '') AS sku,
+      SUM(cost) AS cost,
+      SUM(IFNULL((SELECT SUM(c.amount) FROM UNNEST(credits) c), 0)) AS credits,
+      SUM(usage.amount) AS usage_amount,
+      MAX(usage.unit) AS usage_unit
+    FROM `{table}`
+    WHERE DATE(usage_start_time) BETWEEN @start_date AND @end_date
+    GROUP BY date, project_id, service, sku
+    HAVING ABS(cost) > 0.0001 OR ABS(credits) > 0.0001
+    ORDER BY date, service, sku
+    """
+
+    job_config = bigquery.QueryJobConfig(
+        query_parameters=[
+            bigquery.ScalarQueryParameter('start_date', 'DATE', start_date.isoformat()),
+            bigquery.ScalarQueryParameter('end_date', 'DATE', end_date.isoformat()),
+        ]
+    )
+
+    print('Running query...')
+    result = list(client.query(query, job_config=job_config).result())
+    print(f'Got {len(result)} rows')
+
+    if not result:
+        print('No data returned. Check table name and date range.')
+        return
+
+    # Store in SQLite
+    db = get_db()
+    now = datetime.now(timezone.utc).isoformat()
+
+    # Clear the fetched window first so re-fetches replace rather than merge.
+    db.execute('DELETE FROM gcp_billing WHERE date >= ? AND date <= ?',
+               (start_date.isoformat(), end_date.isoformat()))
+
+    for row in result:
+        db.execute('''
+            INSERT OR REPLACE INTO gcp_billing
+            (date, project_id, service, sku, cost, credits, usage_amount, usage_unit, fetched_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+        ''', (
+            row.date.isoformat() if hasattr(row.date, 'isoformat') else str(row.date),
+            row.project_id or '',
+            row.service or '',
+            row.sku or '',
+            float(row.cost or 0),
+            float(row.credits or 0),
+            float(row.usage_amount or 0),
+            row.usage_unit or '',
+            now,
+        ))
+
+    db.commit()
+    db.execute("INSERT OR REPLACE INTO gcp_billing_meta VALUES ('last_fetch', ?)", (now,))
+    db.execute("INSERT OR REPLACE INTO gcp_billing_meta VALUES ('table', ?)", (table,))
+    db.commit()
+
+    print(f'Cached {len(result)} rows in {DB_PATH}')
+
+    # Show quick summary
+    rows = db.execute('''
+        SELECT substr(date, 1, 7) as month, SUM(cost) as cost, SUM(credits) as credits
+        FROM gcp_billing GROUP BY month ORDER BY month
+    ''').fetchall()
+    print(f'\n{"Month":<10} {"Gross":>12} {"Credits":>12} {"Net":>12}')
+    print('-' * 48)
+    for r in rows:
+        # Credits are negative in the export; net = gross + credits.
+        net = r['cost'] + r['credits']
+        print(f'{r["month"]:<10} {fmt_usd(r["cost"]):>12} {fmt_usd(r["credits"]):>12} {fmt_usd(net):>12}')
+
+
+# ---- Reports ----
+
+def cmd_monthly(args):
+    """Show monthly totals."""
+    db = get_db()
+    group_by = args.by
+
+    if group_by == 'service':
+        rows = db.execute('''
+            SELECT substr(date, 1, 7) as month, service,
+                   SUM(cost) as cost, SUM(credits) as credits
+            FROM gcp_billing GROUP BY month, service ORDER BY month, cost DESC
+        ''').fetchall()
+
+        current_month = None
+        for r in rows:
+            if r['month'] != current_month:
+                current_month = r['month']
+                # NOTE(review): recomputes the month total on each month
+                # boundary — O(rows) per month, fine at this scale.
+                month_total = sum(row['cost'] + row['credits'] for row in rows if row['month'] == current_month)
+                print(f'\n {current_month} (net: {fmt_usd(month_total)})')
+                print(f' {"Service":<45} {"Gross":>10} {"Credits":>10} {"Net":>10}')
+                print(' ' + '-' * 77)
+            net = r['cost'] + r['credits']
+            if abs(net) >= 0.01:
+                
print(f' {r["service"]:<45} {fmt_usd(r["cost"]):>10} {fmt_usd(r["credits"]):>10} {fmt_usd(net):>10}')
+
+    elif group_by == 'sku':
+        month_filter = args.month
+        if not month_filter:
+            # Use most recent month
+            row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone()
+            month_filter = row['m'] if row else None
+
+        if not month_filter:
+            print('No data.')
+            return
+
+        rows = db.execute('''
+            SELECT service, sku, SUM(cost) as cost, SUM(credits) as credits,
+                   SUM(usage_amount) as usage_amount, MAX(usage_unit) as usage_unit
+            FROM gcp_billing WHERE substr(date, 1, 7) = ?
+            GROUP BY service, sku ORDER BY cost DESC
+        ''', (month_filter,)).fetchall()
+
+        total = sum(r['cost'] + r['credits'] for r in rows)
+        print(f'\n {month_filter} (net: {fmt_usd(total)})')
+        print(f' {"Service":<30} {"SKU":<40} {"Net":>10} {"Usage":>15}')
+        print(' ' + '-' * 97)
+        # Only top 40 SKUs; rows already sorted by gross cost descending.
+        for r in rows[:40]:
+            net = r['cost'] + r['credits']
+            if abs(net) >= 0.01:
+                usage = f'{r["usage_amount"]:.1f} {r["usage_unit"]}' if r['usage_amount'] else ''
+                print(f' {r["service"][:29]:<30} {r["sku"][:39]:<40} {fmt_usd(net):>10} {usage:>15}')
+
+    elif group_by == 'project':
+        rows = db.execute('''
+            SELECT substr(date, 1, 7) as month, project_id,
+                   SUM(cost) as cost, SUM(credits) as credits
+            FROM gcp_billing GROUP BY month, project_id ORDER BY month, cost DESC
+        ''').fetchall()
+
+        current_month = None
+        for r in rows:
+            if r['month'] != current_month:
+                current_month = r['month']
+                month_total = sum(row['cost'] + row['credits'] for row in rows if row['month'] == current_month)
+                print(f'\n {current_month} (net: {fmt_usd(month_total)})')
+                print(f' {"Project":<45} {"Net":>12}')
+                print(' ' + '-' * 59)
+            net = r['cost'] + r['credits']
+            if abs(net) >= 0.01:
+                print(f' {r["project_id"]:<45} {fmt_usd(net):>12}')
+
+    else:
+        # Default: just monthly totals
+        rows = db.execute('''
+            SELECT substr(date, 1, 7) as month,
+                   SUM(cost) as cost, SUM(credits) as credits,
+                   COUNT(DISTINCT date) as days
+            FROM gcp_billing GROUP BY month ORDER BY month
+        ''').fetchall()
+
+        print(f'\n {"Month":<10} {"Gross":>12} {"Credits":>12} {"Net":>12} {"Days":>6} {"Daily Avg":>12}')
+        print(' ' + '-' * 68)
+        grand_total = 0
+        for r in rows:
+            net = r['cost'] + r['credits']
+            # Guard against division by zero on empty months.
+            daily = net / max(r['days'], 1)
+            grand_total += net
+            print(f' {r["month"]:<10} {fmt_usd(r["cost"]):>12} {fmt_usd(r["credits"]):>12} {fmt_usd(net):>12} {r["days"]:>6} {fmt_usd(daily):>12}')
+        print(' ' + '-' * 68)
+        print(f' {"TOTAL":<10} {"":>12} {"":>12} {fmt_usd(grand_total):>12}')
+
+
+def cmd_daily(args):
+    """Show daily costs for a month."""
+    db = get_db()
+    month = args.month
+    if not month:
+        # Default to the most recent month with data.
+        row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone()
+        month = row['m'] if row else None
+
+    if not month:
+        print('No data.')
+        return
+
+    rows = db.execute('''
+        SELECT date, SUM(cost) as cost, SUM(credits) as credits
+        FROM gcp_billing WHERE substr(date, 1, 7) = ?
+        GROUP BY date ORDER BY date
+    ''', (month,)).fetchall()
+
+    total = 0
+    print(f'\n {"Date":<12} {"Gross":>10} {"Credits":>10} {"Net":>10}')
+    print(' ' + '-' * 44)
+    for r in rows:
+        net = r['cost'] + r['credits']
+        total += net
+        print(f' {r["date"]:<12} {fmt_usd(r["cost"]):>10} {fmt_usd(r["credits"]):>10} {fmt_usd(net):>10}')
+    print(' ' + '-' * 44)
+    print(f' {"TOTAL":<12} {"":>10} {"":>10} {fmt_usd(total):>10}')
+
+
+def cmd_top(args):
+    """Show top cost items for a month."""
+    db = get_db()
+    month = args.month
+    if not month:
+        row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone()
+        month = row['m'] if row else None
+
+    if not month:
+        print('No data.')
+        return
+
+    # Top services
+    services = db.execute('''
+        SELECT service, SUM(cost + credits) as net, SUM(cost) as gross
+        FROM gcp_billing WHERE substr(date, 1, 7) = ? 
+        GROUP BY service ORDER BY net DESC LIMIT 15
+    ''', (month,)).fetchall()
+
+    total = sum(r['net'] for r in services)
+    print(f'\n Top services for {month} (total: {fmt_usd(total)})')
+    print(f' {"Service":<45} {"Net":>12} {"% of Total":>10}')
+    print(' ' + '-' * 69)
+    for r in services:
+        # max() guards against a zero/near-zero total.
+        pct = 100 * r['net'] / max(total, 0.01)
+        if abs(r['net']) >= 0.01:
+            print(f' {r["service"]:<45} {fmt_usd(r["net"]):>12} {pct:>9.1f}%')
+
+    # Top SKUs
+    skus = db.execute('''
+        SELECT service, sku, SUM(cost + credits) as net
+        FROM gcp_billing WHERE substr(date, 1, 7) = ?
+        GROUP BY service, sku ORDER BY net DESC LIMIT 20
+    ''', (month,)).fetchall()
+
+    print(f'\n Top SKUs for {month}')
+    print(f' {"Service":<25} {"SKU":<40} {"Net":>12}')
+    print(' ' + '-' * 79)
+    for r in skus:
+        if abs(r['net']) >= 0.01:
+            print(f' {r["service"][:24]:<25} {r["sku"][:39]:<40} {fmt_usd(r["net"]):>12}')
+
+
+def cmd_compare(args):
+    """Compare billing export data vs usage metering estimates."""
+    db = get_db()
+
+    # Get billing export monthly totals
+    billing_rows = db.execute('''
+        SELECT substr(date, 1, 7) as month, SUM(cost + credits) as net
+        FROM gcp_billing GROUP BY month ORDER BY month
+    ''').fetchall()
+
+    if not billing_rows:
+        print('No billing export data cached. Run "fetch" first.')
+        return
+
+    # Get usage metering estimates
+    # NOTE(review): reaches into billing.gcp private cache internals
+    # (_ensure_cached/_cache) — acceptable for a CLI debugging tool.
+    try:
+        from billing import gcp as _gcp_billing
+        _gcp_billing._ensure_cached()
+        metering_data = _gcp_billing._cache.get('data', [])
+    except Exception as e:
+        print(f'Could not load usage metering data: {e}')
+        metering_data = []
+
+    # Roll daily namespace totals up to months.
+    metering_monthly = {}
+    for entry in metering_data:
+        month = entry['date'][:7]
+        day_total = sum(ns.get('total', 0) for ns in entry.get('namespaces', {}).values())
+        metering_monthly[month] = metering_monthly.get(month, 0) + day_total
+
+    print(f'\n {"Month":<10} {"Billing Export":>15} {"Usage Metering":>15} {"Ratio":>8}')
+    print(' ' + '-' * 50)
+    for r in billing_rows:
+        billing = r['net']
+        metering = metering_monthly.get(r['month'], 0)
+        ratio = f'{billing / metering:.2f}x' if metering > 0 else '--'
+        print(f' {r["month"]:<10} {fmt_usd(billing):>15} {fmt_usd(metering):>15} {ratio:>8}')
+
+
+def cmd_status(args):
+    """Show what data we have cached."""
+    db = get_db()
+    meta = {r['key']: r['value'] for r in db.execute('SELECT * FROM gcp_billing_meta').fetchall()}
+    billing_count = db.execute('SELECT COUNT(*) as c FROM gcp_billing').fetchone()['c']
+    billing_range = db.execute('SELECT MIN(date) as mn, MAX(date) as mx FROM gcp_billing').fetchone()
+
+    print(f'\n Billing export cache:')
+    print(f' DB path: {DB_PATH}')
+    print(f' Table: {meta.get("table", "(not set)")}')
+    print(f' Last fetch: {meta.get("last_fetch", "(never)")}')
+    print(f' Rows: {billing_count}')
+    if billing_count:
+        print(f' Date range: {billing_range["mn"]} to {billing_range["mx"]}')
+
+    # Also check billing export table status
+    try:
+        from google.cloud import bigquery
+        client = bigquery.Client(project=args.project)
+        table_id = 'testnet-440309.testnet440309billing.gcp_billing_export_v1_01EA8B_291C89_753ABC'
+        t = client.get_table(table_id)
+        print(f'\n BigQuery billing export:')
+        print(f' Table: {table_id}')
+        print(f' Rows: {t.num_rows}')
+        print(f' Modified: {t.modified}')
+        if t.num_rows > 
0:
+            print(f' STATUS: Data available! Run "fetch --table {table_id}" to cache it.')
+        else:
+            print(f' STATUS: Not yet populated. GCP takes up to 24h after enabling export.')
+    except Exception as e:
+        print(f'\n BigQuery check failed: {e}')
+
+
+def cmd_metering(args):
+    """Query both usage metering tables and compare with different approaches."""
+    from google.cloud import bigquery
+    project = args.project
+    client = bigquery.Client(project=project)
+    months = args.months
+
+    end_date = datetime.now(timezone.utc).date()
+    start_date = end_date - timedelta(days=months * 31)
+
+    # Table names
+    usage_table = f'{project}.egress_consumption.gke_cluster_resource_usage'
+    consumption_table = f'{project}.egress_consumption.gke_cluster_resource_consumption'
+
+    print(f'Date range: {start_date} to {end_date}')
+
+    # 1. Current approach: usage table with our SKU pricing
+    print('\n=== Approach 1: gke_cluster_resource_usage (requests) with hardcoded SKU prices ===')
+    _query_metering_table(client, usage_table, start_date, end_date, 'REQUESTS')
+
+    # 2. Consumption table with our SKU pricing
+    print('\n=== Approach 2: gke_cluster_resource_consumption (actual) with hardcoded SKU prices ===')
+    _query_metering_table(client, consumption_table, start_date, end_date, 'CONSUMPTION')
+
+    # 3. Raw totals: what does each table report?
+    print('\n=== Approach 3: Raw resource totals from both tables ===')
+    for tname, label in [(usage_table, 'REQUESTS'), (consumption_table, 'CONSUMPTION')]:
+        query = f"""
+        SELECT
+          FORMAT_DATE('%Y-%m', DATE(start_time)) AS month,
+          resource_name,
+          SUM(usage.amount) AS total_amount,
+          usage.unit
+        FROM `{tname}`
+        WHERE DATE(start_time) BETWEEN @start AND @end
+        GROUP BY month, resource_name, usage.unit
+        ORDER BY month, resource_name
+        """
+        job_config = bigquery.QueryJobConfig(query_parameters=[
+            bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()),
+            bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()),
+        ])
+        rows = list(client.query(query, job_config=job_config).result())
+        print(f'\n {label} table raw resources:')
+        print(f' {"Month":<10} {"Resource":<20} {"Amount":>20} {"Unit":<15}')
+        print(' ' + '-' * 67)
+        for r in rows:
+            print(f' {r.month:<10} {r.resource_name:<20} {r.total_amount:>20,.0f} {r.unit:<15}')
+
+    # 4. Count distinct SKUs
+    print('\n=== Approach 4: Distinct SKUs in usage table ===')
+    query = f"""
+    SELECT sku_id, resource_name, COUNT(*) as row_count,
+           SUM(usage.amount) as total_amount, usage.unit
+    FROM `{usage_table}`
+    WHERE DATE(start_time) BETWEEN @start AND @end
+    GROUP BY sku_id, resource_name, usage.unit
+    ORDER BY total_amount DESC
+    """
+    job_config = bigquery.QueryJobConfig(query_parameters=[
+        bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()),
+        bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()),
+    ])
+    rows = list(client.query(query, job_config=job_config).result())
+    # Import pricing to check
+    from billing.gcp import _SKU_PRICING
+    print(f' {"SKU ID":<20} {"Resource":<20} {"Rows":>10} {"Amount":>18} {"Unit":<12} {"Known?"}')
+    print(' ' + '-' * 90)
+    for r in rows:
+        # Flag SKUs missing from our hardcoded pricing table.
+        known = 'YES' if r.sku_id in _SKU_PRICING else 'MISSING'
+        print(f' {r.sku_id:<20} {r.resource_name:<20} {r.row_count:>10,} {r.total_amount:>18,.0f} {r.unit:<12} {known}')
+
+
+def _query_metering_table(client, table, start_date, end_date, label):
+    """Query a metering table and compute costs using our SKU pricing."""
+    from google.cloud import bigquery
+    from billing.gcp import _SKU_PRICING, _usage_to_cost
+
+    query = f"""
+    SELECT
+      FORMAT_DATE('%Y-%m', DATE(start_time)) AS month,
+      namespace,
+      sku_id,
+      resource_name,
+      SUM(usage.amount) AS total_usage
+    FROM `{table}`
+    WHERE DATE(start_time) BETWEEN @start AND @end
+    GROUP BY month, namespace, sku_id, resource_name
+    ORDER BY month, namespace
+    """
+    job_config = bigquery.QueryJobConfig(query_parameters=[
+        bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()),
+        bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()),
+    ])
+    rows = list(client.query(query, job_config=job_config).result())
+
+    # Aggregate costs per month and per (month, category).
+    monthly = {}
+    monthly_by_cat = {}
+    missing_skus = set()
+    for r in rows:
+        cost, category = _usage_to_cost(r.sku_id, r.resource_name, float(r.total_usage))
+        if r.sku_id not in _SKU_PRICING:
+            missing_skus.add(r.sku_id)
+        month = r.month
+        monthly[month] = monthly.get(month, 0) + cost
+        key = (month, category)
+        monthly_by_cat[key] = monthly_by_cat.get(key, 0) + cost
+
+    print(f' {"Month":<10} {"Total":>12} {"compute_spot":>14} {"compute_od":>14} {"network":>10} {"storage":>10}')
+    print(' ' + '-' * 74)
+    for month in sorted(monthly.keys()):
+        total = monthly[month]
+        spot = monthly_by_cat.get((month, 'compute_spot'), 0)
+        od = monthly_by_cat.get((month, 'compute_ondemand'), 0)
+        net = monthly_by_cat.get((month, 'network'), 0)
+        stor = monthly_by_cat.get((month, 'storage'), 0)
+        print(f' {month:<10} {fmt_usd(total):>12} {fmt_usd(spot):>14} {fmt_usd(od):>14} {fmt_usd(net):>10} {fmt_usd(stor):>10}')
+
+    if missing_skus:
+        print(f'\n WARNING: {len(missing_skus)} unknown SKU IDs (not priced): {", ".join(sorted(missing_skus)[:5])}...')
+
+
+# ---- Main ----
+
+def main():
+    parser = argparse.ArgumentParser(description='Explore GCP billing data')
+    
parser.add_argument('--project', default='testnet-440309', help='GCP project ID')
+    parser.add_argument('--table', default='', help='BigQuery billing export table')
+    sub = parser.add_subparsers(dest='command')
+
+    sub.add_parser('discover', help='Find billing export tables')
+
+    fetch_p = sub.add_parser('fetch', help='Fetch billing data from BigQuery')
+    fetch_p.add_argument('--months', type=int, default=6, help='How many months back to fetch')
+
+    monthly_p = sub.add_parser('monthly', help='Monthly totals')
+    monthly_p.add_argument('--by', choices=['service', 'sku', 'project'], default='', help='Group by')
+    monthly_p.add_argument('--month', default='', help='Filter to month (YYYY-MM)')
+
+    daily_p = sub.add_parser('daily', help='Daily costs')
+    daily_p.add_argument('--month', default='', help='Month to show (YYYY-MM)')
+
+    top_p = sub.add_parser('top', help='Top cost items')
+    top_p.add_argument('--month', default='', help='Month to show (YYYY-MM)')
+
+    sub.add_parser('compare', help='Compare billing export vs usage metering')
+    sub.add_parser('status', help='Show data status (what we have cached)')
+
+    meter_p = sub.add_parser('metering', help='Query both metering tables directly and compare')
+    meter_p.add_argument('--months', type=int, default=6, help='How many months back')
+
+    args = parser.parse_args()
+
+    if not args.command:
+        parser.print_help()
+        sys.exit(1)
+
+    # Dispatch table: subcommand name -> handler function.
+    cmds = {
+        'discover': cmd_discover,
+        'fetch': cmd_fetch,
+        'monthly': cmd_monthly,
+        'daily': cmd_daily,
+        'top': cmd_top,
+        'compare': cmd_compare,
+        'metering': cmd_metering,
+        'status': cmd_status,
+    }
+    cmds[args.command](args)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ci3/ci-metrics/billing/fetch_billing.py b/ci3/ci-metrics/billing/fetch_billing.py
new file mode 100644
index 000000000000..271a788fc6bd
--- /dev/null
+++ b/ci3/ci-metrics/billing/fetch_billing.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python3
+"""Fetch namespace billing data from GKE resource consumption metering in 
BigQuery.
+
+Queries the GKE cluster resource consumption table which records CPU and memory
+usage per namespace per pod. Actual GCP SKU prices (from the Cloud Billing
+Catalog API) are applied to convert resource usage into dollar costs.
+
+Categories produced:
+  - compute_spot (Spot / Preemptible VM cores + RAM)
+  - compute_ondemand (On-demand VM cores + RAM)
+
+Usage:
+  # Fetch last 30 days
+  python fetch-billing.py
+
+  # Specific range
+  python fetch-billing.py --from 2026-01-01 --to 2026-01-31
+
+  # Custom output directory
+  python fetch-billing.py --output-dir /tmp/billing
+
+Environment:
+  Requires Application Default Credentials or GOOGLE_APPLICATION_CREDENTIALS.
+  pip install google-cloud-bigquery
+"""
+import argparse
+import json
+import os
+import sys
+from datetime import datetime, timedelta
+
+from google.cloud import bigquery
+
+# ---- defaults ----
+DEFAULT_PROJECT = 'testnet-440309'
+DEFAULT_DATASET = 'egress_consumption'
+DEFAULT_TABLE_CONSUMPTION = 'gke_cluster_resource_consumption'
+DEFAULT_TABLE_USAGE = 'gke_cluster_resource_usage'
+DEFAULT_OUTPUT_DIR = os.path.join(
+    os.getenv('LOGS_DISK_PATH', '/logs-disk'), 'billing'
+)
+
+# ---- SKU pricing ----
+# Prices sourced from GCP Cloud Billing Catalog API for us-west1.
+SKU_PRICING = {
+    # Compute - Spot (per vCPU-hour / per GiB-hour)
+    'E7FF-A0FB-FA82': {'price': 0.00497, 'resource': 'cpu', 'category': 'compute_spot'},
+    '48AB-89F5-9112': {'price': 0.000668, 'resource': 'memory', 'category': 'compute_spot'},
+    # Compute - On-demand T2D
+    'EFE6-E23C-19CB': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'},
+    'FB05-036A-8982': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'},
+    # Compute - On-demand N2
+    'BB77-5FDA-69D9': {'price': 0.031611, 'resource': 'cpu', 'category': 'compute_ondemand'},
+    '5B01-D157-A097': {'price': 0.004237, 'resource': 'memory', 'category': 'compute_ondemand'},
+    # Compute - On-demand N2D
+    'A03E-E620-7389': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'},
+    '5535-6D2D-4B50': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'},
+    # Network Egress (per GiB)
+    '0C3C-6B13-B1E8': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'},
+    '6B8F-E63D-832B': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'},
+    '92CB-C25F-B1D1': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'},
+    '984A-1F27-2D1F': {'price': 0.04, 'resource': 'networkEgress', 'category': 'network'},
+    '9DE9-9092-B3BC': {'price': 0.20, 'resource': 'networkEgress', 'category': 'network'},
+    'C863-37DA-506E': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'},
+    'C8EA-1A86-3D28': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'},
+    'DE9E-AFBC-A15A': {'price': 0.01, 'resource': 'networkEgress', 'category': 'network'},
+    'DFA5-B5C6-36D6': {'price': 0.085, 'resource': 'networkEgress', 'category': 'network'},
+    'F274-1692-F213': {'price': 0.08, 'resource': 'networkEgress', 'category': 'network'},
+    'FDBC-6E3B-D4D8': {'price': 0.15, 'resource': 'networkEgress', 'category': 'network'},
+    # Storage (per GiB-month)
+    'D973-5D65-BAB2': {'price': 0.04, 'resource': 'storage', 'category': 'storage'},
+}
+
+
+def usage_to_cost(sku_id: str, resource_name: str, amount: float) -> tuple[float, str]:
+    """Convert raw usage amount to dollar cost. Returns (cost_usd, category).
+
+    Unit conversions (inferred from the divisors — confirm against the
+    metering export schema):
+      cpu:           core-seconds -> vCPU-hours (/3600)
+      memory:        byte-seconds -> GiB-hours  (/3600 /2**30)
+      networkEgress: bytes        -> GiB        (/2**30)
+      storage:       byte-seconds -> GiB-months (/2**30 /(730*3600))
+    """
+    info = SKU_PRICING.get(sku_id)
+    if not info:
+        # Unknown SKU: contributes $0 under the 'other' category.
+        return 0.0, 'other'
+
+    price = info['price']
+    if resource_name == 'cpu':
+        return (amount / 3600.0) * price, info['category']
+    elif resource_name == 'memory':
+        return (amount / 3600.0 / (1024 ** 3)) * price, info['category']
+    elif resource_name.startswith('networkEgress'):
+        return (amount / (1024 ** 3)) * price, info['category']
+    elif resource_name == 'storage':
+        gib_months = amount / (1024 ** 3) / (730 * 3600)
+        return gib_months * price, info['category']
+    return 0.0, info['category']
+
+
+# ---- BigQuery query ----
+
+def fetch_usage_rows(
+    client: bigquery.Client,
+    project: str,
+    dataset: str,
+    date_from: str,
+    date_to: str,
+) -> list[dict]:
+    """Query both metering tables for daily usage by namespace + SKU."""
+    consumption = f'{project}.{dataset}.{DEFAULT_TABLE_CONSUMPTION}'
+    usage = f'{project}.{dataset}.{DEFAULT_TABLE_USAGE}'
+    # Union: CPU/memory from the consumption table; networkEgress/storage
+    # only appear in the usage (requests) table.
+    query = f"""
+    SELECT date, namespace, sku_id, resource_name, SUM(total_usage) AS total_usage FROM (
+      SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, SUM(usage.amount) AS total_usage
+      FROM `{consumption}`
+      WHERE DATE(start_time) BETWEEN @date_from AND @date_to
+      GROUP BY date, namespace, sku_id, resource_name
+      UNION ALL
+      SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, SUM(usage.amount) AS total_usage
+      FROM `{usage}`
+      WHERE DATE(start_time) BETWEEN @date_from AND @date_to
+        AND resource_name IN ('networkEgress', 'storage')
+      GROUP BY date, namespace, sku_id, resource_name
+    )
+    GROUP BY date, namespace, sku_id, resource_name
+    ORDER BY date, namespace
+    """
+    job_config = bigquery.QueryJobConfig(
+        query_parameters=[
+            bigquery.ScalarQueryParameter('date_from', 'DATE', date_from),
+            bigquery.ScalarQueryParameter('date_to', 'DATE', date_to),
+            
] + ) + rows = client.query(query, job_config=job_config).result() + return [dict(row) for row in rows] + + +# ---- aggregate into daily JSON ---- + +def build_daily_files(rows: list[dict]) -> tuple[dict[str, dict], set[str]]: + """Convert raw usage rows into daily billing JSON structures. + + Returns (days_dict, unknown_skus). + """ + days: dict[str, dict] = {} + unknown_skus: set[str] = set() + + for row in rows: + date_str = ( + row['date'].isoformat() + if hasattr(row['date'], 'isoformat') + else str(row['date']) + ) + ns = row['namespace'] + sku_id = row['sku_id'] + resource_name = row['resource_name'] + amount = float(row['total_usage']) + + cost, category = usage_to_cost(sku_id, resource_name, amount) + + if sku_id not in SKU_PRICING: + unknown_skus.add(sku_id) + + if cost <= 0: + continue + + if date_str not in days: + days[date_str] = {'date': date_str, 'namespaces': {}} + if ns not in days[date_str]['namespaces']: + days[date_str]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + + entry = days[date_str]['namespaces'][ns] + entry['breakdown'][category] = ( + entry['breakdown'].get(category, 0) + cost + ) + entry['total'] += cost + + # Round + for day in days.values(): + for ns_data in day['namespaces'].values(): + ns_data['total'] = round(ns_data['total'], 4) + ns_data['breakdown'] = { + k: round(v, 4) for k, v in ns_data['breakdown'].items() + } + + return days, unknown_skus + + +def write_files(days: dict[str, dict], output_dir: str) -> int: + os.makedirs(output_dir, exist_ok=True) + count = 0 + for date_str, data in sorted(days.items()): + filepath = os.path.join(output_dir, f'{date_str}.json') + with open(filepath, 'w') as f: + json.dump(data, f, indent=2) + count += 1 + return count + + +# ---- CLI ---- + +def main(): + parser = argparse.ArgumentParser( + description='Fetch GKE namespace compute billing from resource consumption metering' + ) + today = datetime.utcnow().strftime('%Y-%m-%d') + default_from = (datetime.utcnow() - 
timedelta(days=30)).strftime('%Y-%m-%d') + + parser.add_argument('--from', dest='date_from', default=default_from, + help='Start date YYYY-MM-DD (default: 30 days ago)') + parser.add_argument('--to', dest='date_to', default=today, + help='End date YYYY-MM-DD (default: today)') + parser.add_argument('--project', default=DEFAULT_PROJECT, + help=f'GCP project ID (default: {DEFAULT_PROJECT})') + parser.add_argument('--dataset', default=DEFAULT_DATASET, + help=f'BigQuery dataset (default: {DEFAULT_DATASET})') + parser.add_argument('--output-dir', default=DEFAULT_OUTPUT_DIR, + help=f'Output directory (default: {DEFAULT_OUTPUT_DIR})') + args = parser.parse_args() + + print(f'Connecting to BigQuery ({args.project})...') + client = bigquery.Client(project=args.project) + + print(f'Fetching metering data {args.date_from} to {args.date_to}...') + print(f' consumption: {args.project}.{args.dataset}.{DEFAULT_TABLE_CONSUMPTION}') + print(f' usage: {args.project}.{args.dataset}.{DEFAULT_TABLE_USAGE}') + rows = fetch_usage_rows( + client, args.project, args.dataset, + args.date_from, args.date_to, + ) + print(f'Got {len(rows)} aggregated rows') + + if not rows: + print('No metering data found. Check that:') + print(' 1. GKE resource consumption metering is enabled') + print(' 2. 
The date range has data') + return + + days, unknown_skus = build_daily_files(rows) + count = write_files(days, args.output_dir) + print(f'Wrote {count} daily billing files to {args.output_dir}') + + if unknown_skus: + print(f'\nWARNING: {len(unknown_skus)} unknown SKU(s) had zero cost assigned:') + for s in sorted(unknown_skus): + print(f' {s}') + print('Add these to SKU_PRICING in fetch-billing.py with prices from') + print('the GCP Cloud Billing Catalog API.') + + # Summary + total = sum( + ns['total'] for day in days.values() + for ns in day['namespaces'].values() + ) + ns_set: set[str] = set() + cat_set: set[str] = set() + for day in days.values(): + for ns_name, ns_data in day['namespaces'].items(): + ns_set.add(ns_name) + cat_set.update(ns_data['breakdown'].keys()) + + print(f'\nTotal cost: ${total:,.2f}') + print(f'Namespaces ({len(ns_set)}): {sorted(ns_set)}') + print(f'Categories: {sorted(cat_set)}') + + +if __name__ == '__main__': + main() diff --git a/ci3/ci-metrics/billing/gcp.py b/ci3/ci-metrics/billing/gcp.py new file mode 100644 index 000000000000..5254e20bbbf0 --- /dev/null +++ b/ci3/ci-metrics/billing/gcp.py @@ -0,0 +1,289 @@ +"""Namespace billing helpers for rkapp. + +Fetches GKE namespace billing from BigQuery with in-memory cache. +Route definitions remain in rk.py; this module provides the logic. + +SKU pricing: Queries the Cloud Billing pricing export table in BigQuery +if available, otherwise falls back to hardcoded rates. To enable the +pricing export: + 1. Go to GCP Console > Billing > Billing export + 2. Enable "Detailed usage cost" and "Pricing" exports + 3. Set the dataset to the _BQ_DATASET below +""" +import threading +import time +from datetime import datetime, timedelta, timezone +from pathlib import Path + +# BigQuery defaults +_BQ_PROJECT = 'testnet-440309' +_BQ_DATASET = 'egress_consumption' +_BQ_TABLE_USAGE = 'gke_cluster_resource_usage' +_BQ_TABLE_PRICING = 'cloud_pricing_export' + +# Hardcoded fallback SKU pricing (us-west1). 
+# cpu: price per vCPU-hour, memory: price per GiB-hour +# network: price per GiB, storage: price per GiB-month +_HARDCODED_SKU_PRICING = { + # Compute - Spot + 'E7FF-A0FB-FA82': {'price': 0.00497, 'resource': 'cpu', 'category': 'compute_spot'}, + '48AB-89F5-9112': {'price': 0.000668, 'resource': 'memory', 'category': 'compute_spot'}, + # Compute - On-demand T2D + 'EFE6-E23C-19CB': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + 'FB05-036A-8982': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2 + 'BB77-5FDA-69D9': {'price': 0.031611, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5B01-D157-A097': {'price': 0.004237, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2D + 'A03E-E620-7389': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5535-6D2D-4B50': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Network Egress (price per GiB) + '0C3C-6B13-B1E8': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + '6B8F-E63D-832B': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '92CB-C25F-B1D1': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '984A-1F27-2D1F': {'price': 0.04, 'resource': 'networkEgress', 'category': 'network'}, + '9DE9-9092-B3BC': {'price': 0.20, 'resource': 'networkEgress', 'category': 'network'}, + 'C863-37DA-506E': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'C8EA-1A86-3D28': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'DE9E-AFBC-A15A': {'price': 0.01, 'resource': 'networkEgress', 'category': 'network'}, + 'DFA5-B5C6-36D6': {'price': 0.085, 'resource': 'networkEgress', 'category': 'network'}, + 'F274-1692-F213': {'price': 0.08, 'resource': 'networkEgress', 'category': 'network'}, + 'FDBC-6E3B-D4D8': {'price': 0.15, 'resource': 'networkEgress', 'category': 'network'}, + # 
Storage (price per GiB-month) + 'D973-5D65-BAB2': {'price': 0.04, 'resource': 'storage', 'category': 'storage'}, +} + +# Resource name to category mapping for SKUs discovered from BigQuery +_RESOURCE_CATEGORIES = { + ('cpu', True): 'compute_spot', + ('cpu', False): 'compute_ondemand', + ('memory', True): 'compute_spot', + ('memory', False): 'compute_ondemand', +} + +# Active SKU pricing — updated from BigQuery if available +_SKU_PRICING = dict(_HARDCODED_SKU_PRICING) + +# In-memory caches +_cache = {'data': [], 'ts': 0} +_cache_lock = threading.Lock() +_CACHE_TTL = 6 * 3600 # 6 hours + +_pricing_cache = {'ts': 0} +_pricing_lock = threading.Lock() +_PRICING_CACHE_TTL = 24 * 3600 # 24 hours + + +def _refresh_sku_pricing(): + """Try to fetch SKU pricing from BigQuery pricing export table.""" + global _SKU_PRICING + now = time.time() + if _pricing_cache['ts'] and now - _pricing_cache['ts'] < _PRICING_CACHE_TTL: + return + if not _pricing_lock.acquire(blocking=False): + return + try: + if _pricing_cache['ts'] and time.time() - _pricing_cache['ts'] < _PRICING_CACHE_TTL: + return + from google.cloud import bigquery + client = bigquery.Client(project=_BQ_PROJECT) + table = f'{_BQ_PROJECT}.{_BQ_DATASET}.{_BQ_TABLE_PRICING}' + + # Get the known SKU IDs we need pricing for + sku_ids = list(_HARDCODED_SKU_PRICING.keys()) + placeholders = ', '.join(f"'{s}'" for s in sku_ids) + + query = f""" + SELECT sku.id AS sku_id, + pricing.effective_price AS price, + sku.description AS description + FROM `{table}` + WHERE sku.id IN ({placeholders}) + AND service.description = 'Compute Engine' + QUALIFY ROW_NUMBER() OVER (PARTITION BY sku.id ORDER BY export_time DESC) = 1 + """ + rows = list(client.query(query).result()) + if rows: + updated = dict(_HARDCODED_SKU_PRICING) + for row in rows: + sid = row.sku_id + if sid in updated: + updated[sid] = {**updated[sid], 'price': float(row.price)} + _SKU_PRICING = updated + _pricing_cache['ts'] = time.time() + print(f"[rk_billing] Updated 
{len(rows)} SKU prices from BigQuery") + else: + _pricing_cache['ts'] = time.time() + print("[rk_billing] No pricing rows returned, using hardcoded rates") + except Exception as e: + # Table probably doesn't exist yet — use hardcoded rates + _pricing_cache['ts'] = time.time() + print(f"[rk_billing] SKU pricing query failed (using hardcoded): {e}") + finally: + _pricing_lock.release() + + +# ---- BigQuery fetch ---- + +def _usage_to_cost(sku_id, resource_name, amount): + info = _SKU_PRICING.get(sku_id) + if not info: + return 0.0, 'other' + price = info['price'] + if resource_name == 'cpu': + # cpu-seconds -> hours + return (amount / 3600.0) * price, info['category'] + elif resource_name == 'memory': + # byte-seconds -> GiB-hours + return (amount / 3600.0 / (1024 ** 3)) * price, info['category'] + elif resource_name.startswith('networkEgress'): + # bytes -> GiB + return (amount / (1024 ** 3)) * price, info['category'] + elif resource_name == 'storage': + # byte-seconds -> GiB-months (730 hours/month) + gib_months = amount / (1024 ** 3) / (730 * 3600) + return gib_months * price, info['category'] + return 0.0, info['category'] + + +def _fetch_from_bigquery(date_from_str, date_to_str): + """Query BigQuery for usage data, return list of daily billing entries.""" + try: + from google.cloud import bigquery + except ImportError: + print("[rk_billing] google-cloud-bigquery not installed") + return [] + + try: + client = bigquery.Client(project=_BQ_PROJECT) + # Use the usage table for all resources (actual consumption, not just requests). + # The consumption table only records resource *requests* which can be far lower + # than actual usage (e.g. prove-n-tps-real: $2.87 requests vs $138.72 actual). 
+ usage = f'{_BQ_PROJECT}.{_BQ_DATASET}.{_BQ_TABLE_USAGE}' + query = f""" + SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, + SUM(usage.amount) AS total_usage + FROM `{usage}` + WHERE DATE(start_time) BETWEEN @date_from AND @date_to + GROUP BY date, namespace, sku_id, resource_name + ORDER BY date, namespace + """ + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter('date_from', 'DATE', date_from_str), + bigquery.ScalarQueryParameter('date_to', 'DATE', date_to_str), + ] + ) + rows = list(client.query(query, job_config=job_config).result()) + except Exception as e: + print(f"[rk_billing] BigQuery fetch failed: {e}") + return [] + + # Build daily structures + days = {} + for row in rows: + date_str = row.date.isoformat() if hasattr(row.date, 'isoformat') else str(row.date) + ns = row.namespace + cost, category = _usage_to_cost(row.sku_id, row.resource_name, float(row.total_usage)) + if cost <= 0: + continue + if date_str not in days: + days[date_str] = {'date': date_str, 'namespaces': {}} + if ns not in days[date_str]['namespaces']: + days[date_str]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + entry = days[date_str]['namespaces'][ns] + entry['breakdown'][category] = entry['breakdown'].get(category, 0) + cost + entry['total'] += cost + + # Round values + for data in days.values(): + for ns_data in data['namespaces'].values(): + ns_data['total'] = round(ns_data['total'], 4) + ns_data['breakdown'] = {k: round(v, 4) for k, v in ns_data['breakdown'].items()} + + return sorted(days.values(), key=lambda x: x['date']) + + +def _ensure_cached(): + now = time.time() + if _cache['data'] and now - _cache['ts'] < _CACHE_TTL: + return + if not _cache_lock.acquire(blocking=False): + return + try: + yesterday = datetime.now(timezone.utc).date() - timedelta(days=1) + date_from = (yesterday - timedelta(days=365)).isoformat() + date_to = yesterday.isoformat() + print(f"[rk_billing] Fetching billing data from BigQuery 
({date_from} to {date_to})...") + data = _fetch_from_bigquery(date_from, date_to) + if data: + _cache['data'] = data + _cache['ts'] = now + print(f"[rk_billing] Cached {len(data)} days of billing data") + finally: + _cache_lock.release() + + +# ---- Public API ---- + +def get_billing_files_in_range(date_from, date_to): + """Return billing data for dates in range. Fetches from BigQuery with in-memory cache.""" + # Refresh SKU pricing from BigQuery (async, falls back to hardcoded) + threading.Thread(target=_refresh_sku_pricing, daemon=True).start() + + if not _cache['data']: + _ensure_cached() # block on first load so dashboard isn't empty + else: + threading.Thread(target=_ensure_cached, daemon=True).start() + + # Convert datetime args to date strings for filtering + from_str = date_from.strftime('%Y-%m-%d') if hasattr(date_from, 'strftime') else str(date_from) + to_str = date_to.strftime('%Y-%m-%d') if hasattr(date_to, 'strftime') else str(date_to) + + return [e for e in _cache['data'] if from_str <= e['date'] <= to_str] + + +def _merge_ns_billing(target, ns_data): + target['total'] += ns_data.get('total', 0) + for cat, val in ns_data.get('breakdown', {}).items(): + target['breakdown'][cat] = target['breakdown'].get(cat, 0) + val + + +def aggregate_billing_weekly(daily_data): + if not daily_data: + return [] + weeks = {} + for entry in daily_data: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + week_start = d - timedelta(days=d.weekday()) + week_key = week_start.strftime('%Y-%m-%d') + if week_key not in weeks: + weeks[week_key] = {'date': week_key, 'namespaces': {}} + for ns, ns_data in entry.get('namespaces', {}).items(): + if ns not in weeks[week_key]['namespaces']: + weeks[week_key]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + _merge_ns_billing(weeks[week_key]['namespaces'][ns], ns_data) + return sorted(weeks.values(), key=lambda x: x['date']) + + +def aggregate_billing_monthly(daily_data): + if not daily_data: + return [] + months = {} + for entry 
in daily_data: + month_key = entry['date'][:7] + '-01' + if month_key not in months: + months[month_key] = {'date': month_key, 'namespaces': {}} + for ns, ns_data in entry.get('namespaces', {}).items(): + if ns not in months[month_key]['namespaces']: + months[month_key]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + _merge_ns_billing(months[month_key]['namespaces'][ns], ns_data) + return sorted(months.values(), key=lambda x: x['date']) + + +def serve_billing_dashboard(): + billing_html_path = Path(__file__).parent / 'billing-dashboard.html' + if billing_html_path.exists(): + with billing_html_path.open('r') as f: + return f.read() + return None diff --git a/ci3/ci-metrics/ci-run-seed.json.gz b/ci3/ci-metrics/ci-run-seed.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..a971ad10d38bf6cbc05df2f5cd5c79d93952c7ac GIT binary patch literal 357714 zcmV*YKv%yXiwFo3?Tl#x|6^$_a&>Mkb7f^@E^2dcZUF4vdvjaKktq1Da<12`>X5pe<1|tSfY;v^!ME_($D(O z`FVQb7eBrN9uyI!xhtJ!L?`+sk)``L&7V%YrZI(8E%H#AMbCrm#cMt7pg}2>yeKVy0m%hX6#jqW|e5x8WeHWTuI`21^?^gZ#{Kr4- z7q>ruv;EJTP5v=HLj+7ecD?@3<;~T*A^+jGZ{Yv?`>qToJ_4J;sDW9)6gW7xfu)$K z153M?Rcv4ZvR=6%1}zFM&Ll?g0h)~qkap-QAH=E(S>led%dtx!Y z7t@2q7Q?6RcbCKZ;&Q&}mlNE)bEqW2HZp=p!A(cq5QV(ijhkDITlxse=*GT|EscWq zaD&EH!3O9lc-t_kTFJKioo#H?*lMCpNj=)4Ekd0e6GgGQ+49F1?`BLR)vNaawlp$2 zsB%*uv>0zlfxI3tOx|3d_uKjMqPtAr@N9LS@UCy(E#|ZCe7?Evx3kOcha}T&w(Ft4 z>duGFu%7pe`Co@q>N>?Ab??4BME8&Vvit3@9MZ1?fk{D0nAc5AK^Y)?9hB)^jN(S( z+jzI(a>G43E{9!s8E~1Q4p%WaGcGpVqvMJbvOP|NV5q?*2Q|YoHQeLldT*SW1o5#7 zHzi9NthiOV(ksXPNka9$Uu=pQRSfA@Ml$Hi8N=<8t5r9Ogvh1MrI^M5QJJ(bH}uiuDN@ zu~JYqngyu|P85t7&4RTtSl+QO3L5tlUyq<|Wyk{rA-2pf>5N%?ji9P9m$KBTByrl} z!L8uk4r70Q-hI1TZrA;6+x^SXpU;=WX4CyS{pw+pw;peeKp&7fHch})k<11&c0Go z(gUNi_#-SsYo=QUnKBaLmFKQQZbW4BR$%{Yu@lgJ81fi zQ}AFWNnWn2 z!1RD#iwTf-`5*&K-oyN6z#(O{zn@=RzWL3^e!l46%@_0S=Pv#D?IPdiq{*wqFUjlf z%O{&+ikAqiwv30UFvXNOt|wAD7*y7)IQ8}N^^CC>Fa?y5E%tUtJ!V=_*2`J+%FhoBvamrXFgKJ>6}|ZMYIYQN6&xxx9`6D!*{=H 
zM+5E&A97lzZ?9IVw0AtB#7*g`l$6X$(3GA=DY$5lC2UGhpcDdxSX>VYBI=1Ijd<(f zeLvf-)^EG3)yI6Q`+sgeFK6Gaw!?SnSI^(ihxO)vcC&u5IO%;ja9{4j?Y?}}qxlo0 zZokn{DRkuRQrM)PQc8kZ3YydtO0ly`QImQysl1bJ9{q|l;8!m-a?9w*R0_c4OUu0K zB;O#Lvc>YcrN$O4B70$urmYqiBh@JiJF}u~tEG*M1x0Z{K+$%pMAA@Krx?8xvm!Oc z0~AT37!`pl6+>}Zs;r1jak?U+qU~ytpw~)8^>#&WiqjRv$6&mqi5~5WfyL!5@~;&` zdKRa7A0JS@;(U0&=syhIW;QJQ^?Y@^pw;!u^V`L&*<$FIH`m>2+0Evs)%5q82cHKz z@0asovD{uwx#}0v^G*e9=eiP2Nz#u>B00}yNotaZNv40PZF@AI(yEm7r6OJJ7%67_*r*CB`@_%znPG==I8J7`R3CWMucz=6`t^r5Ts98ee||`CPFcv38PLD z1cKQKn$sR90aUA#FBHJr>l6X)@x;w(pOZ_E6Wb-8NE9obO1&V)oUl3Vji>aXIqTs_ zs&dkVAy%h~JFPFt$&^Px{>;Lz&b;CBMo>FbdI5tkl!nZIH&pxqcQ@X4*ZpRb^S$lm z(A}nn`4Uxn=ch%B_h|9SZ%qM^_X0%5OXFBd0pqw`3@m0lZg#bH6bOvPa{^P99XUtA zY)8$mI_`A0-))LJtg}OoXtu*^RrR^{&-%JkGQr&9FQfw_7#EjrnAAHe|;b&4$nnry7FyV64U&5$X)n zqgBj?*bJw~AzG`(N5LvX@5DQ^AveRxh9P|)Mnhj0hoZYx0h8gYu8Mom@{1s(U@>mv zq-kA+p-512R4^1FiKgb!U)>?T%SZn$cW)TF?^g45u3od8UtDe%pWmQEdE@Ynqm!1e zkuR%fcV8aD_phs~?w{7fGPzBNL1mE`ZOfcMuFzUV!l<9R@_eG~XS=EO$^j=GeSg4+ z>@fz-=qZc@3Jo^LjHnqsfl(wvXEie3_IWi%q+f zU#+kDZMR%4-^?y=mLGqLi49FOlp`1W8Stxv|Tg^g&2~M%?yeX8HiN#|To1GP`}wTftQNEG-F%rV0>59U%x?j2`t^36FMw^j z<@Hr}=WyCxK)K%ql&7SP7}9gmM$1w<_$T{lBUx!H4wSueTI&FeUQx-q2wF?)(M(%$ zM!H3n$MazXvf99}i z{N$9spvHr8l;-!7qJv)p=FHL5K0(iju*MG{y;$;NClaRhNz4sQ-*z`UzT1!Y>@S<{ z;%0t6Y`Qd==$69|-DX&xcfa}m52r=MdqjN76Nu;|M~TLpFr_uQ^ajQaC5YXSn&DK# z^kEu12w^N-)ZJ^SBy2UbjV36PQyWb1)>DvT^jUL#C&*8SPV(cSI4>Ysrzp>L;4_= zvtfPLpFREAxv=D}l~(!=T{BiZ}wEM~Tr14U`!LNd3-KBE(FP$iEjHRC)20tu*+wg+$LBs;&uEWYYP83W z+5qCLQCm$L)JzMF_Sm#3cZo*Y;=!$PTJ{WPT3h?YMr)qWqz^enr$$=#c3N_d$rZ{} z=2x5}b`uYTe3SFxU+zjs{p;uJ;m^C9j34{OJY~r9)v_Db>(%=9Y+nAU-SDZ;@A|ZL zrs%$4B^_L_@-M5+WtZMo`pY^YUI{cJI^$wdaD|dL+V{9ir{PB}fca zsOh&Z-6J1C<%W7Z)VurQrs7mmhYB3XjB0F)+f??M&B}0^( zKx(v2VI+jA=VtoEkPeH+hB{@Xka9(I7sR&v8L$GCQ|=_i3^(OSY0ipM>ma@Du7>r+ z@MgR2=gVKc>(}e_hL-6Yx;QBtewm-#eR&AmCclGuH}5~JJ`T$dQzo7i(u~7tFkSv4 zDjiox9Jfoqx!sPNU9DXV!Bpm3&XRJdwF_dl!)8}&=P5e0#T}QLAMUkNw%bv&tF-ej 
z7v8kRT@b9Yqx6M&%(U04dhvF~1T@8lM^h!%Jd5-2(cz#6>!5Qm2xWHJ27-Jyw^Q*c`{cBJ> zV)3eRsF;$$VsD=`SjOCoVd}c`+ua$r%_4n%DPP=d@*UCD-Llr-EO@=|=Zl;5saLq} z?~l^K_eY0Ke{;HZX4wZ-Dt|Kq#SQA|pcusr1r6%SphEFB=m-=xsHcMRxgxa{%9u>N zE~sNB6XQ^VUbq-hK24J+uJpEm`YLtjd{)3k+eriz;VMK*@fOX9)DR~lMnEz4sf(3_ zv2?h4gpeEI^f&}YBOz059L9>l6dK}GL=UuUa`iAGv6DK)d}-c_SW`job!RSLFJ`4I z(N^Kook^@R%vDE9rSj4%D6sqPCOr`U@%{T@ww-?*x{H3(y&sZOZfJLV@O{L98a4;o&51xGuzB>_2oy7ea#?qn|<0x5P7qOE;BQuhTPoZKX3!-&o z1f^CwWwK{~8RO(+wq<`N4?aZW9SoJqC?hOB)XGuD6`B5P3bSb|ANyH?odp}qqp(Q& z=v`@E43Zv&>LxEp{loBn(SI1a^Z91>(chl%KCOql_i#^HzBUbR(MMq0#1r-PGZ!?j2e~+3 zDxzpxj(4RpX?wZK%kgEdic2uB#%p3aF-C&yBbK_#RxGo(#lSr)N=%>tYO1s2cV%;| zD@w@XVsRnYo}s_y<%xY}kxc6o*{smoXFPpRX2Cf*Xb60*Ppmy&xm2Sq#<+MWaeRFA zKG#1xXmO&&SkWw>%eBQ=QoxEB6YvnUaHuA?DWNnTA7@;=Jp+8wGa7V%MuQHHu*L9~ zRri~#`MRG{latluZ^O3LPf)DMN#kCOfMzdlUJrYPG&Wfe+ox!uN-qLu_JZbR^P*rZ zWV|akRpaH+J|0r@vUquL*22@}v-IO$B8LsZ#y z!gzQ*%;|ZZ7e=vrLGv|X-coYR<^HXCkI>VG8?vHwNF#5HdkT7Ai)aN>YP4Nom0+7v!r-r zK4^4IVcLb}lurKG)90?^_icK1w#;V=mMO!WZM#i+N3-qCdg%IGvSpip*2jK5@86xY zu@A(6cZFHLe0d1rY~HW?<^19@L0wxGhDRPSA23W$Hg7iM zX80@(xp-EjY>3V9c^YCg8$vUDc7`f3YN@3~dL;n^x18qL=QJ8}n_#nH%W0lt9EN-* z*_#bpPV+1cy|`dDG%a5@!ny^u$X?KNfeOw7eRaP10pIjntJ$~ZJI^jgJ>~wrTv!$L z7$ZtJ(vINCOPuNbZlFHpZlLG9B*xCP8<iNqTU}Zad>6s~l z>1;EdK`ge+7_-ud>SJL|)93TK7^yYfeHJ^e35rsVXoqTB<4+0|%uuZ}^3y?)CvS#o zhrT8r7!rrb(Nt|8&!QsMAx89O#C8zni4LM9sK^|PP|-8SFujj!dY7wizDz%FvFMho z^Yqx@e#SNJeAKOCk>*Xm7f}#$^HV9Jb!uEc< z(jl+1T}{`DmyUPTv2ULbftX_pT5MIvR`Ai7U{I+*%zitz+fjRNJ?3*u9HB^Qu_^tf z(A=hSasi(fTelx00@xj%w;rxmABUv39oAR#?RHph7oTsNAm{dV!|Z(1cUL!ylNTX$ z(SIrQx%={II$8wpZg0()isl9ng?3HR?@^4UTL2@9*c7KL3ft0)QUyr*UmVtKj9v!+=ne%r7H54*8^qCU;=XtfxM|T z5R=x#8>{m|@t}BXqo%1KZ^)}!Vk~-1LS$#u$QqOi!)AJgDgF0n)<@0X7g6^3lrz5w zDe?5t)+SVZDAa}WIF1%01Tz{oG%Ff=R%bl6Nd;ud zJ<_EB;NVZH$x$W5vZx=Ri(*7N20BP>hN{jwB2;kbv6oklcmt+_K*fWo>IjWb0q zlyDtms_8ub(w!!**N;==qeEb(z!oukA|{3?XpETRO71ul=Aq5ZL@i=Y7cseTznP+9 ziu95wjyVY$rO<%l3ilz>E22LA$|*Pde!ddlUiRz7e3&wHscP`iHgu%`s4^l5#AYLW 
z;UkwF>FB|dPte;Ts6m_zBC^;=Eocy@f?$XUR)`n>Bricc%5yxD_z+4%QzY=hH1(W9 zt6gVK0RYISzxT#gSITHEYi*>5jJQ2?zHo46gwB>3zXm$%pz1Mzv0vz2AA>5oLLt8~I&5@xbR5%!MUC|epps4n z>~!4dDx;2rs4bPj;Hv1-Ka{I0+>$(0#KR(wkk*hV?IZn}F#L&(-R7t)cQ>;J25< z?8EhHzTAGlUR|#?{o;?Scc)#~!~1o;gUiPJxnFMQ{l%*LzF#c*^Z9ahy2JRmhcBu# z^HJk_W_;pA%zW7Ro)=$wauqkD%6!oHo*AD+R4boxHr@D+eE(^b4>1()G3Q}5f|JSj zCthHAneQ*>i~pD}&nKOtM;FsH+^K1qF*>!EbGlVjc4Ah1-Q}FB7>`>fXeY&NmZBDK z)8j1{MnO}&sc@`R6flpP71vf?kD~4L2BvR!LFIc!^gA8N3Nu0M`qf#VExP@Q&MC`D_>hOo4RCn z(01RrU;13~G5mrbq+}?HYinl5n!{sEGk9;C{Y5~im{Z&;vRD<{9De#79s@Ht+bx+i zg~vL@a*fB4SZwQQ2gD+YsO^|+RPuF-;Oy}U zEk38lCz1ENmWvPH>W!>a8J|)r<5P}LTgKDk(?0ZS@PM|VS1DA+XDIDtjL1@xJ+5pd z4bfX4s#-_Os^*8P?O=AJtr(BNgSS-)Qtz;i*1H(Zw9sgeOzW7{+h|L*yY|r{X{j>w zNWz3IVVa&WAw{(YXhpij8Wl=ShSDn>fpB^&!@IJUIW+(E^Y!rO*?NAx?SAw9AG-9) z%{}48upHLAFLTuX=}-TB+G0__Uv@NiUq0_UXu#uiaaZEfL|1c?>J`%+^MdqVy4_f7 zqoYRmZ0MqMyQL)?9W=UUL>HL6naLI6Zz6lkou5idWy%{{pM}$=+ zwif$~1Y8H}wK(l4!(xLyYQzR1v@Pd^(kX+zup$*hF*VquMl1w&#(fiosst$|xCs8B z1gXJ3lpl>ATUY0uL$S#^Pc^b$S*5gk7d&~0(?WLs>20?ju2vt1?)>I@F`wmzGkM+h zv=akeuC(Z@phb|1&Zf)?kXWif1GCb2YuC$Z!^ z{tOYNR!r0^22IQ+MlnRSiMc}c{RhNKJ!Q&b#^>G36*Hc^im1Q=jR#yytz`Czr9{Lm zW}Gax7{gp?3#VV>9~V=RxI5G$b7?zXF4JG}2!=fi)$7`ETv zyi1IKT3u`sYU z8#Z38$IZ10Gyr=vM<}-lJxYY;;+DZUjW2X?*bnL}K(T=y12ic5U_5yfioFl_fbwpT zwt+$eJql<_Da8otYCIz{QmOO<=E`F!zdg#Sj@jz^b9bFS;_j+ne>h+LuNk?{CP>#W}h(jB!7H6B|uHJE}0quw@w9)q2jJ5>eqVt73hamm1 zI(Lkt9nEeeC{bPX*1OFZpb8d9fz7bcV2=n(ke+nGb}3ebwGuYkVOxH5)cgn-Jx9x` ztvd@)I{Ug0Rwz1i#8QjcV-Ce3FX5@NY%5eotVpRK;Rq}?*y9exLf%=2;+khHI|eRo zPE(p1Mbb(231;bS4{y7FyFSlboZqeH>nymOUtDe%pWmQEdE@YnFB_~s zcfU<+smQbq>rw9w^+?S zoD|c2sqS(2oEj`dvn3v!rHun58I3 zr(nC*qp^-M{Ure_WzD&+S4L^gSUJrIgDou4X=PX{hiDEDAI* z*;KNzE{1J#h2dTz>leVJ8Y`>Zyrmao6Xx0xjhX z=I2kv*~J!3hecD61XJ24i0Q|@n^v0WZe{E4YTK;8t#`U!UESq$+f_IFQu(yoeV-?- zlc`^{?!J7q_6^gzrRX?UnqL(UHWj&y2e@A&*D;$D?d1yM&0O5Lo|-F`DngBLVdHvQ zF6Ht9MVSj4*OPKZBrToh8-oVErr|Mbmfk-pR0D(jbhesA5?&0;%G$?eW;(_JT{ z&0@9f=F4ulUd=AM>-0Sg-4FdXVV>Ae@BQ@WkbCI3pQhKXe%7Um=6B@y+vn#4nEjyn 
zJvYAyjAlPVdLXzoK$4O@6tRWkVWAiu0f@0E9@Ir53$j@fn&e@Ust`@pL~?srSMz*i5DukI zdFd4dnp}pvjj6^oHkZVYXt`7ZXF;iEMIhOFX0Y0t3wAeKW8C`Owb$HKByy##_iDDq zW_w&)ftYO7*p*E}jV(GKijUW_Ei~I{wqxCZQgTCrrrt8~jJ$9it`dlyFOb-&*B%hTS9_xHEr{qsNG?ftlkXR(`L zGkFvf@9ZW#?E=e-Ci!Yy3*tP8*b2c?47FYWW-n+l^e8bTrE4o?lgam4Ux>*uud zD{58`SV7*LII?)~U1A#0aHs z;(M(EdaG4U-Jus|Wn0Y`AHXR`TfSS{F&DqUnySnO@!}f>O z`oktK4_z<%&ll^}&GP)DZhW8i@srM-lpSfrU~DEG3PqyXkHZCD0f!B)296|6cWj&7 zVOPAZ1}>kN7Eh&ndHJ>;Q=loy$$EI-&$g@ekIfMBzq(C&ceCxy z`o!%p;l8vDJY}@Se1gk*%2@#m8KuA;G%odXA3=kLW<}$GYHbokUMtZ&*dq-aS_PUX zF`{v-LlY>qZ686ShGs@%^rof-DH)N{n&Td{z|kIQwq)dGquCOX2x(TufYw%9HP1A1 z-U$>Y+vyeOfu>!e`%&#}_i7STMw84qK;WV2(Q#~5ev5z$j) znac9xA=)Fmu&_ieEcB&U9H(4iYORX5-Sv9@G53j+yg%>^haY(U=YGANFS~yq)_mVY{Ox3swLpb0~(9- zK&-P&lVh`G#d(?*dSy$|xLZ>-ZH!>0WprY;f$5LI;^a^=~>2(Qz+{B#SY}2>p&ZtXl+vTv` zZEK#=FW#?&#i!lOOqq^)u^rHXh=oiij;@jgMFzFXRg_tJ*>cvH3o%8Fw~e3de4QM* z;LUQ}Ha>tKXyi50|ezO_6-w*4}hxzjS@pn*@P1NF9grbdcW%|Y2Jjyu{kB}y5 zWVV53^QboBz-)uf=5cMrnQge)Jf01^NU>-`5+J$*pEtL~h5N zz}Fz5CZANNp;RUD*3DKWagSSjyp5aY6+g9^5=d1n0)PaoY29_&HiV9wh9F23Y`1Nl zkCmp$&TPuf^jM~xYX_T5*^}0pqC;mkg=TsjQ+3X>im3{nlVTaVeWn_MH4mz*Kk>?z z&Pr~|KsLAFuqqD%ELSrqyFqiSal;TbnA`w7)w)GBkD=Jc&|{6ED7Mp;*%a%BP(m?J zC2P~y*By!}ogpMJrZW=jLNQl_G`n$gt8pWsXsf=&7;En4i}RkaG=!!pYRZ%7fFj4M z_0Y}c-Nib+?e4l?_g9;<(+-c}es~N|d+=2;AbMltLM%=2!yd6HXcem}_x=*atdBJG zk=0h5+Np**G4W!Kv8rYYuUIU^;2qnpcrZDx6${ZU#!bv5rh&!Vs^db`%26>;aXFbz|yD~OIKA##X|ahF_aGIrS~1vFFe83n_qQ!25CmpU8R}YE4zymBz>wr zmzaH;nHeoF7ml-MW_j7rY;Zj5_GYQMLdrXe06@Yf5R z=@pigA0Md$*t>qcPVZ=$zM+fLR!)}t%E|JCXynqT3c=KuT+XEQP!t(ABh+X%qh@9_ z6N>8Cwjxa)eI=UrXh!3PW=0cYXX|B&p)@Jnk5<@57(>H`W*r?Vl6SVIma_Ya6=)F) z#|THzrZ!4^Nob}yJLOAqd0Wj^DU3^%Pe#$alPHBM4EYsIIgK1A5Bb?O$ez3_o{_vN#0u$R8a zQ62KE-mHU7=aF?nh-MvVI*+Rpb}h?oI;Q57uNLR0&m?fjx~s?ZkQ<(*lpt2gF%p|c zy^1C{FI&!-O+?QeBb)BSfH<|w&WVt9G>+r;*IrLs(!ZMC$OIiR#BLWz2%oNt{_vw)^Yf&$^WYEc>x4G`G zy7{tu_pY0-PI_J&9sQ_v_vO7r&<59KF;%fur6hU-5-|c!>8f=AAWm#MeMX! 
z!SMbDJccK4*_=LU_Gu2ywjt4%3bta{hv>enAOxeVtcpU*+(?z2jF@SfUY_|*OGGQEY%`KDie0p0y4)X)I9`SSUw=D(b*RM;mk*@gGH5KhUGTkKBoaU zy%-71df4=ykshW0_)r>D2{rRg()%xkc-F)9qMr@jYH|K!DV|^bSb^u`u-@dIxWqlI z6YH?)en=kOa`>TJEr$~?r{TRMKKnCz>A&NKx#pJH5SrohH1uLNq%$EZb&^&nzz0aWv~e(|cBWobTC<=y}up;;T`$r8Tu;l?-!f$V-m#M z%BsMbI!{$;P*Cw?4bKTJdfMua2{@PJu=Nu38Y^O#(St9QzD%#=GvMt2@v#mNvk;~4 z#ad2>Do$V}#E7I;#Of1R?Wo+5j>=^yE!A!rfof+OpAahj#|+g@dOsPIr0>{1^tHPa zPXy)jk^ER%aN!haF8Y>e94XQ8obo+z);5n)hsfniwm!YtiP>VC4%GT`05;x4L)D7e zK=SomoXHn`Co~Lfi!l%SimJEOIO8+q2$jC&_$>~?Ev;-bA(Gvfo9`(yo)l!p z`h?(pWyYiBJ>v2}%k3uk#GBxPB%)+Htij~#2I07y(b^E(b^hsyX+nWwJaGdxh=Dj# zaiuQgHlcpnl4>S;+x6$?^W{Z%HQ#LVzrSD4Kla<<4_8+=+y32RcmznH`;MUpmm2zG zzwCZHEQj>#K;UFakI*QUci^3wiW}7vQh{@9rlLmmgj6WSl4@ltXjD%}6@B#Ps50Ja zdTmrA$6JnV<BywF=_gvNLPBPB`W z!X_5Z)$e8GUEdWOBR9tBj1<)w%Zd_~HhT9ldROY&JHkkf@esz~!OF;3%NT-{(Y9az z%F~!JMs=v(M`ME{3>A!;Z<)H%(uumLI<3Lc?ILx`$`<{Hp}XieUHU`kSGz_JC&xqh z(&X~)%cm*LoE~l{yIzNu&H>@;KK1F_A5}}wLX2iLXliD);FOGN(V^6;6`P5b)wrpd zA9PU&fo$763YBUpkj1P- z6j7Jv2w-mX;wwef9;5A&^|79#>Vs+2&Ck)|(WE`148=QU2pZ;i=+=K3VJ$xLN+wS7B`}R+N=-&79#m#!?-mh0z-E5uSM_z-=2M)X8 zQ=i}PrhB(qZ8wSc`n37(eTywPcoFU|tIcJ4JNYQqq*#rjgkY_uUyubFm(7cCR@sWK zb+2329m$dQyU3f4>w7J%^6p3U9?gjDhT_xQP`riGNM@*Zr~B!k7}yTg zia$LK6k&)ETw$`GUSR|uCQTfMe*5;(7u|sG(?)*EJDxcqP`&LlX_*dCX+x67z0k#A z_6p6*=;fW&tC~yAT->-G=JI)mn2kreLvc>F&kMjFUf>#-1 z^|I}!kWZgnAhI45vvUOuKv4>NVgV(%^xT{pUjHq5t7D**vlawB96&m2Ak)%*Kta zf-S`w84vpet!5K@v|(eb7$noTfY#Iz>IO*^^=3AED)N%qY#B))z`GOa6YIo}vyqAm zv5@hnS3rZQ%11llVL?o}x;SSm$SBc8`!?q-Vl3@Pk07=a9#69$y@QfqhHCA-o(>Ag z*@wk;6zA!nd;l|4J7zldm}y8!dGKmZLMjZsOaFAGiCTJQVg#J@_ta zewnKYbnh0c*@tev+-!&byj#8R7X268{5Nm@em<IL#fSdp@N))-Wc{WFUvQO12KJuIWu*L>Ny`Bp5cNhBc`tlfqxd7046i;egAyn%os zS}S@=ZTK>a*%zAcDSU5RdKnL%d8+gcVjrHl`JQBWR<+if^s&ZQFr?>iX?zw$5#__G zqP>)5zu&Egi`9C*>i&4MnD@(milTxO7UNbwVMQactXOU_FrtV}ak63>5uNptO8VsM z6w{#U&5FAtcyJz+g`MM;3b#y+=&NU;o z4`_6BKI|HKe7D*Te;(E!=OrkErE7rf#@QqSo7u3jRj@^% zU}WQZwql-NX5+?YX5-*|h{g_8o}rFSfym58jje(WBY4}=2o|@5_OO9h`?Ia8Mb(RC 
zGo~a&`Qc{ntsxa`>9-D13WtN0@_}Te$@TiTxdilSdx)WaaoT-(h|{;1{dzGUCOyOe zgAf_R3JW{wsN+70Ud=w(df#4-{cNjGMJJ$_;A71Xf^%Zb542R}VULeEaiQqr1wHt~_AS>#{$S3# zUx_Sji$7<`|$D2 zCJ_(ayY=da&9J^Z`*XYgwdvk{?#}0%{@o&X*ZT2iTwLFD^W}Uy?-%pG?%KDXbaCvy z!4}gqSku%Hm4j_fGpLpp$7tN$V}(05_eXS(=*U`ES4wT(_qzK)W_M`rkLk`#&bXL| zrT!>;-36lAotpb2x_g%rHsf^{C@tddbq`{W_o}_C=6>MjS8!2nh23sX0@pm>LwYO- z6bc4wK57LMtE$|>f0KKk{_<@%e7f$J=iPPxIg`F067qHWxVs;cN_W*S`!2tnemwoV zUwq!oPq<}pVY{z=wtLuh)RHNhc zS}bhzq*puz@29s8{N~+a^{(r0&ga`-cFWbaTMzl2-E41j)_(nYHY~U6`7GtEABXiO zYY(^g_}`uOa{m39Ejakh*8h2#K>E-9y8CUvUahuMCM*$wqpcAdohfLk@sAtl4aG)i zBfQXfkI(CwNo@1cu)F$O#j9%O#m0LA-t;eH=L~fiYk5oe;zoF>@t$CCrbwn)44sA2 z;_ee)kYPz<}DgruZyFCtPK88@i3zW~Zm&Rs5Wr3*&a@+0k|4K+CDU8(i zcm=4qhL;JWikJzh5l$k+^q6xLtYd5)A-UqQ_%dO;3_I;IY=l5*C=8RU((e}}j^-fb z7K9Unkije3f)ERRarP3XS&E$y8sWr8VQ|*#MZx)+C?rf1O)1>kORo?p$0^xNUibUv z_A@>EOI`r%&(FK_;d)q}56js+Wu?32z}ru|+oX3r+up2)F8#8btCYZ=e2Wv}eYK-_ zaJ8eqtTvb3Y@O>iO~Lgk83|&{b9~`$MS2V4xSoW~xYTfujLQ+kc5x_TxhmOAC-Zq$GZ|LH*3x;^VV2Do| ziXr{YK~rxZIYTK`R3RKk3(kQVjT)K}O~8o{wxvMFjw{j7lNk*gnmN)uGN3Keu+B`< zJjI*MxS5#)Es{nx_S_+7SR1X3Rsp5|e48gwEKxP=Ti2Ni5Z_jLIT3 z?KHvN{lAeLe}CVthc~(5$Nc>|eQsSpTdy{oyW4;{)66S)m;HJ*(E8UHJwfMoqEU~3A}{t6s@=#IHM zmg!6pp5=?Zb$@Q?o*UmYz3Q2_?}E#SLn2Kw%pA*cwnZkEjShrNcYv6&E1I+HoQ(Z#bSdx4U1n z{!&W9>gMHZ%YC>Y8@64QnX@Co&UZBEZZD7VdVo6#yKz->&lK`B@PO^ zs_;L-0#8xNd(uNn3=-2H2DVx|6hdJee&mo+MoWo0dn?+rKkxDgXvSeN zfMZhQ+?Y%E!}p;v6wV8dp+Q5lqQ!hA+*W>B*-0H*dSKbnu%VgJ1cTbT>?%;@@F)Sy z&$MTMO6ge*kEqd>;%l*^^%z?4(WAIRq@`D!N^n0xih46V}o-eHr&6DjeS&^YYEhiMRLtVsjceMz+IY zgL_7B-kIT`!963m(qZ%wI8*z$=JvJQPZx|70Mb8cug<);X7< z^zdPEHl9jG4pg{OIYts2$zzZ(*-5yOJOYV^!j0515@{r-C-DFQ%p}xE9)TqHGBA@s zBY6Z8610^-LE|!TX)bnR;u1owk&9g= z7m&BlyNU~Z&FiOLWC}y~J z?D;vtm5x1+z_nw~&jv08urE$dEo1c}GEi%>qUhLI&MzgaH4_G(QWam+@(`k%SRuV7 zpbwh*G!fqw9C-L{d;kp<5$I8(FVzDnn;JLY+Q|8E0xH4ZrTz~mp!qpgg_`>+vKbW` z>XA?b#WY0PmTU5(uM!n1P_dyN3pIW0wtnz>#xcfPRJISl+)$4;{04U3Hvf-QK^g^u zmde;^^g(=_N}AJy&t3YWOTTZI-_L3@oVI|S-WRabL%qJe?AMF=FrhS11WfY*^`?|I 
zeZqw&rPpyR^+C*7)UY1Hg2WZYSWdi>N=k)LMyRk+Jw)X(SNAsEpri<~3JcR`WygYs z^&l2wH1%oXGbXLGO4Cd87-*i>zd$S7d_CUw3j|}8euou-m7awGL+S2PdW8_9|CyDe z7P!v|;VBCc#=KOl!nOd@SlGE0*n_4lW;AMOMzmauT1_W0>C8J9>(B^E&1l%rtdR!7 z;B1jbB2tUSY)0dTW{oseM72d)0Izju>Pm+o%4pBNnaLxd850ouhi*MwuTH536n<2@`|`v`$jW9OL}Das zKdw;dADO=WakJn;G@EfVvzi4JQJWcpuQUT^H^XLTGfSZpq8OVLVxdXlUNiNP%x2Kc ztY%KrCu=eTbyabOF6@R~lUY>~@-od#IY|2Oa%&6$)8Pdcu6dp*bMT>LmitA80BvlX z6ldtj0irF=a5w+1FjMe$vz9j+$Ho{L0*C3{CLc13X0w(zn$0lxRy8gZ7h2%$H^ais z)G@P`gILE#On+2#rm@j+VIgFnnGYC()vV6UbNlO5gQscG&5Q<6JyV7DP9T~M-xFXKhvK7hRo^FY>5a7hq!A@ zZP}11osSzs264wZ~&3_znjc}z0NrEr>O%&aY7Swt!`iGw;1^Z1f1VWYJF6)EEu>NZbv`N?O_qqfvR3sU5!RGR4E>Y|* z)Lb6RMVZ;`!p-H8T+;tQtS;7mvac^GIJQf0OyY|!f>C_S)u!cYK03ea=W01SL$T5< zGFr{roX9#S3SREk;S9GM99NrpyIEV}vznpH)c|bEvJyj`nRh5=v$n)%ju$bGzi2rdGT>Rlm|rZ@Pdw=!7Ty<6!GnvZ{Hfn;hVJ*ndh=nvoFFL#PF;+)?#t-0 zu=|KOD#>nFnZ+haP4W;)mxj^kZFj_cK&q4kWV0kU$-^XrNU$B_@T^=X38Ao3H6{s7 z^03i0lC2+98eK!FI`uxu5ZN4)wz>K%lr+uYb4+@4rrnmD@lZuf5;^hC6*8V&fIK*i z)3ekc`bByd-FkJiOvag^)TBmK6l&8^m8sQyXES9=-#i^ z-DZAuv*>eixy?zL*?(c)efdP^u!Qm=4F@uATbKIZu-_xhta+|PI z`aE2(W|uj4COj?TeZd+&xM0nnhWVoJ{v*AqDRHW~CbAdP{SgkOQ|%1nis^3%m=&oh zPE`z1iNLrv4}f)wF-9^ga#NhHDBc_MUX)UO*gc8}plnuzrg*@J%4AYZBkHc2MulR; z(yCxtv0^Rh^(Y#%o1{p=*)GE}g_;pHXqw!*l43qI3Of1X(2q?B^0MIPl)~P;8&dLm zKCCj`ZAXGr;=w&09E>-c_v?N+zqs7amlyqFm=cJ$Q%qwSdyc-a6bO3=X@_nj#6~!c zFaU|Nab2D&1tog3AT+^=f@v5vwf(p~HG-9df$ifcHNuJGXg*U9!grnS*#N%`DO@$TYpy`j3R`;kIj1g1hb(!^dIKE&Gr2 zi`%uflPc={Qkx#W)V3J@vg&?wHDC9e2|>t$8Y5dTYnF<$sNW;V63l}3+ZV5za9Zou zy-E@6O3;*^MoA!oSqYoclPCo)o|7vp)u$aV9Lbh-um|t~jBRwaROV<@(0gT9xS3Hi z8n&$Cp{|X>fmzg4m{!mH?qEd)p+eMyhRQ~0A*5Vv+t^K&D+I+w5I8JxY=Q>C;BwPA zgJ3x0;@SUE!a#CYVD6`cfzYRn(=d+y({KLpr@~o^>mkW)W~+6Yww@Bp@t4ascV8ZY z_5J01@gMW$`J|;14xXbp+dP{K8+hpr?1AE9td*f^N*BBWsYwasUXAF`n>Da$oLYnI z8r(Eatr4SDqpE1ci_$n^sfaxq5pw=e($L@|_(P5i++3gc+o7ASZm*qeyUX;wcOU!3 z&2ZZFb$?%9)Q2ZMC*U2%;EnZ{(@!qsW`3Lw!GX8Z)t#Sv$#lkKE&50*(OA%zu#zqW zHq+S_YhSP3wDrHeJWA2b!;R;Wc@U#Lda&_4CQsyCk_8VM+}d+#e7(m5MrBhKDn0WA 
zQf23X*&Q_UHM>1_E_z1O{bMzVC>S2HkbxS)fnr*f-hbB9RcG8W&&eMlVsD*lM7KQ!FH!gZD82IEMUrx zRZNetG{jLA800cJW-w@ACNO0%HQ|V7NkA*W5E(U=A`Br04+m77z1a-iyPNsqyqop2 z%Y3xD?`DhD&H4NFDkYDnOc+VOB#gT+4avR^OeQ?`0N14=Mn)+Fx==PF3Y;b>Qd z*ak-pt^zK%Otkg1&lRFLRKj@|&2Ze{>fi*7N8mX)sD&dn!(oG~9V*q<9E=JDy7#)V zs@bh(TK$sUtn+Iha`k1~`CTa_lY8LQt7vI{jS8#)2OJVu&Sxo|oF%1oKihWa!@HY{ zZn3)9bpKm-HD9jQ-FjH`+abv=hJJZ-eL75tQQXe$%a?~>{;^+nza5rC`gI_1n$rZg z-{>ftU;wbPRh>e6ooq+jv3ZCs1~s!mV|ylSGzK#%HqzrnsY|122*!3ZTozY&2FYFn|zJbXJh6eTbI^GID_I1R}n4$8H z9;sQ|ON9BnznQSDuumimUY%)$edPJJmry~xnXs*}PbCxt5+fmS9bv@MspK+YTVbEL z!X5zfNUVW^n0B^OXv7LM4UqDJLY2puz+BKH?L%&^`}MXzZ8wi_f1f(& zQ?4P#1m?i0>539_VJ9-ZfjwTaRM~OFD@+{yUNqRWN~!xyyo6vjiOu9OOoE`8O~5sj z$3RgR?Sq$ZCXZnfA&}Wbo5>@XP)%qnIGr$1Z9nk7L62IDuyJC?=ue882-@ zX`4`DG)^dy>=VlLn?pWC^!4N=zXB{_S{2Iqr?=g8`mWwASM&9o&FAIp^mlR55#BG^ zpgi^U>h$06$&?v*GL{tql`6KBjRbSF84p~(a5mGPM9$&2q4ZPR2mx7f`c1y?*Yl4F^Ue1+?-uh} zx7n`N=_BsaC%<}sQaq3^i*k2gKGC9Fz}w<^V7vBkSK#?4$8|-=Pm z?KXFTT+h$D)%z~5q;~n8%@@PE`=MVi(~Bpb1jPGEKz#C5@bunNipaKt9$rZc(}1`~ zk-S}zo8nYO_K|`|TVfe}rJ_Jovm!Oc$%=v=(AJJZcK2c{6w|+*SrMD!^bwU~R9n>@ zL9RlK;xSkeE4CG1527`@;Xn=@m|BykPrgPmeM_-We>=aDW-H;4;?3_D{fFVCPUOBg z(ZO8@7B}bpRewG$%7Q1dMHzz@YFm*T2SV}D48#qn7(fh>!1yo=PzS^u?GXeGs1!i} z5_sy0k5b6WFdRR*$scnBuQERoBqFD=@mSH5!#cXV`6$|^&4`?ZvwPMpU#FlzQV{8?U z#qQP0MRLqq(6k=b3eiWSmUmpK<)K2$H7)~}Q*&@dU&W=)7b+Z#afPO33NE$vja3d* zr$ye|CtSX18MIK$Ng-FpmZ#v%TH3S>S`r{wN5ZN!BSz;6^%C+c9N5LJ zP-iFOMrI@n4xRNRjfV;{b|kFkXW7(dIxmOJx|!lV#vsOK%L+ASUK!MzXJS*$UPQKN zs^cCQ}?^el%QVDH~n%-aDih) z>s9l}h5C;Qqhv|}yp^o#2-=GzvxgZAn$~e8R}thoGi3pDm_Z9On;GO=scPAU)H4Z@ zs>~wz7|mwX%#3lCX1{7GvKr3P_so@MQYwx!V#dwPYL@SWN85p2EFF9rH$&#=!a-B4 z*O%3gOQ{1f9sf*HfhyAa^joJ{cA--E?H?LqoLuDQ$J$d@{dRV_D=>xK`*r%La+O7X(j`LLS=}=J$TppEHt*N{ZFAK5@}i$MMMEY9F|Ni-sbr@Mqn6(J$Yx7! 
zmWNvAe;X|!R#FP7nGzc1fs`6hjHMq$Vr5u@I&ZebW_hTk2nO3kT|7dKWpn_g+m>ZZ z`{voJv$W3Ok&!j1ab^KEm>eXfu!Y%RdIi%n`jEEI|1w;yJ`UaXa_HWzSN-`c{Wa*` z-z;aDaFb+LSKYUFg*Lxk^qbAU3>WjwcK!LJa}d7YFlBv$P@UH1hIJ2M6gC551AKOX zX-KsLLIZqufXq(JfE8!9nx84z5kl}}z8D{Ca1{bZ^YgLhq|j@}wPrs;(W&Wxwx zEGl$;z@Vj80TtO#)3t=|sBS=|ptjr=N=MfB=|< z`QofukDFe#9{vC9y<2zd){!pySFq21XnUMcn)i#}9JkX>;y505y3^ylC=dxrutBHIUfA*XZhIjUqr$!vQr5A*b6jxLa$K)Wrr==I9 zx2%$K>{xr;DLrqZY$@N?r3c=Of=>>cduXDDul_hKPm^>x$roQI)2yzOYp}W*{D7p+ zs!5idl~r=uyfYpzXUl22D62Zzt!(d^5zUL45&w$Xnck#T^2f9)%SFCBuTyl(Py5K1 zA9ecv^zEO2B!a$!_&BN7pmDc=Wo?Oz+L9dh20bVUb_RF-e^q5yS+)53 zo8%NwOcrHQlpr)^c02Zd-%0M}+Yw6`15v(`%#FB?;vbXU62txTY>AnJ#F!GZ!7|6+ ztt1v)Jt;CTC=aGXWbz;~p2)nj*7KOijMc7}$eh(wGNwah>L4+d=jObuvq>_(Nb@3Dz%Q~>Pto2nf#nir|Bdw%0cGR24kdQ z-f+ZF8+?;)Ibvy+Ek}8f;|t^%gBFUnkjH3hbKFyMG{OkB9MwUNFOj3iDP88SaP9la z(F$r8LWdm9L5?r7t07pfU5$OjD1~)_eH{C^G@nR}Tt5fd-B;V7+7uO&x^B6q*al1j##TqRlU} zg$Ch~Xwpj-y)g)lS^)o)%)oRyw#@WFW?btSah)J9sTr;5WgV4uhD^7R zLVNf8#;+hWt|Q{C5kYYR11XNIqR8Jn#b%Xsk<3NI33S5w!Chv{I!ngW6#k9iMV8BA zkyV%~tLM{vkrd@3Yr2@Soi;5=scxmX`r}Ka_+yq&)8wCNaX#eic_D(1yf9sAVzDuN zgKiH= zpX_Il&zfto8~GR|gK_F0pRKumTNY4|;M`GbCgF+lQf_H8XnHl zqmd#qOEA^0?ix_}8CyLSHku!y0ar`00e?fxfy zzyC@5x1&&SQ2=a$$9e(pQo}R8TUt?&-X<+Adyv-i(sIUX$;vUquNHbrOH!L;JERp3 z(t2K6@O=VrBs;W(lYOM6oMTHX9;C&PmT*BcrRDkxi%d|`!g2>;JufUF+O7bGu!x-y zPYX+Hz8zEtVKICR!>ie#kKy~;LGAfIh7<>JJufb8ENciJ#HB@Fm(U2w7MDIq>v?Gz zV>~b27%j#7^0c%JUs~oMt!Jgx%uw^n*YNFSPiZ;P!xSE*_59hgCRmu+eF$kWGf!UF7$TEXBJ^-GfI>>gb!*5TB&nfF|c#-%HCv$LX zI~+}^nM^?rf)=5A$<{a}93@*Pw3m8t|HHh>uhK=9TxL~~O_RJRvI@7;&6kS=zhPO! 
z|K6DZWsGqpz`H-b6*B?~&I$RBbPAAK4mW9ZZ*O%|W(XmaUfBYtLMl zMzjh%DO(6+_~%v~WV>bAh9HdRH5ND<`<&ZooeomH$X`0h_9f1+^bsNp=1i#a#58ZG zbfxgoTY{_%pIFS;zNEDMf0|tFqJsaCOw050yf{zhQ~0&;594W;7R&igr5Nt$YFB@J zi4^~m7Rev8B7@IKCx@I;Exh10YvJu0h=r9wb;~g*>)3Ks2RXh#jzI{+I*{%tYTZYS zK}d?sqDzeWAjTJnQL1R1;5kHXQLCREJzI|QAjg*+N69p+a2!prz2s=LBxXiC#1hRp%FZofbP|GY+;}Ft`yRe&6IxKDS)_$#4yWnWPE*jn zydnC2pOTu3QmGd?=m)9MQbPp2XHoJ%Sli>QDXEQf#4d7pq9UlvU@}OEjjz)we2qF; z0lZrhNO4aBDPFVvdP7(MTMUl3gWOq)N8E`{VFlMx+p&bj=Qj9w!r~q+4N*{Jt@I2> zF2nnQOQ(A2SmM1NJkk)h3Qn49kVD$hBNiD8^Qd*YE z26jlx9HjNUv;-#0nR6$)hqSbS7|c;KMH;*nD~uRUZ7?av%UM={xZqEhSvAe-IvG#% zMUnpfY8gZEHa#t98y#EF`XK0C3L5+( zZMc-zp|7C5;-6@95cDnu9fEVriF`bvy##HwqtZ9Dp!*Qo90c9FlCGsSP6!^YhK!<* zq?OV_Q!Ppw_nvy|UQn=|WLHJDNY3&vyA_oY`c_n{KfZ0PR%@??h>}^WC0fehYjn#? z#UR-7k_UM`FE1m#bgUT|@Qu9hCoV0i8SM^nnS;1E$ICh`B6q2W?I$m5gJ;W29puG1 zUdl-35*_ViKXVvVq#E97dG#v9e>{0{{SD=u(T+8uaO7>T0|qiN!cqp&AnV`2J#jd3 z4en%}oR;OHUR3EksWbE)>nVVpz%Nl_@~$kBt8|)A+L4sAG@mZ3ou>4Zbay?O?H}JP zT5!r?GL+|&1VgmdO4lt@8>z;M4wT3SoFo zlIwS&hNy+olu(0_K^}tqA%_s71tyJ<@xkxI}rjue_Y#+rvYWnjk8J@$T zywujQ91a`$BxS(T2InXz65U8`)bIlSJgvcN9G7$4q*|4;yN0SoxpN+0b+-$C^~X00 z0Alo3YTn8NX9ThGK*cWE86lLAY}v_!?0U%#PK}P*1g_sG`^gSh2(e{n4zlZe?t+pm z+2OhCCpoW-XG=~UB-i)UIpNxrd*zq9gfwqW!I;<<%7$P>#0C=wIT;(bPp^3 zxzjc{2n5V#y$_#&6Jyt@=|-n%Rl%=Tz>hwEZZH~r%4B?=Ea2Z+A3emWK0Q9o&(EuJ zIUhBS=ct}%X?2<}9-~$-Z{Yhn-*+H+Ue(XfBO?QV?SLf;Yqj$PED^i|85AFuKYjb> zAJ>zC$vF<(jmu@R*flO|x&&Xq;YhH19x@40r3`$~oNz%%wHc-06D@;PFVQ8W!n@8F zNnV`ol`1*mJxNNBPqcKw@KMzhtIOBcA}7&@Qd^mgp|4{48 zi#V6bX|`BoRnh?T04SkG!j5kFhEA#sA52b)a(0H`2JlUl(8{q;RDrmXh3Np%3J5@>CP1{p8WrvHAd=?%Ez#?LET2S3Sr-F| zkP==xGeFJW7@!UHvfj&R6-T{fh`sa+41gAtskatXOz7v)+qf53`KbSHZQ4&|UZIy) zCFg!8#h8k-9v~^m$Uh(fL94`YmlQ1Um$={_Kf>cKy)hPhGp{|dosbDK#G$X z3D9shGH%T9b5w2t+H(V!;w1Z$q|^Moz*g{NO%Z3+?^*r(a&cK6H}8+K$$2&!gT_6E z?Qc0~;57N=_U$3?1rw~?9MIl`PaUn6R<3ncTh{?!2ue9g_+%iiAWW14bPm@>
M zrn$OV3X7wORXNV;I&b-CZ$X?~9;f5HI=TJG$%BtPhVehsBG0COh3MjH5Q6BRdBssc zY1Z`*tssq`7s9$gwD#c>X_bxx)NgfW)5#<$=ZoYjtMaoPZrZ#++TwABC-?{gFHlT) zF-hyBwQ&>l9faA*ZV_qba_TF=s7Lv@QN)rG%TQ7KR1??Md54Z#2Qa%rie;cO@4bY3^K z(9L{DKCU04dHeeD>fs1JF~UzgM)9W{!eWrgcNghwh!OA#5EPoB!um)>ulYI*dg@x# z*D;jF2QP-`FlHLQO0Q;Lw;+}U7*x>C0_SL|^124SOq$Dj1jm28Y`y4PM9;>@kDiTI zpUl=EJsYD}pI5`+4Dg@8f>Vk}G)5@Xg(prt1rpDoz~O#@O=!<&^2cdH^hjw7+S>qYn3g|jiwhJmayee1{ssPF!hq%;#!)@frX$7|OBKZ%@ z=lln#8yL-5S}pS8cveobs>pxOM(5dLG|9&BzbB)X@qL7RJ&Q-@yWgiCT>Q=FlK;u7 z-}C7--;py8K4vGj*r%r{Y@?A%ssU=ax6p3tv;l`|v(bjq=@t(cOsC8FLwGMw!Q#TdKV44N zvq68w-4W?cS|xu>@zaB#HA+R}1xr;e0j-jv16l*v8b_H3qYaM34DkN3?eZqUBb*d) zcAI$f6!6W9yuNtshqkdi#8nW!gKC}t`X}viKbdAJIGg7ur`h5<%Zd{)5z8sOlEWXH z1-n}d=)Ex>{2x|q3P?J>UuC?PZOpNOuGb=Z_wpp=${$#5C`p=&f7FzAK#%k3W4MdU z*9;ZioV%>5g1!Tse#*y{C+4@ONw;~1i$N0;*EqpjrHJnqXjoH$Wo zf|AI!na;y!g*KLi&VVyD$fIn!_!k#o>mTXZczQg}Pr&x4musdT;^%6bXM>a~cByQCk`W z@F$$uLiT?L7;I>Xm&#h}IEjIFriF_S91tI>kx~kTcb-J9Svvg2R1&kn=I#yOdI)eLB=`J@9 z=K!Op?kTvaAjI_&^|kyQCZ(xywA4B@STO&(-gc1!c7?pDLD{`h>` z?8DQuevfP`&l8PwtjuKND#spq9+0w)GSX7kM1$W!$~%-Y{z8@|WopFr2`O7AVxU~h zV8IO`axskggxTViuVU~`Jm@MT-D!`cQm)5UjHMDfw-*2+5zVfy;&ol>r*AL0oTJfJ zSXTR{j%M?2wz?Bboa-d9#5={BeRpdj{GSP|E-@LU@8Q-2P;cRA=pYL8VL*!c-T|Lf z<@}NR{m}6S(|mh=v2i4+FZ20)&B;k8yLYz(qAPjsw&B-^qSC@@$^u3itl=O_CJy0W z3d&eOq_^tzgxDJ6>N1&@#d(r8ZQDm{Al`nY%I4E_+~7YHzd@Nr-PI=2+(1-kY_p}_ zT0b=L{I(!%p5UNV2)s(K7Lt!(z#be&waEqndgi1ba?>`c`>tNuoq9_4j$vo@$9|qt zJx{AeT5P>h7fFt=#=r`DERlulY}7>oA~g&xK&QPr2yi6Pr1o5Fi9A){z3V627l{$t2z?3Awn)n#s*PuO>4f@#zpn93J5_8&_K^hgv*w3 zm{~x;Fhxob&lmYL`Dczlb7y{HM{frpk7ReGrbQeF7*b(QxUD9dpm0FX zM`n4HN_K@V&?NkWpesSpu&GJBppk7kn8!cyz>cJn!%L*=B(91e6i$}dj3-E#3 zBylqZIch&Qy8X;!B>w@o&Yy76%f%2dwKNW_IwxfV&ZA}L6+lXWGu3+<;O~t#Vt@ip zvNKGQCqI>o?ANTi%Ewff`Rc6^w)Bqb``5P9XC735c;E3tWI^$n8qo!rGe~JlL1trA za!8Wl*BOqF&B5fi{T}F;ESDA36OkON&`Kq>UsP;R5S)>mwB$!PPCg+-G>*d#W@FPn5BG zNNM!D7Svh#3LDoBcZ^v`f3ZlbVw#di3 z8qf_IOrh6t%lXxyos1$<9-+3Fs~MO%{0e!ezy2nfWf*Nt^Ww5bp6PKq&58*)B|BaD 
z74KJm#r}~u{^lF)+P{^_x3j!T>tQxSfhT2|J}EIu<>M35lSeff~TXi5}dyF?NRgdw&=e&DMOHkRV$W;x)<~laJT6M9^Cpfz->Kv z!W`lexP}F>bht=2kkElgE%dISyY*U` z9_(HJt)8Dr^oNk_~Svt&{S)n3m}zS$tV_m*50Q(#9+} z4xCKkMj%io?}?WEkgU5JI~Tc(CCzkb-3;g$sTB8vxVO7Y%}`)V_~v1aILI zaGo_*1-@~QJK8C&9NBOO&_y5JhqP{R$8#(mY+kpoza1(+=AK$^d#Jd8glJ` z`%~2AuutC@R$Wd?Vf(8qgtv3o6P%V_Z$RnthpLXX|PjqAZdE{?*wenOAME;W(X6 zR|L1XL98n4dX$b^rW*vW-2;U!hedssRrNjmZ!|`M6wO{ZULLv!dsy&JBL_Ai18>nI zCslS-*RyBj&`$7_3H(&d4=hgs+U5EtyT>2_*pr)oFVmZBm=U#BTONnC*PdTvLnHeuU);Ry|xMc-DX*g`gC*>S|`TX~W&*mZ0&%Sss z0{GV#muWQ!e{BM@XE6jtG|QTWED^=030-1Gy&Q8vb~>Lfs|E# zj#TWM;o2(Bn_pvAYhISq(Uls(UuEaj2JTP3w6!6aD6}?;MK6q8Rz%C1>cU;)(h%x4 z16d=1`<5c-eOLS_rI*|(S=C4J9d<)gg_&TT6H)DpZH_`3$>YP1!tz10yjW)|c`!r|dtFJV0?yJMz%>=$(=7*0yen!qQz2Ypq}Im+#dB7iD$y2m;U z>W4bm(FW^{;iYp$pMwn&@NEu`Cjlx7=B;|!FA)qHfPE5WPd25}D9cZ)=g|coJ)sC> zTkum72vV~pU=DgdNYAw;X_IJ_c}bpq0-2fjiB|Zw0LMBm3#wRQk+#m&13o&n6|$E&qt zQiio29McU_^{BN2m?n_v1^^QSnLV$yM**&9kZYsQ2q zjkS)+m=Im$GbTiE9fYM=R{=+l+*vPToU1_runu&c^?o{bv@ez4B%lFj4!G`+d5(0V8EW2(ht zrF^S{4Z*2_ug#1HJilUuJ%FaIn685?u`ArmL1FKnz}!E!eHuwGsP(ZONa_QUPavtN zOnC>AtfcSWo_z1E<#C-^=Ugw8qotUIJ5ZJ%9A&Q*6J^uGbCObcM==}HC^Ng7-W$ri ztQNRj9#l>3?b|>QCah_J`@onTa!lhDGI)1z${i%2E!ED}Y zLPhY+(m^%#C^fiHj+I@xxZq5hi@f6y-+iwL#ij1W?JX^Xl8JxT|Bvoosv#$Xy$Dmvt#4DF%xjk+hoce$LWJj4NV*AXjKpx ztv-PZLYfPTU_~9ixgn}%5-|*K1+$_HLaAgF6og73ulAr(6g=UJn|b*z(oNB5?KOu5 z6*;k-^`UcIA4EAfFGp~XY~*!gNTOi%gyy14M|&0_ zNVmI1m9N$p1mFdEo9*5wp|8RXlQCrN2vIgZ=#3#(YAmn?NN^2TS z8?^r*O@83#!jTh-m7dd~2a^Os2=0WZA#^tGJ6&Z2&2ug7^z#hM+B$w~73d!PldOb- zR)W722W_2g`l=udNGId88QMl>r)FhX`9_zz)>g83WyO(^Bx!QjrLN)dT4P8Aqjpig zj6WYEEnn91NGS34J!*ijF;Vh{ z$O(c2)pf&Zt8nU`5~>Nt2tWK-w2A(=ItER5q2iI6kqjK2#{k#N3o7!0lc$C9L-YKL z^gQ{`?CO_n3SMnG!>Riw>kr73=Wr@+v-;@g-c|yB{^f0Q{5m?#q=0}+t51?&d+Y(A z$|%}-4es0i`>Ws_=r+T}D3J7E6-K4ko?{1kp)n`iNPrChl+$O2XbQ7F! 
z97=o1cHjrwZmTSvCgZ#sFY`sSpT3<5I6eC9QM<#kxGc)+;^aE5XD2^B%A{NzpPr6Z zKVj7Tg!Ox-NEa7rk^GRByBh4XoqCAJ+f(5?hEmR%!ObF=e|pp~^3^0OZbs>J`t@Vr zCfQkDWD`tECBJ7?nXIPc->7rM80V!jywKFT9<3t=>~N%pDB+2~Qo=!HIVx*J^g$aE zM!Pu)*`t`X)`|ZrBcst+8C~G{iQe>}0BA0LLssKSpvX>!j*9Yjk$Qy*W(31KvKmB# zAwN18p3L;nVCatyMiiC}hWXH7=olll)Z9h&wH}#o7OS)Idv@1U5GSB^0Q)O__oS?N{kfT^rW+lTeLQ7 z+kIk=6>n^nmDPD#e^dR+ zRF>Q^4!=R>-#CrAX;$XnS=l4=Zwxq1fpjd52~LHBx)Go`B@D%n3$bC4c0Cu!^Cy~- zh_i<#M79`mZ4Jz{j zePdK#B?17`U+ zJ8Dy9XNlcLh*QXBS{q!DS`Asd^yb-t4FGYPNplM9_DCwxV`vQUbCIOLAo1JRG^f)l z3d3tPQhm~#7H69R8$aG4@q5%i4FQ8?E%J)CAAFK*@Sx!{`w5@-@ZX&6@8$@u*nY=1 z#vbaY_Z2@YMBuf<3*BSx6TCa^1cjfI0()aVfZJlh17p)p_HkPPJg=?j056IUGsptL zL{wT0-ptbNHsj9eRydmwBCVj^d(Y1V$zq@c&#$Fn=6Zh425qQqvhYNMzqH+>+uQQJ zxy^LZ!X3a0SAr8g_&1eN7%K!kjg3@7rbuuUC7Op5I{}Wb9!xy$ePfstd|PgfxuTY#R$c z9<9JSsj|s(oSh&6a@M^mYxA`&lP5=^S37T?6@!%0^e~_S-X|%jy?0hgHN5h?ES5!$ zZPv?T+zd#qcc;B#0Xhh!f@jexm=OAq9|q)&B%ApH*j^V06QF(d?f1VXjdp#VtPn*m z@6+Y^`Assv1V~q0-EE2gU&!r!((+gQE7{!~W<$i?j$GdTvELkK_t4RHjtbUq6whL5 z>3i^ugQ%Sqxn-$Kh&Eq(2jcmRbX;t7NM;xoDmbl9?y`}V>l|_yt*zJEC%zXl)!+rLi~6vZi!JbbX=ZA5{I>HO~rX$Xi-BJHdt! 
z?zMA`c-Q{$kAM7}oTbz0DY!)qZsYu@zDOq1@;te|$cp5bY?8xWTtlprUZwdoJ)N#| z6gPvU=zbwTF@AC=k>RiwP~8okp)!^YbY2Q~fUX-lC$u0BsRplG>;2;$gkK#w{AFNL z6nS=ljP@*QDn*M=EdKu?nzp!^gCjl3>T#9N*Eg@eW7Wa-W1_oV1%9y**a$y;lS`{Z}ydme+P}jMF{f z)!G1*Kx@Aup(FvX*4Qu`2HcJR-4MPeaQnea#UjDX5i-s#v-;>EWYg>8+jp8o{A0L& z%mC!%pXqA1VNb-Y6v|1@Y5G8X3B-ICV#Zq?sk5vN_-J}a=@>QVtMqENBIPZM=}mH- zFPg*=`t_KMA75nS%i9j=-O~1%d)mGMImC@})}PYxvY3=Z#At$ug0ojj9|<3+Ej?Wc zLuZ^7fl_=2j+OI6dXBI2N87l@(_;v|PR{Z#C;0EcFITRwIsNV;t)}@P56}hYoj1(h z9IB(e%L5F;KsZL-^nkHZfkFAhU%K^nb!PaY9&7}UWY*cpC!cB{bhLT_gu&{)-y{#Q zDCRyneTnJ_RE^)~(1I6~>jE!?k4C`mvKK3()VBLp1_);nDy;U45Dx=W+gAW0NF^1qMfF~+KROJpu zq&2v#Qhd0~0WRkS8!ol)MI;G+hYU{h_7DZt3rgpR?IK-J>QAqE*yYHDYybNUq(!Ra zz~s6%feob+xFwUAV1XyWM=K;1=xy%neBwmEYDlc43Gg4@sxbJ1{E%45;Db{{NDNQ3 z4Jr-@i60QMt_X0OZm|_^K4Z%~Gx>>8RN0&L~L>ki`cfvqLBw?*bU9N7bG?X|LzC@d@OqTb|4~{O6zTdv7Uw!-LR*QGIH&%P9F%N_aC&8G${zb16-e;EM`tkJEJ86aF6vV6| zx2SdtIY{qFAqN8?;*hx@9#{Pz9x@HxlMC%ss`6f{a-042FXJ~d-r)9Y8+a@!MzZ;h z8u?-ieTRIb^9T99ihP?C1aB5v+aB@_4%>H#0)Y=USVga|?q#HR69j^-uPLa4@WL9- zevWvgqvi*JfK-Sp(y}geeZ;*l97+g#gqO}HyiRq%8>NN+CNi+_ zL|E$v?V#V1?(s4;K3a%dnX5Y59i&{{i!!R9A{QJUSJf*YOsR~poMl<@$Y|FO9NqTq z^T5&X5F8Nz9l!9C9-`?vit z!@fZ_W;{#1W6ZdE24+^YmgIR0?sQ+&q6%!(7BP;*$>y$ z8-lZ1cwSR4yxK!YqmR)U%1%b_UB8w5=w%&5_ zpJ@U1@vr5gyc&X63{EKLIes6XE6+p?QczSB5jE~O5&i==>Eq@Hr6g}{!lr}Mi7JF; z$P7zlqVb6_hG3%Lc4y)2<2QM#jHWIL{{lZ@#!W$&2}N zQ6FKU^k~tx6)&e?Krc#w_A~<%GE1w=dd)iAy)tTl%m{zv))dyO;9DXY#wR9{{d3;7V?Ac!zI&&6VFb}bN>QOK()x0QzZ!%kNLJPEV%!S6T1I7~ zWzO|_VsPJgz^hF#!V}jRJTaikLpb*L%}0<1uOCiwR32HY1K&%K7H+SgC@W!b>BJyj zgep5r$BVN1CYe;_Ji+svwC6dgvVSl0Dnr`cP1A6h&*vzOUG@{|`+h>b)qcVkNY_SO zNh4Xr4N|!tw#fD+%fP5TsDUZ=QRD*IKj2 zKt3>jI~Ah4Z`61Cod_|)uVOiajQU7K-y=+uL8ur4bwoYQ8XMjdsCW<^fky^AEVOSOcfhTn4AWg^OFR zDKtnmV=-`XWusH%5Gc+^_g{+u_LSO3K0B2}W+;{W-y3BuN*G zDnDJ~Kn6}H)#b7pXE>yh=F|EQ%}6sg8~prB(thm(go&fg9W1~5FDZvsIySxck0k}Z zWrw7!_r?Umk91(3z}m&3Yk4q+PO>Wu2hQ?r+Qi$lbTPj80}2GqP;#EmS7N%2#ee<& zyMO%o??3&zYw_F`-d|fjP6Vf$Ca%Z1eQj)qe7p*_EorUG2YqB2?cjmGY+jBpMz_D> 
zq?Oe};LG`91pfNfDr5KHuAOFIlK;8@LwS+ctFhRA9S-o@gWgHQi;S#hZ-GFi#mRn3 zq2WS+m}#&=qk);irF&;iRzwP`tD>Ejyvoz$zw*To%Ts*sZXWL=>T=Ye&cdzhvVfqs zQ#8REuPD(VL$KRdT?Xe-XwRzR;r-V`mkmw}k-Drs91A~eC;e4cVVqw~z}GG(S)xo* zmAA9{sM7yeVq?pbZlVPjQR)>*#x~qUFO?0FL%wH@6h&GAzi}sw3TrKF7|lMzDB#<= zs5sdQ%q{nYk;+zdz$o`y8-r-sFQ3=OsQYT8wERP@_gfor6CG>GpfMX0P0zsSg$`U8 zd6O5f4kK$8!y+ASP=>|l<2M(~JOQH*ID=@3L+u0HJ>YwOB-Q+CW8@!dwb$Axn#~lf?IPBM zUNAb>`t>Xr_xg(Q0qy}NTZWaw@nH;LbG_DtHUj4zJQMk=~d^Hc8Dw8H-!)yw(3tN`ie2aKlWc|9tN(Ze5fkNn^8 zM-9Q-1q0`StPQ z%V=Jf)6tb0J$UmmVn1Kx)8wCdF<~ve9Wn?YeMf;f!1duM_C9=KjTipKi;y}&_>ZJK zOYZy9zIog(@whLot^7ZU=T1T2P*(8L(K+JryerL@NK_W$D9XQ#a1ANp{fqa^eMpH! zM8MVv-uHKySTb+(m_!7FT7B2Ws> zN4J5}9y|0{ZWXvAo#oK$&?881V2soxO9za4uVGZuvUa9n=1up6(MW0@y8}kK*D%^< zWS3PVvm0bb&%#J`y>!56_FEep#t)V@%4qt*7`0&^>b(vxV~AELub2r;e(KE$X)r+A z)JA+F!D~_ zcyPIHKNy41k`3ct-_+;~N@-acOso27qr=6;Tp0KHih-}>g=A7AS?hYhD76d96TDpb z1RP4iNd#{ui&ld*H)&O1VE0=#UL4QAWXE+iKE6$~ot!O;rX~KQ&S%SMx+trso*g|7 z8P9(^ZnJYI_$JMrEH>>2I@ov5MTdEIUIU$1IZDQoeCJ*0u)?Xg182j5vCas?o4GLUO3psW(GDby zoZLU;qEO5x1=-8>0~;-sV?+T$Utgtg z4A${_2aU6?7idV6CfT6zRO(Go+qO1IVXR{VBlk(0np7^+K=srn4Fl-e?l`yNPC2(6b{ht(&DJ@$oO0HJ z140}@cp|L~W&i=mJgd%-0HR(bll*E{PLk`3tVk9Y8UF9QE~_N3lX{l6XZaSlygEiK z-3UK_f*(I=KYoHAKY@>*tUi8{*C%%$zeo7b)0?#ZC7)a@hjiUJ1(Cltp4D0+47C){ zbfWCN@x;I@KGE97I4E+QVwYys?4sYVSP}79(J8R-V7d{z!Q&O|eS?RlyoU}v^e4c> z66`=!+FGT-Jq^+6Sz>@XRK0rFvlHl_!_8;u8%QVI0gb ziAW{&xSVGz|LPh^{}&C#6eaqXHUS`Q{2`Z^?igEjiQF@(LkC?BT){JGHjoP0$ z!k;)={lv#G|CEo*X<8@WU8J*e*rg|246Fevr2>(-QLzhb3vfHiwE$x$S^n4t*eibw zc@07JlB=x3L^2Nev}|1n_tAO2$j=LG+6Q397in>xP1=16r>Dp0nBm`F;l>3l96nA% zPcM#}1P8K=-Tw3zesrJnb@+b-v1iE-Syf-=#gJwr+!`xdul0FqH&9Umf7OA1#BOX! 
z8HQ+t4nFSBM1<4ea4GEr3#OpitYLtK1OJy8amFXoYCY`gJzeJ0$>t>F9$dYEhnFF; z@i3h0o<}V=rOW#G6B5s%~3|z1(US6 z?uvp*8fT;)vaO=IvEM~LSu5jZw2|lWs2-sZ=vIvF%+ES-N;nZie3{2tWYoBcvem)>+|FrEP2cYzKko1$ICe`l074gr0Ywr+MGr^O<< zfZsW}N~g=LK6(`NTp!=QbAs=z#W-0$O_P77E3dvMVn&O=+OeUJaE&>!LcS9*Fst5K z3bCMsk#WEb{y#_plh>GycN#81DWhgqZ54Yq9)7Y7%1|SD@C2>Mus-MR5g60Q3#R*j@*e#e7?j_UAABcnO?L#@pzDNX zqiYW6_C=RqXY4{(9nkHEF5EAgjjlYP+YepWT67k=%y@V&(A`#e{S>+ZVkp|~LeveO z>W~R(^z*@$zs$hXKhM6rg$iys1{T-jM!}knR!;XqPzNeQPb0`1M}1WUc}*wU!WU8; z4rYC1jg-9*G=iGa>(D{9w>s#++vcowM5b2P4?r3D0Gho8P)i}KXB}d{5w1UjpjM8J zpxj>-G~Ow}BhYbykD$S3J8BFRo(L73_`LZJV?=1#s2TYVD|_TSjI=QtPlB$nEp!Oq zV_S>=7L^yooy@EJDqUnNO3VcHH3ipcUX|t9qdZ-8b-a3b(mXuLi<9<4kMXSYDy{P- zQL-x%NEf;QQ_@(iNx-~ynxq^Y6=i!c@0M#l**V)Y*SXW>#@^j6H*WvfZ@F;|=V6f+ zFHO4+Du8rmRzZeNDcK>z=-okvdmuw)E!CS1G7Lzh(pQErE;)Px5pG;e4*TG}<~ait z@y1i~_0hWy`9eT{kngL=7ovO3+6=K8A?YFCK>N!e-(Z9pw({~JKthg`AE7gHQfKIw zEwYn4>WOK2eu7PDMYh(yewr0o{o63OEM6ah)gA?3l4AZ6op3=s7dy;du!M7bI1alw z4$(Plc?r#9MMu@Y5_@FQ=2+mV;ODwu#sU)Y|eWE`#jji6fIM zne;Ih?ha0=mKv+w$AEE|Yt0=ao+f_{zn*;m(=UJi?uYMxffug8LRM=E2p=8Yesl!C z`^F&S3!E5S>g0HxYFrfLd3FLBEr)C>%)fk-l+^?`6V6LG)AjOnme=*# z(y(1ld^#M+~Jb0uVHxab+w|Y{J+af`7R#vyLF&2Q%Q z%^;u@F8oxiRw8GK5%@MJ2WJOZV$KS!djJGew9T0sU(HN;t3mQ@84EK9QbX#NFPa+Cd0zz8~Q3y+; z7}9atT;K^__zy7|=Ma4!-zLsSw=LLfG30mnR`L^ax&ub(K=;x{6L~8)tdxY;WI`8Q z*7-lXWu^q75NfPPkOY@w0A5I2V;8uM*_A<+L%sCnp@EJ>m8C zop2*X43diX;EA;%#=cJYW+o_07hly=_`S30>ozAeS;NEE2p>Q%3*bAcyUO( z7z~qoRZ=SgI=v5V1GpK~RoRe|(WUkQ^6ce{>{hzwUb^OeNY@AC)@8Py$R3UmHmL?n zO4ljjtsZc(R9V}1yC~<$G`q^CNs(UV=XVv+HEV}#T;qJ6uD5}2C97qGgmb~`uWKiw z*O9ZG$a$#^jhu!%XGqz(yO{rUdh4~bU>14&B`XW&qlrDTU`ENH0+p;rejy#?uvTzz zCi5w}H|ufYD<)WtM=EhvwnWBW308Dh$gKeYr#I9%HKSzGGLaS4ACg6VXN|E93iK&C zy#dRK({AlfDV*+rqh6dZF8+|rr^_0z<;Ajk95l|ZkJGE!ipy+t`y)o^FOG_8v<~{; zKu3#7`p>eQ>?*1Wt_#s)zibp080%wqP%; z@@zcy$HDW~8ted2^6sVql?Ond2h<7viB|_ep93`D;Kxh6%Z`<^@87xX^Z+~#t!hST zA{=3nribeZ#z0xJG+_fol`(9ie75Ri#zZ=+h<5k>R!eE!{#6u0@LKSVq-v9$?p1U+ 
zB0_;1CE%-G;$GD`d=0P*>rGBa3mh7`D#sfckp1zaz@kRF>4F5V-!=7Cz`ct3bV4dc z=1*zl%mKN3+JKi&vigfql0q(KjZqMB%(HBNau80ou85;pEN z9FCxuXC;=B(juMSU^1;dC;C|)J{>h5tas7Y;(P!lGsCMnv8(mUIgb;=6s9;c>*8)| zuN+<8ZN=`MIV8k))gtLN+FQwD74zEnIX_WCFhIq*E_frI+IC&`>C?1R`NsR!$td2J zd@b@YM2>u|jH=IBj$S!Ip5>_GX5GEt9lj!UPj1uXZAv3=@9vhj@a|&&Fq1FxdoNYA$%&R<|=D)WPx7|jo>@luMo1}}hCA`Ut`Es#A zc+6RAR{3 ze#>aISY)&L0{wN2*H8;ivf?~1vg9It%wsa19zS?|vbfID%eD52i?o{NgRt{Kn4nob zj3R3O0#=01N{X(m13I3g{Fc~lbIEiAk0Rql8Xo!i;L(A9@bo?&Jg0dSlOmda2hRx| zxX>|+X|IFb?zg)eAGMYwH;50x>A2DVcA8er>M;1p=S+8gmY+AiXa4tJ+hkkQF#rGM z=Sj2EW`xQBZp^(W`eR-!A8mU3?c(_N{Ct#`@bZI?to365r7Y3~AfNn@?rf=2)SWp`(5bd~7CLxluCG3 zSAUR(e`axIqq2gF4D+(w3N){97dGGCdG@zw?c~|vm*3EfXr+sst{hxZuWT=5tRROy zXk^U$L&gdhSQ8u8^k83Xkf8^NXk^s8Lnc_Kq~g(i_P_;az&=5bGxl@-jl$D_eBDO2~_VKN4gMa z*&fY$Av$F|d4wYT_r|Vy#SYEJ<@_d@%KIUa;}zgXztt$s5+A0kw%xWd_4{A{mQ-0? zPM5FJ-O_u+Z+4sd$HQzeYbWBI>D?O&$8%BdfSK3cOuJqcL7O^P*{phIq=xD{I1E1POFQye1C*TE(2&Y|+vF=BA>Yj)9BXydQbttMK3rObW9Cu{k z(0$%81C6skPaXm&1G@WKeR{u7Lp9v!1XyG`q z_{28$vEfcs(TQI}3{THLecQ&#<3^BORBXE;0{U|H?*7j zo4W!`ayCsbH}<17e>s|Lu($NsjrN2NYO4aPL&Zr?a1%t^1(y~+2of#@7m9Qrr$eRt zy0+ejz_5n)C|G6rEh=zP1=Fsc-9Q%%Vi0cVcBXk+*SBk!78g~yJioZx#q{5ce3~zA znjzdORUp%rU)Kh+Be64%f)l(U zVZ@fQianjr-FDdDbDqU_!Fgt-mdfhL%PaQrb}iC`D~iJco`?wNWeCRWh`Rp2qL5_?*^uYF|Vanv?4i^~_3SMR(iZINZy zay(Eh*K66s2*J-5(y)?-yL@gWYA1tt zpCL5%HYg!^*G1@YUnmotpw-kEh{x>*UjL0c8?=n9(nl1g5313Vv%T=d8l}vKoDKkY z54-7D$xjmqVxG?F05ql681nAH|MNq#$;K(f1yMB1RG6(o{TL^b4$H|w^N z#g{rc%^=vt9m(0hm+RHZBEQTR>G9+S{vscbW_exb#rX(6ww|NJ_maP5^#$l?m8X}( z?4*VpT5Hx(kHYove6$zAQ(HZ(c9`digIHp=KPftHlMK_F*B-qBgro{Cun|&kGltv? 
zpbh;p5VlCAS!XoFUit;zP_S(D^k>vBxSjb3$xn!o5}ItHAQ9rFIbRJRW-8POfvkZX z3y&SieDn-NcR^C<8D&HqmV%k)^Lg9Qb2eMNMxbD&kH)g9;=%>pBTz6}3QO%FvA8Ey z4RXi!bWN%?;HqhBc%2;-|7mAysj)&wilrO^S_iJpkca|n&FOftOs5IN2jgYFXg4?I zMN%(kcQL_T9FaGB2(I!9KhmsjdW>GMuR3Lr?^(OVTw-X;OZlH==tNAp`PCyH;-1Y4%~RY)yu&LB_&xm z_1;4a8;9+(dNXh~^eT9qSY}y1Y5Ek&FqJB)IcQufDnuC47xOECHeO`aY8NQ>)vlub zrqN@)nYMj)g3nH>tZqKM8WY+Q-J6XL43%cly&2^#du*SJw@k?XBa9O}rJ1xg};bL~a(Z=v{l&(R*bSlObTkXL-u2jus%J z)^x%Xp%rIaL)ZCT+|09jT;=n{+R>e4S4FlMU6*n{o0p(v_42gIQ$8NUg>#MjDy}wW)m{#m69$fCkUaI0f$zG+ zs6{ZE#49*0#DMnI&J+1uN9yZ$t~y}>*TCvr)v?dO3Iry?dP7vgu^1w~RfBEaY6Q2* zujA=PF-KoteKI1)kh%D#lLX;uGTl=Ebo zU1ihNYQxsC#spW2aZHZU4F1(4AA6D=H(LzP)5$qpJk7aZtL|o#Yrvsas-4vCi>Z_% zddW)27!`>UuLxb3It4DHr7(raIdayyZ(v!Z6)4f!Y>}krX$;D!RRIJH++YZ05 zPp(&5L`?zYyz(+Aq5z~lUX@w zvJUtOh?!5DQIogg%mK~Bqbqs5em&@*5Nbq-*oBk=AbQjveGelI7Zh|aU3eyfe?;bce%nF3YoaoBPjZQl556NKK2L~4hW_4MG$Y_WaJ6?6+u9Y!Vg54ibp@?J+bgnI^$5cp;4)gak4+SunIS<0@3w8YLe zR#gRjQQadDK=PVMO%}ovVU!K~s(V&~5wN)cpzC4nCQ9$@Z{1 zhwp(+hfVwHNmVY3$w@<*SRhaDdikl1A4ZGxydH@cm7ccQl5=y?O-EoIT)O!9_$DX zJrNhFbVg_*cl&~3UDHQdUuQbkaEQDX%18bGdp! zr`EeeZbKuZz#T@c`xblU`oO9JvSw4TD&+-d)ApuDzDd{MkYx(pW%Uq55T4562J8w6 zpF77Em5zaz?*;cy_Qn`?}+h<#$eu4i;xHMVUiD+rq*H$xhT_+wX6Qc?=cvf>tU+*70t#xFjs)Qk$ zTAu*VSg8dGo`FwAF=zn5qy^Dss> zjB2l8)LMAQV-MEEBE6?zbVgV$IMYW)S&QH>11H^j6od%_wl}h0&U|=O<{QdL(rh&R#<_qF!{*Y{Y?Yed@bm z^hOB5;>LEikMWe=X+=$b?zLxgi{p0hYw({nxR}(@4n7(&uwePs-QMZ1zeyUr7RkIS z$5~zDf6vPrzD)}n9{bL%Jos>Q_kkURc3xSmWeXlLr6ES)Wf!!{V%I2j`6z25cskNC<6p6>)eP#+l`Yk*sWBC-nsUK`}}A(CS7LK+W%hq7c%2HRk!P>kW)G%6Q zNM2D^U(Nr2_TIg_apT$-{3^J)Yu(r@hE(r|XPueVX*;n`9H)=lN$#4-EFFl1B(z9? 
z0Z2Xk$9(qLy8u2>P!~W_uhuz^O$i`G{&v-_{rqh8`mycT_t5YjVEhAKoI||eFlH&8+hjQHv`)JEy$w1u& zEYg8;pF$Ma5ov}Loc@YjEBL31#r(Kfv{s;mWr*c65VG*DSok zm0x6+@wm)xklOGL4!lWnkJMl3Y?6Tc6HSvGx%s|xzaRg4hjU=7=(bE|vz^J=4M)M| z6XWJ_M*$yB52b>P4yr_tB`%Q^%ojYD7~|vvXmb&l%y~u6~`z26Ovf(atAWJKG35K}_KA!ctY)eoa2-imrZjNbIoGl$WiTRJR^ib%q&4@NNd zq!4^C6JrydYuFfFm@`l3ZWQ@Ay~y&+&-=8z@hFEl7mUfE>5W*<>?yJDvSG0qi&3Ai zpcn@`RvxR`Wj7AeFIKWo^$W7b5@Q1Et~?cNvMX4k1hZcHd=Q{P4*daOj*HuT0^8V8 znvaSSyCS1<0girl5xxKMKj-(mqZSvb+etdX+f!mF~e8jM+Lj zLg^~QfXY3hy`epKN29rWp2<#YE;Vp<-Q}8IIFJ!7Iv-;(RObwyNgjM}o z*<&mzPTRzp8Xm-qwMr@W>dYX3l1MFsLUV9j$R1{pC5kfNj90kA7&otZ#;dj*)7kQy zVWT3um#87eN^L!;!WB5_`QWyaug=dw2VQ2N_?*!bT^0Ftj4bri?6-7$Qtc$xHtnPB zTaR!y8!&p9p(-qV&%?L8K}A>(IFCy4)h^<#Z{S5bPG z;CGK!5sxhf@$Am^RK(3)RSZvz>x}>M@3e|*{<8ox;XS6Rhm0*U8rd^tkP~H$ht!^T z#X>BYn3VbE7Fo*`GPS?{A+i=c9Vuu~p)%S>!FmE8j9q&^aFs=N$Jb^WXhJNd#s=|^ zDQkK}NSu^7HGVL$Ywh_e-F*OM1f1~@2U8-^jSbhj)*8l%uQ~#tc2k1}n)(>}oZl8t zzOivJGB5yW$YPW{oTvr6`IvV)G+bd=ok3nabQF6`b=Y8SC3--|z>?>lk?(i~&LS%Z zk9P>bT|-DFbS))n)`EG;q8=V))6Ap;}0 z4ZM0l+)*~m(tKVQj;&&YZYpVoL)5O}k2yo4Sj;1wkA_mgBHnxW3ETxVUoYKV z?jzMx3S9VboK@E+(Lb+}IS!hv|NJ;A^5$E73!n8L9e?jD__E))y5Sr6pv9`5<7;@& z`{oPOlRr1VLj+%U<156`hnmG9E#r9+uP;pN;t#ZkFmznuZ0&rMmP@V(9C7AKP43|t zA2GfLzxmhi#zp@7c@uBL=;yzF7s1^K`48`+3e0|d0w9!@*h&3WdN)h+D!WOe53rmr z%k<-a{e+f5-jo7iSsr}$MU{@fUZM2!-+o;o^%V~N&ftNyFdOW@M%;d)8=tlQ(*WAV zA|J#1lF_JG;GDoYKD2_Rf%n6%%B#fv=?Y=|fP6j;+?(IP`(dl4q+BU!g+N~vv;QNCnpK9MwoE_4*T8~Ew1<;u1TJn_^8{k5VE56u zSo7<=xXt17{CMHM+s*pxUnJS2`o^uCVlrN@d|0g*TXekv7Z;d|z@oznaah2ItV_SX zY5wdinP1@#43>8~--@V>lRQf&`FuJ)y(;E0JU_0#%C-tz-!?@5ah$FZ*hRp!4A*}G z%p*#)NB@BdYT>LO>g&wlGr0s#T;1m*C&PTPm7ygprX+@&&$jP9+qid)-+7v+)u$md zrND$Biq&4l=Q}9tLZDrLMRmZZ1eLy|4>qQ2rl{D9=>c`f40kjrrAwE%txt`5NG3K{ zEoSvt!i}QxBA?s?`sF-^n~=h)W3gg>q8g=nQf5V5KkJYyR&&X0Nc6~s&^>K{Om|>z zm{mjy59WqR8brGJ(%cvf?{K5Ss^U2dc4KNVsizsxfyo)8uhNZi6RWij8UduG2=-#& z!Qi_K<1xkI3Ngaiz41J>dN^u}3Dc_<6Q=vOn0V!m_5DN3`!!{f!B-SvVMFg((4m(o z73eZ}&H_rghErA#^unBvl{|;o6rv}ER6#X)iZB~&hu~ZRO^wG711rQZr3N+HR#kE_ 
z-Z43djhB__XUl-<8BXQ~594e$fscwW4sC+qlsm>n5O78^&wvZVx{$MkTAv1|pof^H z>X36Slk(;i&1cx;$S_)5oMri0Jv4eNL-p`&Ye-QAjOgShDU0ZrVqD_S>G*Py{x%3? z!a1=){H>g@b_EJp*c~8ap`%vH1F|FlG9!C5NIZjWtr#Ul2N`R!J=6DrZ7n6{5q;ZQ zN(TR>dr)J{iv_GsIO5z4#b}pwDqzy$U+{Wz56axE-*wSKX*h)ZvGu0=yKs{i0?jtl(9m1xUKOD7+ZEo z#mV2BnA%NR?hc=RItYzG8^s1qXXIA)fKU^xxA2*c$|~;T(+!wnSCrFaKIl}gvNEYM zXLJt22}goR9)c4<%6c5b$b}`e&%4spV&-^gwCdfDQL-3kb1a~PWw&%|Rw;Mv72W0? zUPrHz?99FQ%)R$)`QG))T)p=|%MkhESx~u?tQ(mF4{NQA0SgD?rlJ_`sEQSlH z?|{cfUw%Ax(py8NBrj4lsP{+48Mt zn{OSYYX=cybZ>8IKr+k+pyUEv)jh+3487neCMi=xkMd``xY}EnabsA} zv~Qdz77i|>cw9@*)`cq^iPv7IF66=r6{ZV~mEk3!4p5_fTNwcrM3Pr`QZu9w2^w?Z z%9U;SNsV2aa3p&omT(1}B1 zs4ztDGeoLRydh&y>hCmPUO7O0yT)Qj!$Z1SFs^9Zh8@@!T_7Qot~5T7kY$AqQD%;? z3(n9MS=Zer#iOV1aWA$mKuJ@f!?YA9+=mkFLJ7lVmikaKIPzK#Ek&Of*D3bor_*d6 z{fcE?7hsvBH&_veb!ILgQ`di8Y)|xnH;k@MFJfj@?4GhBLBN8!CSV)a4~QqlWfddj z8xPuOq{c@L0-WMR%CMeuBKmAygEK_DtI}|zLZT0yWD;e1nQogQx2MU?bP3h@Jsgw8 zIE`JtATQ(H7{XsOJSdWX7RC5ThB7l9B}SkJsOV5>U_)CEH&;(wqOZ=Aacy*b{ZcUk zFIZ9H!3d5^%7h#cx!Q(kiXp2fz^S$WgjKvY?LY7Dw=?h$g2=FH&9?zLV_4<3Pr3=a zX(wq^++xjY23ju7u@D)2ta*BdP^DFb1+EuZ4DD)Gz>vf0hdL{jRmG&mB>Z?WK*qtG zC1EpGRlC9Yffyj7&KM(p)G>55h>PfvdRqoR3T zk?>hZg&5icP{9=jp*(;Jj)h@-h<(_!E4zpae*ar=Ra}$MVs`fLMK-!VbDbOYNnHWX zVEUx4ccof+F3~tcil@o_jD}HQ8^wFUDk%3lGhkn8-fBVa#SuD9u3LP8*ex_k0O6BQm1}Ey ziByX!PNz{Qv8da7&Pw>lm_70!Aw!}X^Lu<c-n2y42$xYahGtufBcp#=NG}A_Sk8Nczzc4s${FxbD zVK6pQS|$O{Cj}Vx8#zGcOBMR>I-oQZ(%(=SQWjy2l6|D1HM3ruUUNYhIhHiKq1%x_ zNE-HuQ@2R}5>&tk($TFkst-s<5}}1>3Im3`jp16nl&Sz&y;g`Iifbcn4}b(#M){Vn z6v7fN2Na2KB{`}*nN?4h+0Y7r3rTp36DvK{JKdQLDUCALx3r|>h6*)g>h^Zbse345 zsG;>F)-G62wywhaEap;M*~4f<%&pgG^JC7wB+QXmCv;H|WQoL;~nx=N1ur;rhbXa%lPn z90wZ!d_aT0MBb~A_(1dl-BNVOeV`kS)x|@)1hzqM3h^GCM(}$qmS59*eCtYwKltfA zR@0t?W*qF41NNASaO|n34;b5u5Gs{dDpoRW2lv9Am)RIZgG6|r7ML`fFSBP^zMEaE zz%#^IDM=p9lKXXH4c`1j4cJU6f$bS#sUc%}q=pn_R`?o1Ko&S%U`VXwPuX2Gzn>L| z?PXC_*{oWeJHo~9maDMg9E`?kH7c{2XIgpBMZ4FDi5-0ihbloUFws*F(8M;1zI%6x|? 
z*3TQd5Bm$!>s~B=`wf{?QM;V)wq*qHzf96GGN4tAF*yi1hLjLtY$U?)aOxK{H>L1K ztTYmsF||)E?sE3p6bXnyVJ_f5U(uA&uWKZog%X325!V(<%P@H&+e2GLi3w}O=eh^^ zi>u@Fmp(fo59>W>Dt*>t!{7@|vB?AQ{PDv(B$+@)Ut~VW@?_-FGB}9r3Y5fj98I$P z8gGRwi|Jy54?v2ec?rks$<~p1eVWaZIK9J@7LH0sMiJ9(@bh90|CGt6#baC2n!C;p zFSbSyVTK%lJH|(e(=1bGbF55`nHwv9lcl$4FOScY?exjx7#hQBC%bG391LNJ&y3T8 zA~DL|U8t|^Y%@sqXo5O4wKjduHjPv%-im3hIPy*G!3ein)u12dE4uA>`S>F;SN~kh zo!7Wtw9$I0KD8ZfEVnX3A`! z!#|VaGOBQkXY>2rg;(F^Y%uKOJk+|g6iL~5V1U7=4!csUh6_TxZkGX5))+Oo3cV&I zTn~*nPbP3yrFA*_>es)omYrgGbY9F~V#&EdR&yU46hneZB6VNVjt)REC^loWXyf3m z>-djJ1kLG+2)cgx*hJ8WjWQU8Y*VB|R+OnXSv@CNQR1ycKv^*@-(>ZyWW~7dkQJQD zZ?bw$vQpM|=s@M+c7Bu9)5*#bs=ZBuV487e266d)m!feQ-6v&!f|aeCV2mflWnBO>Zn*Jz3YJQ4g&gjmV#Xw@GbW8gbwn)-=q$Bt^Rl0o?Iykp&Fpm5`*323 z)ges?`UFlo*(0@MnZW*Xzwj`?`zE~og-59k39mWfsb0d9jQYkq4Yg1GG088?1FrcKyEHNjQ*4g#k__%aRFjk-A4q`Qug`Eezw){<1uM3Mpxgdkmro_UYhy+v!s zAIyA(vHRgY>JJ6)E$57Q2@}8&;e^Q{4t6~S$DIPF_^)^jYkz`M!t0-#)ti^67bO6O zed_T>9qTEg1Nh=$1V5hLX7e1*oU}Tbj@v$`_YuS0&V68p)29<=0V>T8v(!cp$;{z! zZ2Y`%IE=HKX)%s&0W~);8h*|yoH`9FbehzQeu-LEp5iDX732HQ@a<>y+t2XrXYls3 z=Iv)$b+&r@8qFUk_eu3jHojUs%pTCK5eT=02AR2rdOi3~n59(4n?<$~$6VS$lM6gm zIKfNrJ>D4oeDqM!14~cgGz?z>0*+`RgIFj&6DsP&<)}NiBc8ztHs)&`mOS=Qm~}$S zF_t8K!#{EK6%&dMoSb^T$R=BwF3+yBd2%|whhJo)c$zsTgm`|}I!y4s=&xyY1xHw! 
zCD+Bx5XdN{it?a34?=jj6%Dw+V%!9w1t7xbiMLj0+E{l>s<5 z8X~p|60x@Se~ycf$ne&X+v0~iUF2g|zBtBKl{A7yIZYaJ?x(Hh;uLH@qFG>>Qje|( zIJ&41zS%+`6i3vte^M?CwtR|jI`^^VEWbG`NI+SvsxdWO7{;kxj zoa4fgBh=C$Rsy314WHuzfHEez^#h8Y@~fNU!f|9sXr71Z@G&l&f|_Jh`LJXPcDEi7 zLc_3)gXGcEHCUxiY#>EIpKJ}_M46G^P3#&E=xg1?#4-~!j7d=<`fOrOt+&D;1HfF=aSQaIUCi|+nJm(iqaM5M<)@U9G9ddZ zPY9JgOmB*WAHw(mlc4m(KE1SoVThxq^@Luf^@Q#p`b=)7o-jug5a;Mz`w}N!1Qsnz z6CUPpqV!gAI#DW4-$Z$6Q3@F}N*6>)eTms_Q7SM+ydeY-G7&-yYR8$E_t9;7&a3Pa z=a!6$JWpE<57YA0-5Ia%jJNOH^oB=Q$t3#ax3v0gF~2T`8FZjzWDsj;)BCg&hY?+t z3d6XKZ;TFFDO9Mzr030naG3%&*$7D}@9v{H(kt= z^GSMkzPLQtAX7MNE#+ZzNh}K=3C5XK!fM}9%P?W(uUdSDl#pNx3pi(j+Ty?qQQ-)2 z!m%~Pi;z)N?lbT3m%9D%U&}l!Y)h*djDe`Tdn@}yG__WU7h}xUB+2a#iO#Q z;NSJ|+-)fTv&fVARgyPSkYJpYOVt#M5olE3&I75U^e!E_d!k!7`=k1tDe}4_F{X#@scFrLI2M+Tofu?7?uQ&~LoEZl7d_Ak+6|qOzVRBk3^A%Oye8A9HA8e}T)#0M zWrx+C!crX+TjeK)wdTuh*uisl3c60V<}+fxacu+904`f$M=kY!d{Sv z+|MgE#ZeZQ=_E}mq&fq!qkRc#@sJFUIL; zi`?P-^gO$~EQ`eqQBJG4nx#p3p6&RXuH+=muZwEXfKx&UAl%dV9uhKrmT@+r6b&-e>51|3;&sN^umOl0-?kTo!dg@Z8rcYIxPm=Dnq*Cc z%%x*0P`EfeK03dDn(4|1t-J;0Ot>UIyAvm48fy0lfiyP!GUZ&G@wE+KpHt^WF~_kh zvy%$Fb7u}kvuu`5vOInK(1;{VN){Fx;Y#!fjR=W-vciWloK%{M;U1DR&+GWc6ioEi z8XOm+>$IGt2-+lT4dMD!MDM0qnG8~tk{ZPY52F<1(v$U{OhRQ_8$S56P4?gul)F_JbyT?Q0#Nz0Le?K$E`FULByZH_&R(R1eSw46S+w zn6(ZMt$6dbKkEQ;sdXSq*JeE^kONpHeI3S+Dp;tM9@ zaS4{*vUU16HDjGnX+cQP00wg+Nce0+E|tU{OCM73e}b@qvn`6nye56PLMm_kski{2 zWRZ`nt#Z4GquM=#8_w|dGrWBpzn{`@X6n8I1VX;>_B*i6_CnfWM z$*dU{OzvgKJmWLDTQA}ilaM3CoM@wZ9m+<);uU*2Auke9IBO`OA-%gqTG<0pP?u6L zpWUb^c3G0}z(d>S#;F?^@5;wmpqI>(^F=*c-qr7ou8RCRS`FIRoWwCckFz{>HymIR z$Dq#M&e~Ch0aT1y?wgeIj5TL7Q=Ln@@^l?834i zzxzvco?aAX>ge#{WSJ~OwYc+diVV-LA_x4F(eY!mgPg(6@GGc7rMzhu1)JTIeC>lJ zQb~Lp;nsftV>C(50l3RWHg-%M7~|e}RP_F*cYpux`yB#_mjIxW)H|jXu7E*m9t1FG z!+#MdYHpSFqbLOHvkd(f=W;Yry%<3;rMMp`fOI@VtB2@yZZzoYg+(&?w_K0}BGlWIWR@|X1AzogYHg1=QIu&=A^w=|k$(`*~IX}PT3fg|PLaAwV7 zyfb!p9=_Pe62_ScBRAsGRj)-1hrHJh zHP}SPULofPXahxgAaRdnJ(^4m(VFUx)vT3eFTKgc5+S*ffr`NPXfi?Q{9z`5&PdH( 
zWrKu(?*`jdgpv6$+oM6kz>&w4e=TZl?LnKzK9|6Fg8pPn(L4$o7cYqDF%Op;@He^f zg_{f$8W!vR^WvM~mT6us%Jh9xyN}#xDVU=b{P!a~3cmJbI>&*Rm3QK$vs*v>(#&~% z28?NzNy#}4Ywlw`=3jy|wDx!R8P2)nL*|V->-DRYp*)a|ZhAhekfj{(!bIN{=!2*= zwbhMxMAkpNJ6_%$||N0J=sZl&|Mes;Wm_8ya-+uB-jEy8GX z7!{_66(+F5(pLgu1ol?5A$?by3wT~!UQW__4P(@h)#O7}d-1x33)iaytk#2OOpHW5 z?X?huRYH-WMb=LWQF}v635Rn~0F}!=y?sU+Rx_it@RM8 ztUF6o2LrJsND)h4cpyU%B9Sp8$Q3c%^ax}CbedS^hmHwpUuPhLQiJ8FVN4II51}al z+6;2=07?~QuP#Cft5c{T5H3#z zlrBaHU{)4`U@}Tl8$?QMiSYCwcf-Wc+wkf?#sw#XcrYCMy_<6OHBKW$D(0xEWpTlT zCg)Ste3*D=mR~x5#2;d4?ueInJi4=n2zKsT^jnR0p&=*CpmL2-kymcp;WU6NSZb5x zGWtU_D`4x!365B}$Z`xClub>b|k@kS1; zAj}Nk(9b>cg~0$EiWioPV5&#t%L*fj;9g7x(!~!QeerxZzt5*(It*Q|x#*)A6ain_ zyJEpamX_~7;}Gm_m*t(8*=UY0W}rFhWV#ELr+2e-goH2kZ;r__Y}PEpMjvQa-T_kj zf@;u7HjJ4|64oQ7Z#CBYc;)N5PwbH4q0=TMvemDjozAk^EX^;|614UWe2(nW6Z74x zUmxdr5lI4qN;vD2^`L~amf@5VA5KPN@_k5reK{SqD4oJiYCNLEsha3taE!&p>|ei& z%Hq~VIsf|cuaV=t4GghQt7+_>amdq=;rOc%;%kW<=HUdI=;di;J!TwsGHNUAklqDM zPyG7~D;Mj$S@a+O5s{_ZPVvhv=IL3LPOwxLuLVN8vTow_W8(TTdw_k&5WFBbI9doF zo)o+>OZGJCV0E1(#mVI7MeU@}Dr=RPoD zKd?a!!eALZ5zRF=Z-R9s!zz??pG_8Ji|dzVBb)8u?2Ltx|_YB)>-K61sgt0{IS@`phzy9#| zKc2mN|Lfm=@ga#ngLN}X+$o$^UXnEgsN0P`Qrj*WS+IAF*Lbqvix7Rt6xmTtUddM`Z~b!5 z31@#wMvHtrEZk_N5i+P!f!ACrfvGxBbjXt;)5FC@w8sBim)-r~ z$G^@bUw%4m3fs?`3!6CmudbnnKa^i5pj*R~RRneL0bJ#V}P9G+K@F9tcZDrSh$8Cctu% z>@^004f4t8KrCrEm|Rn^DtutV0j8DVLz>uMRFa~FwJ57Leq1r*|9bZU7UVzJQeBL|u|dMXW14^PMVIDZ^YQ=@&%+cKAbSY0 zK@ljeB@Z7b53Z@fq<~MpILl;EIn1xv{*8aCSZtTPAaCddE`n{}5Nl3~+jfLF8Xh8+ zs8Fp59Eay&4yC#aDC`gA)C-j2+6<|2dAChZ+hSS8w`QZW5iFgvWpzd?>=DMb+PSiD z&Yud4&spK|Ge}3-ijV>a2zu&z;R*o_{?dE9g%S!*xS)nvF8cI#13(H4IO>It8ev{( zNwk*QNG^l&JBIfdeh08=NtyBj$}AD&6%W6|(IHxdq<+YWC8~!VrU-W9`|L2VF%%p+ z1B-q!?sHy)5dl>BxQdE=a$j3xAASKZ0g#_p(UMbnH$YeSZ-Zw59~ntrMia2cQZ&Ub zvdhP%<0qZaDan1*zi@>rpFi+CXHkO^#EH9M4ilF>!sdzz&a@XH2IiQQgBRB{%iX$& z!1&AO(Kx%fa7B)8jlpr8qQy4Bvfcc0+p&L13yfQE>3wz`tS+U8mgLq|qR5I0#Ry3Qt(DuR{dC!;(naP4yhb1s1gGgGPus+t&Czhf6wtPT~NymXt!XbXmJkU40D 
z?qkz{lfmnWwQcC6rS^3W^c9xDk;+I|*|4YOrrV~`8ky963Q}u^HAs7~s(c&Oq<&zS zoqTH<*45;RK>5Owt~4QEfRf z#P}dK2CV_PnQ-Z&O6%00h8b{< zd_JLJLMcuL+e7nW7X2Kpvf3^jfnmzA<3l=MjIPu9H((}#N=WXbMUK(YDxH4;sqro2 zt4l6ISne%r(TzT4(P+_?(^+AoT06qC>SW*hG-`e&NU3u zk~lZWFGo1R>6;u6Ek_N8c33mA*0Pr<6LmCExbpVvQ$VuST5kJm-DE)!c;N}0}igZ zKDbZ-@z2evHBIrWRNrjKF{_-g%AM}qFIq}g8E^ZD(Gr=O2TN&NDqwTCa=W!GlF4my zUqzGTO%^M2Qeb|`BMS=~iJRd&W(Hbnjl7#ZKA#s;tlEP2j;>uSCx!pPFXlxtnP;;% znbgQrNvUtH23#Q&tntAmsmN1`h62tM%E6~W5nRH{1$;lOmu_<5tB($zct?Qpb(g3j zQG<1~VB00ChIVDSts-1`jXNt91AVv8`1H$fVWV6W;XBkyk%!SFh5;b&1^mRqlKz6Y*_7pQ%;L2jsEFLqMAodnCTQF(>ryeU&M|887oR%#ov}ZwNf2b!0--~kVkfK zg;`C-8)47Gv>f2S2}NU|6*^oN`Xrha*V!oK18 z3W?z>6H=r`Po&a4@{`zy1N+gZlPGK`65=nkWt-jAlP{mJ<3Lj8;@<-|N4GUEDc45% zG7@lwGu5td?J==!F>}T|R`=m3EKn^j==V)MWc@N3UoQ8+ik4@0cAJY2#`P28`gw=7 z4jF6#ql0p1g8N3TceU=h)|YxtLTUrb1Ug5|@VYr6W_6- z)x16xswlgNu*N0Hv1O{JFMuNh(*d)hn5@QsZ#1vP_2PDvh)u|6ew9^l>s4}-61`X;@NP+d)&EmI+fgJG9P?Hr7*f${+KX&~$Z7)Gwb)rj%Qz>ghP2&y+IV$3|n) zS})%pT#-bGHyh0_5S*(cgB&&)Tc z!q$@}0?r)M=gh&10?vJA4^s;J@dvXzKUcE{XO1&#!>I3RAG3#9Y6SHvc!NZ&Vqh=< zPU87f#u7LxJs#xpO5KCL006`N>x-#_CycS;z2j^vS`?%Ntl0%l(2S{nF-xlI8?0zZ z>TB2ma+^lid2x$|4*ciDSxA`cy~6vhT&fCxbF_1nTF#ob+SSVgxL@z#qffGQ*oFZ+ zN06}S8)@gm;7fGKpBtsMS9x=T#;`ez`~y|dacw2Cww1`Ua;b%c4K8$$c-w<&9@2+` zfVu0dzy$*hq)Y4`bS_V+4~4bXC=+;sgcH$9Bu-??Q$7z@a0Y|gbI9oP7(gf`)}!(u z^IU4Hdf*2(II})uMo45V9aP6SzfF_tt$m4uW|GS?nZ|X?TMQ~B{*>g%e6vFg|8DB2 z^wY`vBF~HRSUFeSW!?$RU~>~6A_@!=I;8*VkpXEf9|g8>(1>u36)vI&TF$-mRVu+1 zr1h|SI1@{c?z(c>jHmD-q|(N)u)b!cSr0ElYPg^HmYLL4TV{q?zHgDMUgEXb5)>5A zmDKClbVaTdp`0?`xg=qsC}jp{k-z8hIK|J8cJ@cBJ#l^gN z%OM$1qyl}wwF+y<_BFq9-31$yM)|ln;R;2jr?*{k{9U1}pqzw_FV`ONKCmkkhGl$8 zPf27SQePM^B=~DCJ07NhSU4{voLkxu>D@@0V?KV5ngcv=$I)e7cd*#y6ndH&4AKpj zQGrDUT7?f`koZ62BX)r+oJvoQ>GK@m=#x2=z_T7jH{e-aWT3>?m_CKEV;R=l%8)S@ zL>tw^*wGZOdBc2~f>o`_Tbl0pYTZz!N#J;0N%|N&T7gj`eL+dBDCO1;Q0u!9+#@Ps zL%>s!Vjzj+Lt0oHu8B8VtF@J|DIU_ofqLZmsbZs<5+;Zng-AhrsA6!%jPOm$g~g2% 
z?T0M1^8z$#v}-*dbGZpP@(m3vBI!W}ji;2hKB7gqLX{BvCkLF;{pwu|%lhjMEJ2%&iax7{63Ynn@JaUwCFedv5v~YH>HY`Qb0j;0T`Q~&AsvOaDRd9ABe1TOWIo^24d9W? z>|Jl&lmqOlyW<||YJH@u=WwJe&NM7*8b}*Ud#GLt66Zb+O1MJUFp9P9P2hp1*%NJb z9IB{?Bgr&vSg`|&MLmUv-Flh{D~ZoJfGfysE{6sgmcbX-Ml-q}P130T*?CcDEv-hA zF2f7QEOxO>E{k27mxn4M{kXWtuAx7IIX@)ApaggsJfsj9>2_KjR1XA@z%G>7Tjr51 z2VY8AeV{?8zoc_)BALNo!4MFRYDgR(dKH0p4F*bc#o5JZFoWEaNrn}r$80X->w)!L zKMRE@0d*Y`OcE!`O4DN-OU8+p8%9a6etjvRfcfCHtNL477SSJ?g5UX7nI_}i+4box z2DiT$?J=$Xkkh@Dt7G+X>;g{GNW}*PpSzETP1Vqx5xby#9;^x`itr7uAMSGW~?NVA>up2lls z@%YNNg&$6v(K@J})fgS#USl!RqkNeHi#aqa)PN%%CxPAofS_^jXP8|XwJJQ0c)%M) zafW>ueUj~S!s8S~;iLRP6v1KNV!a%DA}ig<@$B+yKDm#@1iZ_;MbM=2h5HaR3n$#5Zdf;x2PYn+Sh;eU=4qL2 zdm~fajrr-U7{#^wJ1KU8KhvAE{0({Q2CXu!(0vM;4N15R-;kCFV}uVcONj8)mj~cA z#lE%CqCzgxhAyOG5-FGM!;Z-pOMc4Ye6gL}J1UX}nG`8yK~|0+x(AS8j&p5&Kyog@ z=^MUVKVB7!$rzoy72nEvnvbpksp~2_zmLjvycprH>qh%|nnsi4GWr$krKYL z>aLUMW0u21Z&NqSYWeqS9>@q)d;KTA!8@>?0q;0pTwJ6`%pHwzSZHw(-4?Er41c3_VczcRxWhL&F3uPn1%om!K7?dnVh0W&o#r6ZE4B8jM1&gV>`4_Z5vkBxp)J z?67V}TX^**=tmZ`;W8+pBbcv#{y{-gqPhgl>6@S*SI|l*8&t}`ZBIE1s{Kba<4lt` zIX|kLIc0S#-RC#7?I&pbw8BuO3e*E~`-*r)3GgNnC(Su@$xZ%zYcV z+}ZuCx_=0?i)2!D=oySShOIxLdLSw!wVsL4h_wo95p5wl0VRPvq8R4|_X>QvCD8*c z$Z>jvwbl)r5W?!9(PL7F4?6~QjFQ4KA8v;6pz^4WIWIrFTfJiWG2->Nw13x`(1$p}wKo5gJ zbU}c#evE%{NNMztGLxr_dmEgyheIYpQUq|L366UsR!Y>2aV@MY6I>3eGc3wWIPj3t zzQ|$K;OUnav_vAOE69Deo04Z20QcJycxq<$Gfw<%El?SaJl|1WN=K;7Hh4) z=cvG#wo)+PreruqMXG_sS;t>O?R|!$2y7Tv7SW>5`T~1QN#X;^5@|!`kG9uWI96Wm(iTTPt9K`4eW7<~uI3an~Oj<{l7&@(v5 z#mzfy(XJgt4YdFhNU>>y$o;U>sfUh2_kcVqK8)!OxFo@3l5bN=f>~iR%+8u7^L3N~*4b47hE_xwLDE<^MF^t}M~yJF zKvcv^Dub#pMY}2H0n4;N&wrxH$z+)dGQ;fW#{k3b^PZJEq)%DbDy z>Q2gUmTAj+GEI`-z>XaQAg9@g0Z~?Jg@`=7FfN(8dPQ8Bpb; z^XNPfu@l2mUmzL4!s+Wn?9nmmp}OrA3)t!am*znoe#{vpi4BGyLL3 z1uu4*);n)lBm7gEO_J!JNq%`0)0J9lyG}_Bn%r8ShSwN;=7Yj|@0vRM=A1m)fn&Bf zpJbym&;!XCGIO3~Ie3xPqhbPfeZ2-^HJgq(r3Y(#!!6auv1+>g~XiyC6gpgb5KVK~x zIOISa%g)O6@4_SQQO{bGl!weoBPZdFU^?avj|f}y zh_FNPi0mIjri3Y?Y}kle;~5a$9YZ#PH&mo8F>=c-F^8B(+^v(3TXLw^C5ML+=HN4d 
z5-y|&ITOT*Aig1>-DiSU*4G`6R~RFj4>3*9ZTi1nl;32G1nupsB&oPx^Z zLAI8(q7Qr$b`ST%r!BVwO1`#p-adAY>FsH9GmYzTPJDh3nhmgxm-Btr*!?vtF{AX) zq8O4`p^OQkb%1@PPZZ%jij^$V?4*4Qk-MOt;WiAOb1T4z5)Qp6+(<|8nED(gIzRE5O|7Ib?@ZAMXHIEmV>e13}e<3yD z)@VVfcL^De%V@tcuQHuYl2Pg!OPfg8Iety-hd_g-PQ+$;JWZ!Xc^~~TyZo<3io-tB z`Dkm(Rx&=t$l59X4CcZJBV_JD@fwe=OlWP*oVm}`FG-rYY~mhm_F-b{l9Axl3Ygz?NbywD4YJFrcHA-y3W`t}LBe|PMR4;y z06~dijCdhZT#`XMzGkePCzsLp?^*Qz$7s1Ps(OdN|M5K>z*t~`p>O>3Q8J&G+4*9( zy5-B==is)0rQoam2d5NIDKjt|(ag?u=Cx~LTFpWz(KK2m5S^qx{?|_#{PG-r2WnvQ z1&C%>IVtcSU0u^oz=nt}x#1KIpHoP&Xp>U4w`2bdnZyl6rVbLqB(6@FVBt*S2%8jC z8!w;$r?!V|RD)IRT!qt77MJNH1tTS%l^~Wp*^VH7_Qu1&N*Pje(U1}sSX_M+Ke%ta zW0_i}_-kFJutvd)3<~HeLVGMz3g@om96Ey7ubf)ZqBNK*s?EwnQ(n@|XdtiCmNB{ZpZ$C#>iXC!bt2MU3 zq)5hT`Q=#xRA82hwuWDFp}Hg{xTU@hYn(iXA+A9InJO)@S*p5>@QX*;IC7XfQ~F{t zM?=nq8N>p=cuY@)357&qLEfC~(Tj%!JGL$c+eH(MDxuUXJ+OpP+=LMRp?4W|Q$rS)Km!{k!k}`u#G*+sur?&|DSaw2>`4`P0>DQolcTG~h8JludR3{?8&$ z=J2@ahh&gq)|_Mec+gyIX4_TG0A>funlQ<1^-G;k5Q4MBkIz}gB*NMpPPTf1= zi()bV724eq}vEB&Z5LL!2HhjBI9g`0u#1yfMj{wgoiiB z%X{M0J%_gCEt7)Cun~RQH)qzeE>t-q#7FLmL`#A*eHh!b$67tAEo#Zi7Oh`CX|plr zORS<{6rbACKDz}eqPQ;xV}#|P)x!<0hbkKuRT}-@*`*7@EKRRX(`m(HGY4A>S=@g9 z?k~r)xRw-dLyp02tI8-#y@kgBGLEC!pXhptXVWlMrne<7o^E>a zH=FixzZ4f2b<=)zm6V{UuxBGJYsR2sZY8=dcBPLy0kMmzkHJ4mgfR8n6H`xk4Xvm! 
zn&CV;-x^Td$`FPz&lHjs4HS+)XzZXI3mcQDTAWw+uxqBcX_}^-9p1zG0NQw4oK*4Z zefu0kT>v{;cPWF|+3S9cb_5qsH?_7Y=&}d@M2_t``DPG@aU$lG~*}t z)5(j|QO5bvKKwM&aPG4{ze?v|TBnOC_HrTzS$1(B{X~Dg`#ZSORaD=UH4`4Pyk?Hb zenW<(NpioPGO5=rdXw2@JT9}FG>;<>IPg(3uh|6FC@9+D z#6k7c!Hvb~>`**p+Xt&#vOi_{;_j$TDs~mYYhE5|ym8-6)0VGd;9TjDqGC(AhwI4l zP6<=++E|B#4HB7z2I=c|nq`Rm$3&hk%S zyx~Mz6uV1$}f0cb)J zxldX3S8J;XU{3E`y*cb4aOqQUMLyo=qdI9+-+q7M%fKDOdOAiiiPct8Z)6w8wv*xK z%~zugJ&r{VmXEW7K|&$OHLfhY>3H$x6zg+A~Q!C6b*&#cUjoJZuY*Yo z{7u1xDHujUNL+dxTr4udmhM-X)Mr>CVTBFrk@d{j=}hV)ph|egBsxI3ZX*;P2@`NqC>~_#8bLj|F=9Jpiksa~pQX!i{ug0`%Y?FKViSD#WR3*_ z5g26Y94v=yltu-dIg_ne+2rbUk!RIyGK=ervAZ+I(t;gc(jP#TUc(+3-*|SWl@ER)wPHEq~A^=VrWA+%O zUd<-ieAB_IB3L++iSrWH^gX#fj zXv1fB5dii$1gi&)YB7UzZhi4Vrj3xs3}<`MFOf=ORe{yt^W9zAAkA@46J1hWWzq;{u7N{ zJU1I9U0gbcGr<1Oi2wIlI+mmo?xYAlb2f3R@I}4A3G-{h(sRGutq6H z`)qWLMw=H@O~a~%eDl%%$G1)MXS6)0SL~D(Ks%ch_o;Jp&eLdqm98$L?ksm0c){FS zJ|tc}Bwjw`;1-ymwwWaL*@P;XYQAQwE{_q3ww2do56dOR#!t5gvr{|y>)DyV1b_X9vnrmR<)g2Mm` zRSYZqf5C>PlHg2-km6tm0OdUlO$i2x@*4FLJ14mvtbKc=j<%KZ_E6sAj_PITC=we) zsUwtR;iG!t$(BnWr4C#Xn)+uO$5k~Ah>e_`D*;P(w9|2l9{x5uI1%Fh-;3ma7&uru z6*6)Qrp%0cB2JkhuH>J#hs7Rgy=d5nh6QQ84tlE2^`f!o+2`^p&9zZ_ zAoER=x%gijGg=Mn z0-tnVW+R^yvvK7R9*pK#+7`y1Mrn_vqsAf@Z?U!pBS$kid?@>N?6$aw)*kq`F7*1r z?}68A_43ncJr?5ZIxX`Q)?T$sRp-4vgd^;*8y(CY?-U$B)JQl#hlLuW0y{Z3hgChL zo2P|PRH`7;!$N45G>z~6%#V|u8 z<=lI)@23(vh+rtPdj@ zk%L;TvK-rXt7b63yqKi80MkcF`dWoh1$9y>(u#II1e6uvmUw|O7S`E5ohyh_U0Dh4 z876MfU3K7L9ug~!&z9{YHP`Jp8Wlx3&hiBAif$H@93Vc=CeNcS#YzB*1X&zH`bvR2 z+fpnSqSmUrLa-otLg-$To4V;{m+8z}9NYO!d~`O7CK;&NW0$8;wW3IEUOqOfTBm!j zz`M6W5*Ht;8Y49Q59U_oSGpc2wdj>AIT(hMc`=;Rs%Xna5OFhAegzvlqzh< zteWo(EG%!3@ce9f@7eMJXGMNC0GP)CR3dX$%)Z6Cb7Yw8v2` zI{{;a$PU1i;6u_nj!G8J#45YQ)^Z$4R-HQX(~AV>{emf0&Bvf-j-Gem@Xe_TIYe=4 zOL*D=1tK}~c#Lp`OHK9A!37n3Wl(F;k3WC>_1(|!zd!r%w~xR6>Bs+#pd~k`)BjJ! 
zv6iBYuHe%ndZ4hU*?gzec}YfstaN!gcPtp#(+!Ha#3;ukh7X>n<-rR4I1Y8>L~Sym#gGn8ZO-?)aI04wgUcAsu4-TH_;R=*sj1K$;2P{6)9|ma!eU zahth|(;4jE_AJSi$y-# zErmdwW?5du=NQqQ9}FoDA3KQlo}Exj`XHr@fARrbVS*lPR&sdgdta`=gK(*Efo6js zA*|v=$R4bL9IRxpNxW;*?FO?CXIGWNm(SA)vJ%$~l{d9f4SrJn{@QSHAc;ivaziQ9VBYtvM3^a zGqB(a(bAe%*EWag77kHKuT);oR zHbii>aqffMdvSJG(RyQo#|{|67q&)*9d%$o^f>A`Qm*)hcOsNH9Bv4dU|u(Y!Cm9~ zNH(Rw3JMaG3bT^*u_@uSr%L;_u7K6eq!|=>-gfEIYd4wB5gwiw#T*;1XK=zKyIt1b zSIJ@!JQAc#f*N0}^pv?Xs=DA2;FSoU$t}3FT9X&&0Kq^$zrP8rXW%q2btZfGZ$eus zgm0?}Etx(52jB+#N1B7xGD;)ZZP|1&#d!;EG8Xduy7(|!GIa?vnm9+L0TF8&5b+dL zjx8B03zknvBbYcb%Je8U=L(jN^2Uyo)LISg<;IG*`J&9DbUK^gM^!p1mx2C8UPp0L zoPpMK)TGts=(+h-S=@qANP%=SF?2TEkPoAyq9%10we z`2xLw_sutYSA*L`0-gv>Lu}aKAB+z82LE)UDHQUJ`_L4Ws8>jo!BN$d;8>mPVexB$ zRmjp0Cag~LIxT)Hn3hTfO}wB)^f39gqDE-%>XZr-Yy*cD)lBU;tmwlpfBWz^xM5Ru zzW?F-_ka2Dw;zB06@HJ}Hp|b8yJ%cy7xQ+!^hrVhXoL_NSaTt1j}U;t?iX(q%qWg* zJ%^kBxo?E#s9Xzc4t>p@6SXmpCt=(MQ5#!9bPv&k*oF3u2;(@{iWxKXWc`1>fB*g; zfBk=>R%OSUhhcPoRB=kR5@-GLP_+raB%c=1U&^eyg7+0*AzTgd^ueDpib)<)qF}A* zQI^6jvUG6Yezw>-8p2Tbf@ZlsfNwK^{j>OjtrT8pEQrFAE|2Lih}7T}h*x>!!-3Dh zY$Rd)>Vk$J`3R^s%J>1*Ow(6qkP28Oit?btoJ-onAO$cv!s!JQxkv3mDb~lQE{KB# z3M;nC64rR0?9?cI?#2VbN|=!*j87~yPpG6|phq+(yICuDMOX=nd>7Q_JN6}>a-qTF zor=_0)=;&sUVi>myx}M0gv74zV{(6jBMjlQ(8Co+A&wIK3Db_$G&uT>#1?Ol`%0Yx zl$@|&0VZz(TnVt81`5zf(k4@WK!Dct5TKNbshznhaHcTIt&;6_<{KVYqZE`&R;((g zYI2z{^7AihPkJY|cL>hY^YDP!gw<5f_c0x~VT#in-=dg=*PrI-!C#hw^8XG7%c|vTfI?{cIPT9et zqAW~ycW<)mbKb#jH5ecaR+oxs8!kHuraIE>v}8v)r&hEK>KhiOF1xr$z8GxRi=%2hG8lxy>Hs9gEy+e*UrsMU%yr9|)^rJ`MSiQlGErh>O;rgqBo?oFn9 zwhGlUrPkB`(k)X$Ob?lA*u!Rr!xr?eQJiC#{a||cO~?-X&au?1=SN+#3jzCq0^jWVwv`4pr%e3nTI^jy54z3_wig0ODPe%=eOQ;^Ji=mZD zc4ONNJ$k{oKJ4~B>T>JYoyhkK=R%wHu6uVfDZ&9XLRevyXT4!2mk7Pr_RI^^c-h7M zhCP?%5oWKWr6Bz9u>Z+ z!d6oaC!`|5UXsx^7+WJH4kYAlJqcI9c9P5vTj<*MfFxJl*BXp`NZfSRmO* z%`9W0M6D@b^MZy-@$OBE16(?kq9I1w@H&(>H_Jm()F3S)T1KCgmLcQG z5z_%*hFCjPEc%HW=}Usm0dAQHma7Hz&6Vc1wL6iM0u%LgFI%+fb*Lpj#Sw&XY+YoR zi+UD7J^41xZ?dw;-Q^otF*wSsZvDf;VQi5-rqn50NuBl!KDJZopBB|To(?mcEwLCl 
zA>opyM0g}kS*N5y4RaAH>Eun){UsfA;-aJ#6DmB`NvY7`lBR@WXZmix{LY>CrS;}=ig=32tH{u2ho){-6v%p1Mr{H z(fl;U`8erfdRmpE(|RSGIa!=d=-J9uIoYNcTSF`kl%2E83S{Y4;ae-1IkCpj!11a= z(M?_qx?ze7(N(t1((*uHk@0YJEf_nTwrotJl;YeT&X<06i@8f#FpDYv;4A?>(Sil% zX)GrU$;X%nZ!5I6Umo7Dg+YC48jQE4G< +
+
daily ci spend
--
+
cost / merge
--
+
mq success rate
--
+
flakes / day
--
+
prs merged / day
--
+
+ + +
+
+

daily ci cost + 7-day rolling cost per merge

+
+
+
+

merge queue: daily outcomes + success rate

+
+
+
+

flakes + test failures per day

+
+
+
+ + +
flakes by pipeline
+
+ + + +
+
+ + +
author ci profile
+
+ + + +
+
+ + + + + diff --git a/ci3/ci-metrics/views/cost-overview.html b/ci3/ci-metrics/views/cost-overview.html new file mode 100644 index 000000000000..53424a2d2d70 --- /dev/null +++ b/ci3/ci-metrics/views/cost-overview.html @@ -0,0 +1,905 @@ + + + + + ACI - Cost Overview + + + +
+ +

cost overview

+ +
+ + + + | + + + | + + + + | + +
+ +
+ +
+
Overview
+
Resource Details
+
CI Attribution
+
+ +
+
+ +
+
+

combined daily spend

+
+
+
+

service category breakdown

+
+
+
+

aws vs gcp split

+
+
+
+ + + + +
+
+ +
+
+
+
+ + + +
+
+
+ +
+
+ +
+
+
+

ci cost by run type (time series)

+
+
+
+

cost by user (AWS + GCP)

+
+
+
+

cost by run type

+
+
+
+

instances

+
+ + + +
+
+
+ + + + + diff --git a/ci3/ci-metrics/views/test-timings.html b/ci3/ci-metrics/views/test-timings.html new file mode 100644 index 000000000000..0bf6c7213bd6 --- /dev/null +++ b/ci3/ci-metrics/views/test-timings.html @@ -0,0 +1,289 @@ + + + + + ACI - Test Timings + + + + + +

test timings

+ +
+ + + + + | + + + | + + + | + + +
+ +
loading...
+ +
+ +
+
+

avg duration by day

+
+
+
+

test run count by day

+
+
+
+ +

tests by duration

+
+ + + + + + + + + + + + + + + + +
test commandrunsavg (s)min (s)max (s)total (h)pass %passedfailedflaked
+
+ +

slowest individual runs

+
+ + + + + + + + + + + + + +
test commandduration (s)statusdateauthorpipelinelog
+
+ + + + + diff --git a/ci3/dashboard/Dockerfile b/ci3/dashboard/Dockerfile index 2ca190fd9753..2da7805ffa83 100644 --- a/ci3/dashboard/Dockerfile +++ b/ci3/dashboard/Dockerfile @@ -16,7 +16,12 @@ RUN apt update && apt install -y \ WORKDIR /app COPY requirements.txt requirements.txt RUN pip install --no-cache-dir -r requirements.txt gunicorn + +# Install ci-metrics dependencies (ci-metrics runs as subprocess) +COPY ci-metrics/requirements.txt ci-metrics/requirements.txt +RUN pip install --no-cache-dir -r ci-metrics/requirements.txt + RUN git config --global --add safe.directory /aztec-packages COPY . . -EXPOSE 8080 +EXPOSE 8080 8081 CMD ["gunicorn", "-w", "100", "-b", "0.0.0.0:8080", "rk:app"] diff --git a/ci3/dashboard/deploy.sh b/ci3/dashboard/deploy.sh index cc417006d072..1d9e930e95a1 100755 --- a/ci3/dashboard/deploy.sh +++ b/ci3/dashboard/deploy.sh @@ -1,7 +1,13 @@ #!/bin/bash set -euo pipefail -rsync -avz --exclude='deploy.sh' -e "ssh -i ~/.ssh/build_instance_key" * ubuntu@ci.aztec-labs.com:rk +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Sync dashboard (rkapp) files +rsync -avz --exclude='deploy.sh' -e "ssh -i ~/.ssh/build_instance_key" "$SCRIPT_DIR"/* ubuntu@ci.aztec-labs.com:rk + +# Sync ci-metrics server (started as subprocess by rkapp) +rsync -avz -e "ssh -i ~/.ssh/build_instance_key" "$SCRIPT_DIR/../ci-metrics/" ubuntu@ci.aztec-labs.com:rk/ci-metrics/ ssh -i ~/.ssh/build_instance_key ubuntu@ci.aztec-labs.com " cd rk diff --git a/ci3/dashboard/rk.py b/ci3/dashboard/rk.py index 4e194cbc3a10..aedf35a824e2 100644 --- a/ci3/dashboard/rk.py +++ b/ci3/dashboard/rk.py @@ -18,13 +18,40 @@ YELLOW, BLUE, GREEN, RED, PURPLE, BOLD, RESET, hyperlink, r, get_section_data, get_list_as_string ) - LOGS_DISK_PATH = os.getenv('LOGS_DISK_PATH', '/logs-disk') DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', 'password') +CI_METRICS_PORT = int(os.getenv('CI_METRICS_PORT', '8081')) +CI_METRICS_URL = os.getenv('CI_METRICS_URL', 
f'http://localhost:{CI_METRICS_PORT}') + app = Flask(__name__) Compress(app) auth = HTTPBasicAuth() +# Start the ci-metrics server as a subprocess +# Check sibling dir (repo layout) then subdirectory (Docker layout) +_ci_metrics_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'ci-metrics') +if not os.path.isdir(_ci_metrics_dir): + _ci_metrics_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'ci-metrics') +if os.path.isdir(_ci_metrics_dir): + # Kill any stale process on the port (e.g. leftover from previous reload) + import signal + try: + out = subprocess.check_output( + ['lsof', '-ti', f':{CI_METRICS_PORT}'], stderr=subprocess.DEVNULL, text=True) + for pid in out.strip().split('\n'): + if pid: + os.kill(int(pid), signal.SIGTERM) + import time; time.sleep(0.5) + except (subprocess.CalledProcessError, OSError): + pass + _ci_metrics_env = {**os.environ, 'CI_METRICS_PORT': str(CI_METRICS_PORT)} + subprocess.Popen( + ['gunicorn', '-w', '4', '-b', f'0.0.0.0:{CI_METRICS_PORT}', '--timeout', '120', 'app:app'], + cwd=_ci_metrics_dir, + env=_ci_metrics_env, + ) + print(f"[rk.py] ci-metrics server started on port {CI_METRICS_PORT}") + def read_from_disk(key): """Read log from disk as fallback when Redis key not found.""" try: @@ -145,6 +172,14 @@ def root() -> str: f"{hyperlink('https://aztecprotocol.github.io/benchmark-page-data/bench?branch=next', 'next')}\n" f"{hyperlink('/chonk-breakdowns', 'chonk breakdowns')}\n" f"{RESET}" + f"\n" + f"CI Metrics:\n" + f"\n{YELLOW}" + f"{hyperlink('/cost-overview', 'cost overview (AWS + GCP)')}\n" + f"{hyperlink('/namespace-billing', 'namespace billing')}\n" + f"{hyperlink('/ci-insights', 'ci insights')}\n" + f"{hyperlink('/test-timings', 'test timings')}\n" + f"{RESET}" ) def section_view(section: str) -> str: @@ -487,6 +522,57 @@ def make_options(param_name, options, current_value, suffix=''): # Redirect to log view. 
return redirect(f'/{run_id}') + +# ---- Reverse proxy to ci-metrics server ---- + +_proxy_session = requests.Session() +_HOP_BY_HOP = frozenset([ + 'connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', + 'te', 'trailers', 'transfer-encoding', 'upgrade', 'content-length', + # `requests` auto-decompresses gzip responses, so Content-Encoding is + # stale — strip it so the browser doesn't try to decompress plain content. + # Flask-Compress on rkapp handles browser compression. + 'content-encoding', +]) +# Don't forward Accept-Encoding — let `requests` negotiate with ci-metrics +# (it adds its own and auto-decompresses). +_STRIP_REQUEST_HEADERS = frozenset(['host', 'accept-encoding']) + +def _proxy(path): + """Forward request to ci-metrics, streaming the response back.""" + url = f'{CI_METRICS_URL}/{path.lstrip("/")}' + try: + resp = _proxy_session.request( + method=request.method, + url=url, + params=request.args, + data=request.get_data(), + headers={k: v for k, v in request.headers if k.lower() not in _STRIP_REQUEST_HEADERS}, + stream=True, + timeout=60, + ) + # Strip hop-by-hop headers + headers = {k: v for k, v in resp.headers.items() if k.lower() not in _HOP_BY_HOP} + return Response(resp.iter_content(chunk_size=8192), + status=resp.status_code, headers=headers) + except Exception as e: + return Response(json.dumps({'error': f'ci-metrics unavailable: {e}'}), + mimetype='application/json', status=502) + +@app.route('/namespace-billing') +@app.route('/ci-health') +@app.route('/ci-insights') +@app.route('/cost-overview') +@app.route('/test-timings') +@auth.login_required +def proxy_dashboard(): + return _proxy(request.path) + +@app.route('/api/', methods=['GET', 'POST', 'PUT', 'DELETE']) +@auth.login_required +def proxy_api(path): + return _proxy(f'/api/{path}') + @app.route('/') @auth.login_required def get_value(key): diff --git a/ci3/log_ci_run b/ci3/log_ci_run index 5c9567ae91dd..b52b93256edc 100755 --- a/ci3/log_ci_run +++ b/ci3/log_ci_run @@ 
-35,6 +35,14 @@ if [ -z "$key" ]; then author="$(git log -1 --pretty=format:"%an")" name=$REF_NAME [ "$(aws_get_meta_data instance-life-cycle)" == "spot" ] && spot=true || spot=false + instance_type=$(aws_get_meta_data instance-type 2>/dev/null || echo "unknown") + instance_vcpus=$(nproc 2>/dev/null || echo 0) + + # Extract PR number from branch name or merge queue ref + pr_number="" + if [[ "$REF_NAME" =~ [Pp][Rr]-?([0-9]+) ]]; then + pr_number="${BASH_REMATCH[1]}" + fi # If this is github merge queue, just keep the queue name. if [[ "$name" =~ ^gh-readonly-queue/([^/]+)/ ]]; then @@ -42,6 +50,7 @@ if [ -z "$key" ]; then fi msg=$(pr_link "$msg") + dashboard="${range_key#ci-run-}" json=$(jq -c -j -n \ --argjson timestamp "$key" \ @@ -53,7 +62,12 @@ if [ -z "$key" ]; then --arg author "$author" \ --arg arch "$(arch)" \ --argjson spot "$spot" \ - '{timestamp: $timestamp, run_id: $run_id, job_id: $job_id, status: $status, msg: $msg, name: $name, author: $author, arch: $arch, spot: $spot}') + --arg instance_type "$instance_type" \ + --argjson instance_vcpus "$instance_vcpus" \ + --arg pr_number "$pr_number" \ + --arg dashboard "$dashboard" \ + --arg github_actor "${GITHUB_ACTOR:-}" \ + '{timestamp: $timestamp, run_id: $run_id, job_id: $job_id, status: $status, msg: $msg, name: $name, author: $author, github_actor: $github_actor, arch: $arch, spot: $spot, instance_type: $instance_type, instance_vcpus: $instance_vcpus, pr_number: $pr_number, dashboard: $dashboard}') # echo "$json" >&2 redis_cli ZADD $range_key $key "$json" &>/dev/null redis_cli SETEX hb-$key 60 1 &>/dev/null diff --git a/ci3/run_test_cmd b/ci3/run_test_cmd index 66334e535f27..35c37c4d0c42 100755 --- a/ci3/run_test_cmd +++ b/ci3/run_test_cmd @@ -160,7 +160,8 @@ if [ "$publish" -eq 1 ]; then --arg commit_hash "$COMMIT_HASH" \ --arg commit_author "$COMMIT_AUTHOR" \ --arg commit_msg "$COMMIT_MSG" \ - '{status: $status, test_cmd: $test_cmd, log_id: $log_id, log_url: $log_url, ref_name: $ref_name, commit_hash: 
$commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, timestamp: now | todate}') + --arg dashboard "${CI_DASHBOARD:-}" \ + '{status: $status, test_cmd: $test_cmd, log_id: $log_id, log_url: $log_url, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, dashboard: $dashboard, timestamp: now | todate}') redis_publish "ci:test:started" "$start_redis_data" fi @@ -228,15 +229,16 @@ function track_test_failed { function publish_redis { local redis_data=$(jq -n \ --arg status "$1" \ - --arg cmd "$cmd" \ - --arg log_key "$log_key" \ - --arg ref_name "$REF_NAME" \ + --arg test_cmd "$cmd" \ + --arg log_url "http://ci.aztec-labs.com/$log_key" \ + --arg ref_name "${TARGET_BRANCH:-$REF_NAME}" \ --arg commit_hash "$COMMIT_HASH" \ --arg commit_author "$COMMIT_AUTHOR" \ --arg commit_msg "$COMMIT_MSG" \ --argjson code "$code" \ --argjson duration "$SECONDS" \ - '{status: $status, cmd: $cmd, log_key: $log_key, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, exit_code: $code, duration_seconds: $duration, timestamp: now | todate}') + --arg dashboard "${CI_DASHBOARD:-}" \ + '{status: $status, test_cmd: $test_cmd, log_url: $log_url, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, exit_code: $code, duration_seconds: $duration, dashboard: $dashboard, timestamp: now | todate}') redis_publish "ci:test:$1" "$redis_data" } @@ -247,6 +249,8 @@ function pass { local line="${green}PASSED${reset}${log_info:-}: $test_cmd (${SECONDS}s)" echo -e "$line" + [ "$publish" -eq 1 ] && publish_redis "passed" + if [ "$track_test_history" -eq 1 ]; then local track_line="${green}PASSED${reset}${log_info:-} ${fail_links}: $test_cmd (${SECONDS}s) (${purple}$COMMIT_AUTHOR${reset}: $COMMIT_MSG)" track_test_history "$track_line" From 1618ded3107c9709015db24b3f7b62d61407343e Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Fri, 13 Feb 2026 
11:57:42 +0000 Subject: [PATCH 15/62] Removed unused interface from p2p client --- yarn-project/end-to-end/src/spartan/n_tps.test.ts | 4 ++-- yarn-project/p2p/src/client/interface.ts | 7 ------- yarn-project/p2p/src/client/p2p_client.ts | 11 +---------- .../p2p/src/mem_pools/tx_pool_v2/interfaces.ts | 2 +- .../txe/src/state_machine/dummy_p2p_client.ts | 4 ---- 5 files changed, 4 insertions(+), 24 deletions(-) diff --git a/yarn-project/end-to-end/src/spartan/n_tps.test.ts b/yarn-project/end-to-end/src/spartan/n_tps.test.ts index e52495e4cc4d..f43b0cd42830 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps.test.ts @@ -345,8 +345,8 @@ describe('sustained N TPS test', () => { let lowValueTxs = 0; const lowValueSendTx = async (wallet: TestWallet) => { lowValueTxs++; - //const feeAmount = Number(randomBigInt(10n)) + 1; - const feeAmount = 1; + const feeAmount = Number(randomBigInt(10n)) + 1; + //const feeAmount = 1; const fee = new GasFees(0, feeAmount); logger.info('Sending low value tx ' + lowValueTxs + ' with fee ' + feeAmount); diff --git a/yarn-project/p2p/src/client/interface.ts b/yarn-project/p2p/src/client/interface.ts index f70316f88715..1aa02f01a1c7 100644 --- a/yarn-project/p2p/src/client/interface.ts +++ b/yarn-project/p2p/src/client/interface.ts @@ -107,13 +107,6 @@ export type P2P = P2PApiFull & **/ sendTx(tx: Tx): Promise; - /** - * Adds transactions to the pool. Does not send to peers or validate the tx. - * @param txs - The transactions. - * @returns The number of txs added to the pool. Note if the transaction already exists, it will not be added again. - **/ - addTxsToPool(txs: Tx[]): Promise; - /** * Handles failed transaction execution by removing txs from the pool. * @param txHashes - Hashes of the transactions that failed execution. 
diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 8857575d8667..4bcfa04539a3 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -582,7 +582,7 @@ export class P2PClient **/ public async sendTx(tx: Tx): Promise { this.#assertIsReady(); - const result = await this.txPool.addPendingTxs([tx], { feeOnly: true }); + const result = await this.txPool.addPendingTxs([tx], { feeComparisonOnly: true }); if (result.accepted.length === 1) { await this.p2pService.propagate(tx); } else { @@ -592,15 +592,6 @@ export class P2PClient } } - /** - * Adds transactions to the pool. Does not send to peers or validate the txs. - * @param txs - The transactions. - **/ - public async addTxsToPool(txs: Tx[]): Promise { - this.#assertIsReady(); - return (await this.txPool.addPendingTxs(txs, { feeOnly: true })).accepted.length; - } - /** * Returns whether the given tx hash is flagged as pending or mined. * @param txHash - Hash of the tx to query. diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 84ae4097e7c8..908e42a1eee7 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -101,7 +101,7 @@ export interface TxPoolV2 extends TypedEventEmitter { * @param opts - Optional metadata (e.g., source for logging) * @returns Result categorizing each transaction as accepted, rejected, or ignored */ - addPendingTxs(txs: Tx[], opts?: { source?: string; feeOnly?: boolean }): Promise; + addPendingTxs(txs: Tx[], opts?: { source?: string; feeComparisonOnly?: boolean }): Promise; /** * Checks if a transaction can be added without modifying the pool. 
diff --git a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts index fa6615cc8b13..2dc0d77d9376 100644 --- a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts +++ b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts @@ -171,10 +171,6 @@ export class DummyP2P implements P2P { throw new Error('DummyP2P does not implement "hasTxsInPool"'); } - public addTxsToPool(_txs: Tx[]): Promise { - throw new Error('DummyP2P does not implement "addTxs"'); - } - public getSyncedLatestBlockNum(): Promise { throw new Error('DummyP2P does not implement "getSyncedLatestBlockNum"'); } From 1cad1d1c7fdf52c5f3decadb2c9fc59187aa63c9 Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Fri, 13 Feb 2026 12:17:26 +0000 Subject: [PATCH 16/62] Reverted change --- spartan/environments/five-tps-short-epoch.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spartan/environments/five-tps-short-epoch.env b/spartan/environments/five-tps-short-epoch.env index b6601d0ad866..85f36344fc19 100644 --- a/spartan/environments/five-tps-short-epoch.env +++ b/spartan/environments/five-tps-short-epoch.env @@ -55,7 +55,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=36 +SEQ_MAX_TX_PER_BLOCK=180 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests From a9776617c8ef9c8d47f655c9153e176b123fc855 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 12 Feb 2026 18:31:42 -0300 Subject: [PATCH 17/62] chore(validator): blob upload tests --- .../light/lightweight_checkpoint_builder.ts | 3 +- .../validator-client/src/validator.test.ts | 66 +++++++++++++++---- .../validator-client/src/validator.ts | 2 +- 3 files changed, 55 insertions(+), 16 deletions(-) diff --git a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts 
b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts index d193f436c7e5..1a142362b1fb 100644 --- a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts +++ b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts @@ -248,8 +248,7 @@ export class LightweightCheckpointBuilder { ); const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]); - // TODO(palla/mbps): Should we source this from the constants instead? - // timestamp of a checkpoint is the timestamp of the last block in the checkpoint. + // All blocks in the checkpoint have the same timestamp const timestamp = blocks[blocks.length - 1].timestamp; const totalManaUsed = blocks.reduce((acc, block) => acc.add(block.header.totalManaUsed), Fr.ZERO); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index ac288b1a58a1..879d2a55da49 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -61,7 +61,7 @@ describe('ValidatorClient', () => { > & { disableTransactions: boolean; }; - let validatorClient: ValidatorClient; + let validatorClient: TestValidatorClient; let p2pClient: MockProxy; let blockSource: MockProxy; let l1ToL2MessageSource: MockProxy; @@ -148,7 +148,7 @@ describe('ValidatorClient', () => { }; keyStoreManager = new KeystoreManager(keyStore); - validatorClient = await ValidatorClient.new( + validatorClient = (await ValidatorClient.new( config, checkpointsBuilder, worldState, @@ -160,7 +160,7 @@ describe('ValidatorClient', () => { keyStoreManager, blobClient, dateProvider, - ); + )) as TestValidatorClient; }); describe('createBlockProposal', () => { @@ -411,6 +411,7 @@ describe('ValidatorClient', () => { it('should attest to a checkpoint proposal after validating a block for that slot', async () => { const addCheckpointAttestationsSpy = jest.spyOn(p2pClient, 
'addOwnCheckpointAttestations'); + const uploadBlobsSpy = jest.spyOn(validatorClient, 'uploadBlobsForCheckpoint'); const didValidate = await validatorClient.validateBlockProposal(proposal, sender); expect(didValidate).toBe(true); @@ -425,12 +426,18 @@ describe('ValidatorClient', () => { }, }); + // Enable blob upload for this attestation + blobClient.canUpload.mockReturnValue(true); + validatorClient.updateConfig({ skipCheckpointProposalValidation: true }); const attestations = await validatorClient.attestToCheckpointProposal(checkpointProposal, sender); expect(attestations).toBeDefined(); expect(attestations).toHaveLength(1); expect(addCheckpointAttestationsSpy).toHaveBeenCalledTimes(1); + expect(uploadBlobsSpy).toHaveBeenCalled(); + + uploadBlobsSpy.mockRestore(); }); it('should wait for previous block to sync', async () => { @@ -689,16 +696,6 @@ describe('ValidatorClient', () => { // block_proposal_handler.ts. }); - // TODO(palla/mbps): Blob upload functionality has been moved to checkpoint proposal handling (Phase 6) - // These tests are skipped until the blob upload is implemented in the new location. 
- describe.skip('filestore blob upload', () => { - it.todo('should upload blobs to filestore after successful checkpoint proposal'); - it.todo('should not attempt upload when fileStoreBlobUploadClient is undefined'); - it.todo('should not fail when blob upload fails'); - it.todo('should trigger re-execution when filestore is configured even if validatorReexecute is false'); - it.todo('should not upload blobs when validation fails'); - }); - it('should validate proposals in fisherman mode but not create or broadcast attestations', async () => { // Enable fisherman mode (which also triggers re-execution) validatorClient.updateConfig({ fishermanMode: true }); @@ -791,6 +788,42 @@ describe('ValidatorClient', () => { }); }); + describe('uploadBlobsForCheckpoint', () => { + const proposalInfo = { slotNumber: 1, archive: '0x00', proposer: '0x00', txCount: 0 }; + + it('should send blobs from blocks in the slot to filestore', async () => { + const blobFields = [Fr.random(), Fr.random()]; + const mockBlock = { toBlobFields: () => blobFields } as unknown as L2Block; + blockSource.getBlockHeaderByArchive.mockResolvedValue(makeBlockHeader()); + blockSource.getBlocksForSlot.mockResolvedValue([mockBlock]); + + const proposal = await makeCheckpointProposal({ lastBlock: {} }); + await validatorClient.uploadBlobsForCheckpoint(proposal, proposalInfo); + + expect(blockSource.getBlocksForSlot).toHaveBeenCalledWith(proposal.slotNumber); + expect(blobClient.sendBlobsToFilestore).toHaveBeenCalled(); + }); + + it('should not upload if last block header is not found', async () => { + blockSource.getBlockHeaderByArchive.mockResolvedValue(undefined); + + const proposal = await makeCheckpointProposal({ lastBlock: {} }); + await validatorClient.uploadBlobsForCheckpoint(proposal, proposalInfo); + + expect(blobClient.sendBlobsToFilestore).not.toHaveBeenCalled(); + }); + + it('should not throw when blob upload fails', async () => { + const mockBlock = { toBlobFields: () => [Fr.random()] } as 
unknown as L2Block; + blockSource.getBlockHeaderByArchive.mockResolvedValue(makeBlockHeader()); + blockSource.getBlocksForSlot.mockResolvedValue([mockBlock]); + blobClient.sendBlobsToFilestore.mockRejectedValue(new Error('upload failed')); + + const proposal = await makeCheckpointProposal({ lastBlock: {} }); + await expect(validatorClient.uploadBlobsForCheckpoint(proposal, proposalInfo)).resolves.toBeUndefined(); + }); + }); + describe('configuration', () => { it('should use VALIDATOR_PRIVATE_KEY for validatorPrivateKeys when VALIDATOR_PRIVATE_KEYS is not set', () => { const originalEnv = process.env; @@ -830,3 +863,10 @@ describe('ValidatorClient', () => { }); }); }); + +/** Exposes protected methods for direct testing */ +class TestValidatorClient extends ValidatorClient { + declare public uploadBlobsForCheckpoint: ( + ...args: Parameters + ) => Promise; +} diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 682aaf6997ed..49d52134179a 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -732,7 +732,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) /** * Uploads blobs for a checkpoint to the filestore (fire and forget). 
*/ - private async uploadBlobsForCheckpoint(proposal: CheckpointProposalCore, proposalInfo: LogData): Promise { + protected async uploadBlobsForCheckpoint(proposal: CheckpointProposalCore, proposalInfo: LogData): Promise { try { const lastBlockHeader = await this.blockSource.getBlockHeaderByArchive(proposal.archive); if (!lastBlockHeader) { From 2d3a235b2b6e7cc0367819c8f4864f2f5d9e3d9a Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Fri, 13 Feb 2026 12:48:57 +0000 Subject: [PATCH 18/62] Review changes --- yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts | 2 +- yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 5 +++-- yarn-project/p2p/src/test-helpers/testbench-utils.ts | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts index 6fc3b915df9f..42d8a39406be 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts @@ -70,7 +70,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte // === Core Operations === - addPendingTxs(txs: Tx[], opts: { source?: string; feeOnly?: boolean } = {}): Promise { + addPendingTxs(txs: Tx[], opts: { source?: string; feeComparisonOnly?: boolean } = {}): Promise { return this.#queue.put(() => this.#impl.addPendingTxs(txs, opts)); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index b6080f47be0b..240f28c4af59 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -178,14 +178,15 @@ export class TxPoolV2Impl { this.#log.info(`Deleted ${toDelete.length} invalid/rejected transactions on startup`, { txHashes: toDelete }); } - async addPendingTxs(txs: Tx[], opts: { source?: string; feeOnly?: boolean }): Promise { + 
async addPendingTxs(txs: Tx[], opts: { source?: string; feeComparisonOnly?: boolean }): Promise { const accepted: TxHash[] = []; const ignored: TxHash[] = []; const rejected: TxHash[] = []; const acceptedPending = new Set(); const poolAccess = this.#createPreAddPoolAccess(); - const preAddContext: PreAddContext | undefined = opts.feeOnly !== undefined ? { feeOnly: opts.feeOnly } : undefined; + const preAddContext: PreAddContext | undefined = + opts.feeComparisonOnly !== undefined ? { feeOnly: opts.feeComparisonOnly } : undefined; await this.#store.transactionAsync(async () => { for (const tx of txs) { diff --git a/yarn-project/p2p/src/test-helpers/testbench-utils.ts b/yarn-project/p2p/src/test-helpers/testbench-utils.ts index d556dbdcc9bf..cfa6a923fcb8 100644 --- a/yarn-project/p2p/src/test-helpers/testbench-utils.ts +++ b/yarn-project/p2p/src/test-helpers/testbench-utils.ts @@ -59,7 +59,7 @@ export class InMemoryTxPool extends EventEmitter implements TxPoolV2 { // === Core Operations (TxPoolV2) === - addPendingTxs(txs: Tx[], opts?: { source?: string; feeOnly?: boolean }): Promise { + addPendingTxs(txs: Tx[], opts?: { source?: string; feeComparisonOnly?: boolean }): Promise { const accepted: TxHash[] = []; const newTxs: Tx[] = []; for (const tx of txs) { From 9d2307a43b4899a5b7a8e1def380226771e0554f Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Fri, 13 Feb 2026 12:50:50 +0000 Subject: [PATCH 19/62] Review changes --- .../tx_pool_v2/eviction/eviction_manager.test.ts | 2 +- .../p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts | 2 +- .../tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts | 8 ++++---- .../p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts | 8 ++++---- .../p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts index 
1419fd85e0be..f7fd0d3252de 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts @@ -216,7 +216,7 @@ describe('EvictionManager', () => { evictionManager.registerPreAddRule(preAddRule); const incomingMeta = createMeta('0x1111', 100n); - const context = { feeOnly: true }; + const context = { feeComparisonOnly: true }; await evictionManager.runPreAddRules(incomingMeta, poolAccess, context); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index 81d4bb659014..ab5af10718cf 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -90,7 +90,7 @@ export interface PreAddResult { /** Context passed to pre-add rules from addPendingTxs. */ export interface PreAddContext { /** If true, compare priority fee only (no tx hash tiebreaker). Used for RPC submissions. 
*/ - feeOnly?: boolean; + feeComparisonOnly?: boolean; } /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index 21a782a5cbfb..e2fc58850110 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -168,7 +168,7 @@ describe('LowPriorityPreAddRule', () => { expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); }); - it('uses feeOnly: same fee, incoming is ignored even if it wins hash tiebreaker', async () => { + it('uses feeComparisonOnly: same fee, incoming is ignored even if it wins hash tiebreaker', async () => { const existing = createMeta('0x1111', 100n); const incoming = createMeta('0x2222', 100n); @@ -177,7 +177,7 @@ describe('LowPriorityPreAddRule', () => { const [incomingMeta, lowestPriorityMeta] = cmp > 0 ? 
[incoming, existing] : [existing, incoming]; const poolAccess = createPoolAccess(100, lowestPriorityMeta); - const context: PreAddContext = { feeOnly: true }; + const context: PreAddContext = { feeComparisonOnly: true }; // feeOnly mode: same fee means ignored (no hash tiebreaker) const result = await rule.check(incomingMeta, poolAccess, context); @@ -197,7 +197,7 @@ describe('LowPriorityPreAddRule', () => { expect(result1.txHashesToEvict).toContain('0x2222'); // With feeOnly - const result2 = await rule.check(incomingMeta, poolAccess, { feeOnly: true }); + const result2 = await rule.check(incomingMeta, poolAccess, { feeComparisonOnly: true }); expect(result2.shouldIgnore).toBe(false); expect(result2.txHashesToEvict).toContain('0x2222'); }); @@ -212,7 +212,7 @@ describe('LowPriorityPreAddRule', () => { expect(result1.shouldIgnore).toBe(true); // With feeOnly - const result2 = await rule.check(incomingMeta, poolAccess, { feeOnly: true }); + const result2 = await rule.check(incomingMeta, poolAccess, { feeComparisonOnly: true }); expect(result2.shouldIgnore).toBe(true); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts index c2c4eb933b36..200d8e16709c 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts @@ -3509,7 +3509,7 @@ describe('TxPoolV2', () => { // Same fee as the lowest — with feeOnly, no hash tiebreaker, always ignored const tx3 = await mockTxWithFee(3, 10); - const result = await pool.addPendingTxs([tx3], { feeOnly: true }); + const result = await pool.addPendingTxs([tx3], { feeComparisonOnly: true }); expect(toStrings(result.ignored)).toContain(hashOf(tx3)); expect(result.accepted).toHaveLength(0); @@ -3527,7 +3527,7 @@ describe('TxPoolV2', () => { clearCallbackTracking(); const tx3 = await mockTxWithFee(3, 15); - const result = await pool.addPendingTxs([tx3], { feeOnly: true 
}); + const result = await pool.addPendingTxs([tx3], { feeComparisonOnly: true }); expect(toStrings(result.accepted)).toContain(hashOf(tx3)); expect(await pool.getPendingTxCount()).toBe(2); @@ -3545,7 +3545,7 @@ describe('TxPoolV2', () => { clearCallbackTracking(); const tx3 = await mockTxWithFee(3, 5); - const result = await pool.addPendingTxs([tx3], { feeOnly: true }); + const result = await pool.addPendingTxs([tx3], { feeComparisonOnly: true }); expect(toStrings(result.ignored)).toContain(hashOf(tx3)); expect(await pool.getPendingTxCount()).toBe(2); @@ -3558,7 +3558,7 @@ describe('TxPoolV2', () => { const tx1 = await mockTxWithFee(1, 10); // Both modes accept when below capacity - const result1 = await pool.addPendingTxs([tx1], { feeOnly: true }); + const result1 = await pool.addPendingTxs([tx1], { feeComparisonOnly: true }); expect(result1.accepted).toHaveLength(1); const tx2 = await mockTxWithFee(2, 10); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 240f28c4af59..73a16d061228 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -186,7 +186,7 @@ export class TxPoolV2Impl { const poolAccess = this.#createPreAddPoolAccess(); const preAddContext: PreAddContext | undefined = - opts.feeComparisonOnly !== undefined ? { feeOnly: opts.feeComparisonOnly } : undefined; + opts.feeComparisonOnly !== undefined ? 
{ feeComparisonOnly: opts.feeComparisonOnly } : undefined; await this.#store.transactionAsync(async () => { for (const tx of txs) { From 513c526d138189938afde63dac8d1f8f32bf4936 Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Fri, 13 Feb 2026 12:58:22 +0000 Subject: [PATCH 20/62] Fix --- .../mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts index 82b874303a63..fa5cc0360dcd 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts @@ -43,7 +43,7 @@ export class LowPriorityPreAddRule implements PreAddRule { // Compare incoming tx against lowest priority tx. // feeOnly mode (RPC): use strict fee comparison only — avoids churn from hash ordering // Default (gossip): use full comparePriority (fee + tx hash tiebreaker) for determinism - const isHigherPriority = context?.feeOnly + const isHigherPriority = context?.feeComparisonOnly ? 
incomingMeta.priorityFee > lowestPriorityMeta.priorityFee : comparePriority(incomingMeta, lowestPriorityMeta) > 0; From 3a3a39759a2f6c191ce562bfd2578dac279edcdf Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Thu, 12 Feb 2026 10:43:55 +0000 Subject: [PATCH 21/62] automatically stop node from signalling --- .../ethereum/src/contracts/empire_base.ts | 2 + .../src/contracts/empire_slashing_proposer.ts | 6 + .../src/contracts/governance_proposer.ts | 6 + .../src/publisher/sequencer-publisher.test.ts | 115 ++++++++++++++++++ .../src/publisher/sequencer-publisher.ts | 27 ++++ 5 files changed, 156 insertions(+) diff --git a/yarn-project/ethereum/src/contracts/empire_base.ts b/yarn-project/ethereum/src/contracts/empire_base.ts index 470137b78604..cd978719250e 100644 --- a/yarn-project/ethereum/src/contracts/empire_base.ts +++ b/yarn-project/ethereum/src/contracts/empire_base.ts @@ -22,6 +22,8 @@ export interface IEmpireBase { signerAddress: Hex, signer: (msg: TypedDataDefinition) => Promise, ): Promise; + /** Checks if a payload was ever submitted to governance via submitRoundWinner. */ + hasPayloadBeenProposed(payload: Hex): Promise; } export function encodeSignal(payload: Hex): Hex { diff --git a/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts b/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts index 565d62260c16..77e25238611e 100644 --- a/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts +++ b/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts @@ -126,6 +126,12 @@ export class EmpireSlashingProposerContract extends EventEmitter implements IEmp }; } + /** Checks if a payload was ever submitted to governance via submitRoundWinner. 
*/ + public async hasPayloadBeenProposed(payload: Hex): Promise { + const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock: 0n, strict: true }); + return events.length > 0; + } + public listenToSubmittablePayloads(callback: (args: { payload: `0x${string}`; round: bigint }) => unknown) { return this.proposer.watchEvent.PayloadSubmittable( {}, diff --git a/yarn-project/ethereum/src/contracts/governance_proposer.ts b/yarn-project/ethereum/src/contracts/governance_proposer.ts index fe4611105bdf..3fa65643f0de 100644 --- a/yarn-project/ethereum/src/contracts/governance_proposer.ts +++ b/yarn-project/ethereum/src/contracts/governance_proposer.ts @@ -110,6 +110,12 @@ export class GovernanceProposerContract implements IEmpireBase { }; } + /** Checks if a payload was ever submitted to governance via submitRoundWinner. */ + public async hasPayloadBeenProposed(payload: Hex): Promise { + const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock: 0n, strict: true }); + return events.length > 0; + } + public async submitRoundWinner( round: bigint, l1TxUtils: L1TxUtils, diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 29f39e0c2581..e05c1c263ed5 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -418,4 +418,119 @@ describe('SequencerPublisher', () => { ), ).toEqual(false); }); + + it('stops signalling when payload was previously proposed', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed.mockResolvedValue(true); + + expect( + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + 
).toEqual(false); + }); + + it('continues signalling when payload was NOT proposed', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed.mockResolvedValue(false); + + expect( + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + ).toEqual(true); + }); + + it('caches proposed result and prevents repeated L1 calls', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed.mockResolvedValue(true); + + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ); + + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(3), + 2n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ); + + expect(governanceProposerContract.hasPayloadBeenProposed).toHaveBeenCalledTimes(1); + }); + + it('retries on transient RPC failure and succeeds', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed + .mockRejectedValueOnce(new Error('RPC error')) + .mockRejectedValueOnce(new Error('RPC error')) + .mockResolvedValueOnce(false); + + expect( + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + ).toEqual(true); + }); + + it('fails closed on persistent RPC failure', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed.mockRejectedValue(new Error('RPC error')); + + expect( + await publisher.enqueueGovernanceCastSignal( + 
govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + ).toEqual(false); + }); + + it('does not cache false result and re-checks on subsequent calls', async () => { + const { govPayload } = mockGovernancePayload(); + governanceProposerContract.hasPayloadBeenProposed.mockResolvedValueOnce(false).mockResolvedValueOnce(true); + + // First call: not proposed, signalling proceeds + expect( + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(2), + 1n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + ).toEqual(true); + + // Second call: now proposed, signalling stops + expect( + await publisher.enqueueGovernanceCastSignal( + govPayload, + SlotNumber(3), + 2n, + EthAddress.fromString(testHarnessAttesterAccount.address), + msg => testHarnessAttesterAccount.signTypedData(msg), + ), + ).toEqual(false); + + expect(governanceProposerContract.hasPayloadBeenProposed).toHaveBeenCalledTimes(2); + }); }); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index d5df5fdbf3c6..fd3a5cf15f7c 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -32,6 +32,7 @@ import type { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature, type ViemSignature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; +import { makeBackoff, retry } from '@aztec/foundation/retry'; import { bufferToHex } from '@aztec/foundation/string'; import { DateProvider, Timer } from '@aztec/foundation/timer'; import { EmpireBaseAbi, ErrorsAbi, RollupAbi } from '@aztec/l1-artifacts'; @@ -112,6 +113,7 @@ 
export class SequencerPublisher { protected lastActions: Partial> = {}; private isPayloadEmptyCache: Map = new Map(); + private payloadProposedCache: Set = new Set(); protected log: Logger; protected ethereumSlotDuration: bigint; @@ -691,6 +693,31 @@ export class SequencerPublisher { return false; } + // Check if payload was already submitted to governance + const cacheKey = payload.toString(); + if (!this.payloadProposedCache.has(cacheKey)) { + try { + const proposed = await retry( + () => base.hasPayloadBeenProposed(payload.toString()), + 'Check if payload was proposed', + makeBackoff([0, 1, 2]), + this.log, + true, + ); + if (proposed) { + this.payloadProposedCache.add(cacheKey); + } + } catch (err) { + this.log.warn(`Failed to check if payload ${payload} was proposed after retries, skipping signal`, err); + return false; + } + } + + if (this.payloadProposedCache.has(cacheKey)) { + this.log.info(`Payload ${payload} was already proposed to governance, stopping signals`); + return false; + } + const cachedLastVote = this.lastActions[signalType]; this.lastActions[signalType] = slotNumber; const action = signalType; From a5fe63e8a9dcd093d84a406eb03bf91078a31273 Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Fri, 13 Feb 2026 13:53:15 +0000 Subject: [PATCH 22/62] using L1 start block as fromBlock --- yarn-project/ethereum/src/contracts/empire_base.ts | 2 +- .../ethereum/src/contracts/empire_slashing_proposer.ts | 4 ++-- yarn-project/ethereum/src/contracts/governance_proposer.ts | 4 ++-- .../src/publisher/sequencer-publisher.test.ts | 1 + .../sequencer-client/src/publisher/sequencer-publisher.ts | 3 ++- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/yarn-project/ethereum/src/contracts/empire_base.ts b/yarn-project/ethereum/src/contracts/empire_base.ts index cd978719250e..377c279b3a2d 100644 --- a/yarn-project/ethereum/src/contracts/empire_base.ts +++ b/yarn-project/ethereum/src/contracts/empire_base.ts @@ -23,7 +23,7 @@ export interface 
IEmpireBase { signer: (msg: TypedDataDefinition) => Promise, ): Promise; /** Checks if a payload was ever submitted to governance via submitRoundWinner. */ - hasPayloadBeenProposed(payload: Hex): Promise; + hasPayloadBeenProposed(payload: Hex, fromBlock: bigint): Promise; } export function encodeSignal(payload: Hex): Hex { diff --git a/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts b/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts index 77e25238611e..8fc38f21b46b 100644 --- a/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts +++ b/yarn-project/ethereum/src/contracts/empire_slashing_proposer.ts @@ -127,8 +127,8 @@ export class EmpireSlashingProposerContract extends EventEmitter implements IEmp } /** Checks if a payload was ever submitted to governance via submitRoundWinner. */ - public async hasPayloadBeenProposed(payload: Hex): Promise { - const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock: 0n, strict: true }); + public async hasPayloadBeenProposed(payload: Hex, fromBlock: bigint): Promise { + const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock, strict: true }); return events.length > 0; } diff --git a/yarn-project/ethereum/src/contracts/governance_proposer.ts b/yarn-project/ethereum/src/contracts/governance_proposer.ts index 3fa65643f0de..a7c203a67bdf 100644 --- a/yarn-project/ethereum/src/contracts/governance_proposer.ts +++ b/yarn-project/ethereum/src/contracts/governance_proposer.ts @@ -111,8 +111,8 @@ export class GovernanceProposerContract implements IEmpireBase { } /** Checks if a payload was ever submitted to governance via submitRoundWinner. 
*/ - public async hasPayloadBeenProposed(payload: Hex): Promise { - const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock: 0n, strict: true }); + public async hasPayloadBeenProposed(payload: Hex, fromBlock: bigint): Promise { + const events = await this.proposer.getEvents.PayloadSubmitted({ payload }, { fromBlock, strict: true }); return events.length > 0; } diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index e05c1c263ed5..789dae9a37eb 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -126,6 +126,7 @@ describe('SequencerPublisher', () => { rollup = mock(); rollup.validateHeader.mockReturnValue(Promise.resolve()); + rollup.getL1StartBlock.mockResolvedValue(1n); (rollup as any).address = mockRollupAddress; forwardSpy = jest.spyOn(Multicall3, 'forward'); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index fd3a5cf15f7c..86bdd12612d1 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -697,8 +697,9 @@ export class SequencerPublisher { const cacheKey = payload.toString(); if (!this.payloadProposedCache.has(cacheKey)) { try { + const l1StartBlock = await this.rollupContract.getL1StartBlock(); const proposed = await retry( - () => base.hasPayloadBeenProposed(payload.toString()), + () => base.hasPayloadBeenProposed(payload.toString(), l1StartBlock), 'Check if payload was proposed', makeBackoff([0, 1, 2]), this.log, From e52b7d261d485740341a54ee9c812bfe51ef6be3 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 13 Feb 2026 17:00:14 +0000 Subject: [PATCH 23/62] chore: move TXE ports out of Linux ephemeral range 
(#20475) - TXE servers were allocated on ports 45730+ which falls in the Linux ephemeral range (32768-60999), risking collisions with OS-assigned outgoing connection ports. Moves to 14730+ which is safely below the range. - Adds `ci3/check_port` utility that checks if a port is free and prints the process tree of the holder (via `pstree`) if taken --- bootstrap.sh | 13 ++++++++++--- ci3/check_port | 19 +++++++++++++++++++ ci3/find_ports | 6 ------ noir-projects/aztec-nr/bootstrap.sh | 18 +++++++++++++++--- noir-projects/noir-contracts/bootstrap.sh | 19 ++++++++++++++++--- 5 files changed, 60 insertions(+), 15 deletions(-) create mode 100755 ci3/check_port delete mode 100755 ci3/find_ports diff --git a/bootstrap.sh b/bootstrap.sh index 47b92fefb888..5b6f65c40175 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -232,11 +232,14 @@ function start_txes { export TOKIO_WORKER_THREADS=1 # Starting txe servers with incrementing port numbers. + # Base port is below the Linux ephemeral range (32768-60999) to avoid conflicts. + local txe_base_port=14730 for i in $(seq 0 $((NUM_TXES-1))); do - port=$((45730 + i)) + port=$((txe_base_port + i)) existing_pid=$(lsof -ti :$port || true) if [ -n "$existing_pid" ]; then echo "Killing existing process $existing_pid on port: $port" + check_port $port kill -9 $existing_pid &>/dev/null || true while kill -0 $existing_pid &>/dev/null; do sleep 0.1; done fi @@ -247,8 +250,12 @@ function start_txes { echo "Waiting for TXE's to start..." for i in $(seq 0 $((NUM_TXES-1))); do local j=0 - while ! nc -z 127.0.0.1 $((45730 + i)) &>/dev/null; do - [ $j == 60 ] && echo_stderr "TXE $i took too long to start. Exiting." && exit 1 + while ! nc -z 127.0.0.1 $((txe_base_port + i)) &>/dev/null; do + if [ $j == 60 ]; then + echo_stderr "TXE $i failed to start on port $((txe_base_port + i)) after 60s." 
+ check_port $((txe_base_port + i)) + exit 1 + fi sleep 1 j=$((j+1)) done diff --git a/ci3/check_port b/ci3/check_port new file mode 100755 index 000000000000..a637a8e78405 --- /dev/null +++ b/ci3/check_port @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Check if a port is free. If taken, print the process tree of the holder. +# Usage: check_port +# Exit code: 0 if free, 1 if taken. +set -eu + +port="${1:?Usage: check_port }" + +pid=$(lsof -ti :"$port" 2>/dev/null | head -1 || true) +if [ -z "$pid" ]; then + exit 0 +fi + +echo "Port $port is taken by PID $pid:" >&2 +# Show the command line of the process. +ps -p "$pid" -o pid,ppid,user,args --no-headers >&2 || true +# Show the process tree rooted at this PID. +pstree -apls "$pid" >&2 2>/dev/null || pstree -p "$pid" >&2 2>/dev/null || true +exit 1 diff --git a/ci3/find_ports b/ci3/find_ports deleted file mode 100755 index d7da0afe53bf..000000000000 --- a/ci3/find_ports +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -source $(git rev-parse --show-toplevel)/ci3/source -# Find 'num_ports' free ports between 9000 and 10000 -# Read first arg, default to 1 port -num_ports="${1:-1}" -echo $(comm -23 <(seq 9000 10000 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n "$num_ports") diff --git a/noir-projects/aztec-nr/bootstrap.sh b/noir-projects/aztec-nr/bootstrap.sh index ed07b0eb933c..2181000ce913 100755 --- a/noir-projects/aztec-nr/bootstrap.sh +++ b/noir-projects/aztec-nr/bootstrap.sh @@ -20,17 +20,29 @@ function test_cmds { i=0 $NARGO test --list-tests --silence-warnings | sort | while read -r package test; do # We assume there are 8 txe's running. - port=$((45730 + (i++ % ${NUM_TXES:-1}))) + port=$((14730 + (i++ % ${NUM_TXES:-1}))) echo "$hash noir-projects/scripts/run_test.sh aztec-nr $package $test $port" done } function test { # Start txe server. + # Port is below the Linux ephemeral range (32768-60999) to avoid conflicts. 
+ local txe_base_port=14730 trap 'kill $(jobs -p)' EXIT - (cd $root/yarn-project/txe && LOG_LEVEL=error TXE_PORT=45730 yarn start) & + check_port $txe_base_port || echo "WARNING: port $txe_base_port is in use, TXE may fail to start" + (cd $root/yarn-project/txe && LOG_LEVEL=error TXE_PORT=$txe_base_port yarn start) & echo "Waiting for TXE to start..." - while ! nc -z 127.0.0.1 45730 &>/dev/null; do sleep 1; done + local j=0 + while ! nc -z 127.0.0.1 $txe_base_port &>/dev/null; do + if [ $j == 60 ]; then + echo "TXE failed to start on port $txe_base_port after 60s." >&2 + check_port $txe_base_port + exit 1 + fi + sleep 1 + j=$((j+1)) + done export NARGO_FOREIGN_CALL_TIMEOUT=300000 test_cmds | filter_test_cmds | parallelize diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index 12c6a06c4ee3..fe6b671029c3 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -237,7 +237,7 @@ function test_cmds { i=0 $NARGO test --list-tests --silence-warnings | sort | while read -r package test; do - port=$((45730 + (i++ % ${NUM_TXES:-1}))) + port=$((14730 + (i++ % ${NUM_TXES:-1}))) [ -z "${cache[$package]:-}" ] && cache[$package]=$(get_contract_hash $package $folder_name) echo "${cache[$package]} noir-projects/scripts/run_test.sh noir-contracts $package $test $port" done @@ -245,14 +245,27 @@ function test_cmds { function test { # Starting txe servers with incrementing port numbers. + # Base port is below the Linux ephemeral range (32768-60999) to avoid conflicts. 
+ local txe_base_port=14730 export NUM_TXES=8 trap 'kill $(jobs -p) &>/dev/null || true' EXIT for i in $(seq 0 $((NUM_TXES-1))); do - (cd $root/yarn-project/txe && LOG_LEVEL=silent TXE_PORT=$((45730 + i)) yarn start) >/dev/null & + check_port $((txe_base_port + i)) || echo "WARNING: port $((txe_base_port + i)) is in use, TXE $i may fail to start" + (cd $root/yarn-project/txe && LOG_LEVEL=silent TXE_PORT=$((txe_base_port + i)) yarn start) >/dev/null & done echo "Waiting for TXE's to start..." for i in $(seq 0 $((NUM_TXES-1))); do - while ! nc -z 127.0.0.1 $((45730 + i)) &>/dev/null; do sleep 1; done + local j=0 + local port=$((txe_base_port + i)) + while ! nc -z 127.0.0.1 $port &>/dev/null; do + if [ $j == 60 ]; then + echo "TXE $i failed to start on port $port after 60s." >&2 + check_port $port + exit 1 + fi + sleep 1 + j=$((j+1)) + done done export NARGO_FOREIGN_CALL_TIMEOUT=300000 From 1003d7ba476cf09b596ea2d687ee75b8eb445877 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 16:38:12 -0300 Subject: [PATCH 24/62] fix(node): sync ws before simulating public calls (#20499) The `simulatePublicCalls` method in the aztec node would simulate using a fake block following immediately after the latest one from the archiver. However, it also used a sync of world state that didn't necessarily have all the latest changes from that block. This means that a client who was monitoring the node for a block to be mined could send a simulation request that relied on data from that very latest block that was still not in world-state, and fail. 
This was causing issues eg in [cross-chain-bot e2e tests](http://ci.aztec-labs.com/4f7378c362712da7) where the simulation for consuming the message in public land would fail since the message was flagged as ready (since that's an archiver check) but not present in world state, erroring with: ``` 17:22:06 Simulation error: Assertion failed: Tried to consume nonexistent L1-to-L2 message 'self.l1_to_l2_msg_exists(message_hash, leaf_index)' 17:22:06 Context: 17:22:06 TxExecutionRequest(0x032f68986bd02951337b130a702e9041a055bdc49a8cb809e3244d252596d2f4 called 0x9d57a239) 17:22:06 simulatePublic=true 17:22:06 skipTxValidation=true 17:22:06 scopes=0x032f68986bd02951337b130a702e9041a055bdc49a8cb809e3244d252596d2f4 17:22:06 17:22:06 228 | 17:22:06 229 | assert(!self.nullifier_exists_unsafe(nullifier, self.this_address()), "L1-to-L2 message is already nullified"); 17:22:06 > 230 | assert(self.l1_to_l2_msg_exists(message_hash, leaf_index), "Tried to consume nonexistent L1-to-L2 message"); 17:22:06 | ^ 17:22:06 231 | 17:22:06 232 | self.push_nullifier(nullifier); 17:22:06 233 | } ``` --- yarn-project/aztec-node/src/aztec-node/server.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index a609ba4b9f6b..0765a48e40ab 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1129,7 +1129,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { } const txHash = tx.getTxHash(); - const blockNumber = BlockNumber((await this.blockSource.getBlockNumber()) + 1); + const latestBlockNumber = await this.blockSource.getBlockNumber(); + const blockNumber = BlockNumber.add(latestBlockNumber, 1); // If sequencer is not initialized, we just set these values to zero for simulation. 
const coinbase = EthAddress.ZERO; @@ -1153,6 +1154,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { blockNumber, }); + // Ensure world-state has caught up with the latest block we loaded from the archiver + await this.worldStateSynchronizer.syncImmediate(latestBlockNumber); const merkleTreeFork = await this.worldStateSynchronizer.fork(); try { const config = PublicSimulatorConfig.from({ From 471c876682c66a9966769636e7debf92682ab701 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 16:38:18 -0300 Subject: [PATCH 25/62] fix(ethereum): check timeout before consuming nonce in L1TxUtils (#20501) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Move the `txTimeoutAt` check before `nonceManager.consume()` in `L1TxUtils.sendTransaction` to prevent nonce leaks - Add regression test verifying that a timed-out send does not consume a nonce When a transaction timed out before sending (e.g. due to `advanceInboxInProgress` warping L1 time), the nonce was consumed but the tx was never submitted to L1. This created a permanent gap: all subsequent transactions used higher nonces (108, 109, ...) but the chain expected the leaked nonce (107) first. The sequencer got stuck in an infinite prune-rebuild loop. 
Flaky failure: http://ci.aztec-labs.com/6b46aa90848758e1 (`e2e_bot > creates bot after inbox drift`) ## Test plan - New unit test: `does not consume nonce when transaction times out before sending` - Full `l1_tx_utils.test.ts` suite passes (46/46) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- .../src/l1_tx_utils/l1_tx_utils.test.ts | 15 +++++++++++++++ .../ethereum/src/l1_tx_utils/l1_tx_utils.ts | 17 ++++++++++------- 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts index 152abf04e680..8670113a7ed7 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts +++ b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.test.ts @@ -918,6 +918,21 @@ describe('L1TxUtils', () => { expect(result.receipt.status).toBe('reverted'); }); + it('does not consume nonce when transaction times out before sending', async () => { + // Get the expected nonce before any transaction + const expectedNonce = await l1Client.getTransactionCount({ address: l1Client.account.address }); + + // Try to send with an already-expired timeout (epoch 0 is well in the past) + const pastTimeout = new Date(0); + await expect(gasUtils.sendTransaction(request, { txTimeoutAt: pastTimeout })).rejects.toThrow( + /timed out before sending/, + ); + + // The next transaction should use the same nonce (not skip one due to a leaked consume) + const { state } = await gasUtils.sendTransaction(request); + expect(state.nonce).toBe(expectedNonce); + }, 10_000); + it('stops trying after timeout once block is mined', async () => { await cheatCodes.setAutomine(false); await cheatCodes.setIntervalMining(0); diff --git a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts index f91539bfb946..5c5c0f776db2 100644 --- a/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts +++ 
b/yarn-project/ethereum/src/l1_tx_utils/l1_tx_utils.ts @@ -244,6 +244,16 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { throw new InterruptError(`Transaction sending is interrupted`); } + // Check timeout before consuming nonce to avoid leaking a nonce that was never sent. + // A leaked nonce creates a gap (e.g. nonce 107 consumed but unsent), so all subsequent + // transactions (108, 109, ...) can never be mined since the chain expects 107 first. + const now = new Date(await this.getL1Timestamp()); + if (gasConfig.txTimeoutAt && now > gasConfig.txTimeoutAt) { + throw new TimeoutError( + `Transaction timed out before sending (now ${now.toISOString()} > timeoutAt ${gasConfig.txTimeoutAt.toISOString()})`, + ); + } + const nonce = await this.nonceManager.consume({ client: this.client, address: account, @@ -253,13 +263,6 @@ export class L1TxUtils extends ReadOnlyL1TxUtils { const baseState = { request, gasLimit, blobInputs, gasPrice, nonce }; const txData = this.makeTxData(baseState, { isCancelTx: false }); - const now = new Date(await this.getL1Timestamp()); - if (gasConfig.txTimeoutAt && now > gasConfig.txTimeoutAt) { - throw new TimeoutError( - `Transaction timed out before sending (now ${now.toISOString()} > timeoutAt ${gasConfig.txTimeoutAt.toISOString()})`, - ); - } - // Send the new tx const signedRequest = await this.prepareSignedTransaction(txData); const txHash = await this.client.sendRawTransaction({ serializedTransaction: signedRequest }); From e6ea5186564dd618e60cc203d1f82f25bb207e1a Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 11:07:46 -0300 Subject: [PATCH 26/62] chore(mbps): cleanup todos no longer needed --- yarn-project/stdlib/src/p2p/block_proposal.ts | 2 -- yarn-project/stdlib/src/rollup/checkpoint_header.ts | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/yarn-project/stdlib/src/p2p/block_proposal.ts b/yarn-project/stdlib/src/p2p/block_proposal.ts index 91f1c96e821b..8371d8adb9b1 100644 --- 
a/yarn-project/stdlib/src/p2p/block_proposal.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.ts @@ -56,8 +56,6 @@ export class BlockProposal extends Gossipable { /** The per-block header containing block state and global variables */ public readonly blockHeader: BlockHeader, - // TODO(palla/mbps): Is this really needed? Can we just derive it from the indexWithinCheckpoint of the parent block and the slot number? - // See the block-proposal-handler, we have a lot of extra validations to check this is correct, so maybe we can avoid storing it here. /** Index of this block within the checkpoint (0-indexed) */ public readonly indexWithinCheckpoint: IndexWithinCheckpoint, diff --git a/yarn-project/stdlib/src/rollup/checkpoint_header.ts b/yarn-project/stdlib/src/rollup/checkpoint_header.ts index 81ce41fd6976..2f42b8993a71 100644 --- a/yarn-project/stdlib/src/rollup/checkpoint_header.ts +++ b/yarn-project/stdlib/src/rollup/checkpoint_header.ts @@ -19,8 +19,8 @@ import type { UInt64 } from '../types/shared.js'; /** * Header of a checkpoint. A checkpoint is a collection of blocks submitted to L1 all within the same slot. - * TODO(palla/mbps): Should this include chainId and version as well? Is this used just in circuits? - * TODO(palla/mbps): What about CheckpointNumber? + * This header is verified as-is in the rollup circuits, posted to the L1 rollup contract, stored in the archiver, + * and exposed via the Aztec Node API. See `CheckpointData` for a struct that includes the header plus extra metadata. */ export class CheckpointHeader { constructor( From 283592555c56a572438ac9599b0783ce8053a6e2 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 11:08:14 -0300 Subject: [PATCH 27/62] refactor(mbps): add timestamp to checkpoint global vars Given it's a constant value throughout the checkpoint. 
--- .../src/e2e_l1_publisher/e2e_l1_publisher.test.ts | 1 + .../light/lightweight_checkpoint_builder.test.ts | 15 ++++++--------- .../prover-client/src/mocks/test_context.ts | 2 +- .../src/global_variable_builder/global_builder.ts | 2 +- .../checkpoint_proposal_job.timing.test.ts | 4 ++-- .../slasher/src/watchers/epoch_prune_watcher.ts | 1 + yarn-project/stdlib/src/tx/global_variables.ts | 6 +++--- .../src/block_proposal_handler.ts | 3 ++- .../src/checkpoint_builder.test.ts | 1 + .../src/validator.integration.test.ts | 1 + yarn-project/validator-client/src/validator.ts | 1 + 11 files changed, 20 insertions(+), 17 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 28489eb537f0..d635d11c0ffb 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -354,6 +354,7 @@ describe('L1Publisher integration', () => { chainId: globalVariables.chainId, version: globalVariables.version, slotNumber: globalVariables.slotNumber, + timestamp: globalVariables.timestamp, coinbase: globalVariables.coinbase, feeRecipient: globalVariables.feeRecipient, gasFees: globalVariables.gasFees, diff --git a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.test.ts b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.test.ts index a2e426847320..8c791e955acc 100644 --- a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.test.ts +++ b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.test.ts @@ -3,17 +3,16 @@ import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/bra import { timesAsync } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; -import { ProtocolContractsList, 
protocolContractsHash } from '@aztec/protocol-contracts'; +import { ProtocolContractsList } from '@aztec/protocol-contracts'; import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice'; import { PublicDataWrite } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { EthAddress } from '@aztec/stdlib/block'; import { GasFees } from '@aztec/stdlib/gas'; import { accumulateCheckpointOutHashes } from '@aztec/stdlib/messaging'; -import { CheckpointConstantData } from '@aztec/stdlib/rollup'; import { mockProcessedTx } from '@aztec/stdlib/testing'; import { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; -import type { ProcessedTx } from '@aztec/stdlib/tx'; +import type { CheckpointGlobalVariables, ProcessedTx } from '@aztec/stdlib/tx'; import { GlobalVariables } from '@aztec/stdlib/tx'; import { NativeWorldStateService } from '@aztec/world-state/native'; @@ -41,18 +40,16 @@ describe('LightweightCheckpointBuilder', () => { await worldState.close(); }); - const makeCheckpointConstants = (slotNumber: SlotNumber): CheckpointConstantData => { - return CheckpointConstantData.from({ + const makeCheckpointConstants = (slotNumber: SlotNumber): CheckpointGlobalVariables => { + return { chainId: Fr.ZERO, version: Fr.ZERO, - vkTreeRoot: getVKTreeRoot(), - protocolContractsHash, - proverId: Fr.ZERO, slotNumber, + timestamp: BigInt(slotNumber) * 123n, coinbase: EthAddress.ZERO, feeRecipient: AztecAddress.ZERO, gasFees: GasFees.empty(), - }); + }; }; const makeGlobalVariables = (blockNumber: BlockNumber, slotNumber: SlotNumber): GlobalVariables => { diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 8234ce61ecf9..b27adcec0d97 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -250,7 +250,7 @@ export class TestContext { const previousCheckpointOutHashes = 
this.checkpointOutHashes; const builder = await LightweightCheckpointBuilder.startNewCheckpoint( checkpointNumber, - constants, + { ...constants, timestamp }, l1ToL2Messages, previousCheckpointOutHashes, cleanFork, diff --git a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts index 0dc1f73ef4bb..73f4bc47f1a1 100644 --- a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts +++ b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts @@ -120,7 +120,7 @@ export class GlobalVariableBuilder implements GlobalVariableBuilderInterface { coinbase: EthAddress, feeRecipient: AztecAddress, slotNumber: SlotNumber, - ): Promise { + ): Promise { const { chainId, version } = this; const timestamp = getTimestampForSlot(slotNumber, { diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index b25291dfc60d..00f39be89c90 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -62,7 +62,7 @@ class TimingAwareMockCheckpointBuilder extends MockCheckpointBuilder { public recordedBuildTimes: Array<{ blockNumber: number; startTime: number; endTime: number }> = []; constructor( - constants: CheckpointGlobalVariables & { timestamp: bigint }, + constants: CheckpointGlobalVariables, checkpointNumber: CheckpointNumber, private readonly dateProvider: ManualDateProvider, private readonly getSecondsIntoSlot: () => number, @@ -368,7 +368,7 @@ describe('CheckpointProposalJob Timing Tests', () => { ); // Create timing-aware checkpoint builder - const checkpointConstants: CheckpointGlobalVariables & { timestamp: bigint } = { ...globalVariables }; + const checkpointConstants: CheckpointGlobalVariables 
= { ...globalVariables }; checkpointBuilder = new TimingAwareMockCheckpointBuilder( checkpointConstants, checkpointNumber, diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts index 77da4ac6d957..f10c4b39378c 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts @@ -172,6 +172,7 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter chainId: gv.chainId, version: gv.version, slotNumber: gv.slotNumber, + timestamp: gv.timestamp, coinbase: gv.coinbase, feeRecipient: gv.feeRecipient, gasFees: gv.gasFees, diff --git a/yarn-project/stdlib/src/tx/global_variables.ts b/yarn-project/stdlib/src/tx/global_variables.ts index f475f73ff42a..a5b5ff521ab8 100644 --- a/yarn-project/stdlib/src/tx/global_variables.ts +++ b/yarn-project/stdlib/src/tx/global_variables.ts @@ -22,10 +22,10 @@ import { schemas } from '../schemas/index.js'; import type { UInt64 } from '../types/index.js'; /** - * Global variables that are constant across the entire slot. - * TODO(palla/mbps): Should timestamp be included here as well? + * Global variables that are constant across the entire checkpoint (slot). + * Excludes blockNumber since that varies per block within a checkpoint. */ -export type CheckpointGlobalVariables = Omit, 'blockNumber' | 'timestamp'>; +export type CheckpointGlobalVariables = Omit, 'blockNumber'>; /** * Global variables of the L2 block. 
diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index fba4fcb25f3f..8b7df53a434c 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -477,11 +477,12 @@ export class BlockProposalHandler { await this.worldState.syncImmediate(parentBlockNumber); using fork = await this.worldState.fork(parentBlockNumber); - // Build checkpoint constants from proposal (excludes blockNumber and timestamp which are per-block) + // Build checkpoint constants from proposal (excludes blockNumber which is per-block) const constants: CheckpointGlobalVariables = { chainId: new Fr(config.l1ChainId), version: new Fr(config.rollupVersion), slotNumber: slot, + timestamp: blockHeader.globalVariables.timestamp, coinbase: blockHeader.globalVariables.coinbase, feeRecipient: blockHeader.globalVariables.feeRecipient, gasFees: blockHeader.globalVariables.gasFees, diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 9b1323316a0e..76899d131bdd 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -41,6 +41,7 @@ describe('CheckpointBuilder', () => { chainId: new Fr(1), version: new Fr(1), slotNumber, + timestamp: BigInt(Date.now()), coinbase: EthAddress.random(), feeRecipient: AztecAddress.fromField(Fr.random()), gasFees: GasFees.empty(), diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 109996b6ca23..551928645950 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -279,6 +279,7 @@ describe('ValidatorClient Integration', () => { feeRecipient: await 
AztecAddress.random(), gasFees: GasFees.empty(), slotNumber: slot, + timestamp: BigInt(Date.now()), }; using fork = await proposer.worldStateDb.fork(); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 82e9f27fc0cd..c936a20dea51 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -737,6 +737,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) chainId: gv.chainId, version: gv.version, slotNumber: gv.slotNumber, + timestamp: gv.timestamp, coinbase: gv.coinbase, feeRecipient: gv.feeRecipient, gasFees: gv.gasFees, From d9ad4ff007b649fd5ac17599e904cc9f8212356d Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 16:29:02 -0300 Subject: [PATCH 28/62] chore(mbps): remove outdated todo comments --- yarn-project/p2p/src/services/encoding.ts | 3 ++- .../src/publisher/sequencer-publisher.ts | 16 ---------------- .../sequencer-client/src/sequencer/sequencer.ts | 1 - 3 files changed, 2 insertions(+), 18 deletions(-) diff --git a/yarn-project/p2p/src/services/encoding.ts b/yarn-project/p2p/src/services/encoding.ts index c2a5e6dc5a87..9a4d610c4fa5 100644 --- a/yarn-project/p2p/src/services/encoding.ts +++ b/yarn-project/p2p/src/services/encoding.ts @@ -58,7 +58,8 @@ const DefaultMaxSizesKb: Record = { // Proposals may carry some tx objects, so we allow a larger size capped at 10mb // Note this may not be enough for carrying all tx objects in a block [TopicType.block_proposal]: 1024 * 10, - // TODO(palla/mbps): Check size for checkpoint proposal + // Checkpoint proposals carry almost the same data as a block proposal (see the lastBlockProposal) + // Only diff is an additional header, which is pretty small compared to the 10mb limit [TopicType.checkpoint_proposal]: 1024 * 10, }; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts 
b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index c4c4fe2c0518..f63921818d82 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -639,22 +639,6 @@ export class SequencerPublisher { options: { forcePendingCheckpointNumber?: CheckpointNumber }, ): Promise { const ts = BigInt((await this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); - - // TODO(palla/mbps): This should not be needed, there's no flow where we propose with zero attestations. Or is there? - // If we have no attestations, we still need to provide the empty attestations - // so that the committee is recalculated correctly - // const ignoreSignatures = attestationsAndSigners.attestations.length === 0; - // if (ignoreSignatures) { - // const { committee } = await this.epochCache.getCommittee(block.header.globalVariables.slotNumber); - // if (!committee) { - // this.log.warn(`No committee found for slot ${block.header.globalVariables.slotNumber}`); - // throw new Error(`No committee found for slot ${block.header.globalVariables.slotNumber}`); - // } - // attestationsAndSigners.attestations = committee.map(committeeMember => - // CommitteeAttestation.fromAddress(committeeMember), - // ); - // } - const blobFields = checkpoint.toBlobFields(); const blobs = getBlobsPerL1Block(blobFields); const blobInput = getPrefixedEthBlobCommitments(blobs); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index de3dd62cd897..05f040ff1d78 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -160,7 +160,6 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter Date: Fri, 13 Feb 2026 16:41:50 -0300 Subject: [PATCH 29/62] fix(validator): do not attest to checkpoint that points to non-latest block We were 
checking if the last block pointed to by a checkpoint proposal existed, but not if it was the last one. This would have eventually failed due to mismatched reexecution, since reexecution picked up all blocks in the slot, but this check is cheaper to do. --- .../validator-client/src/validator.test.ts | 38 +++++++++++++++++++ .../validator-client/src/validator.ts | 6 +++ 2 files changed, 44 insertions(+) diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index d68edcf7207f..bd766d65bbb3 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -482,6 +482,44 @@ describe('ValidatorClient', () => { uploadBlobsSpy.mockRestore(); }); + it('should not attest to a checkpoint proposal that references a middle block instead of the last', async () => { + const addCheckpointAttestationsSpy = jest.spyOn(p2pClient, 'addOwnCheckpointAttestations'); + + // First validate a block proposal so the validator has seen a block for this slot + const didValidate = await validatorClient.validateBlockProposal(proposal, sender); + expect(didValidate).toBe(true); + + // Create 3 blocks for the slot, each with a distinct archive root + const block1Archive = new AppendOnlyTreeSnapshot(Fr.random(), 1); + const block2Archive = new AppendOnlyTreeSnapshot(Fr.random(), 2); + const block3Archive = new AppendOnlyTreeSnapshot(Fr.random(), 3); + const blocks = [ + { archive: block1Archive, number: 1 }, + { archive: block2Archive, number: 2 }, + { archive: block3Archive, number: 3 }, + ] as unknown as L2Block[]; + + // Proposal references the middle block's archive (block 2), not the last (block 3) + const checkpointProposal = await makeCheckpointProposal({ + archiveRoot: block2Archive.root, + checkpointHeader: makeCheckpointHeader(0, { slotNumber: proposal.slotNumber }), + lastBlock: { + blockHeader: makeBlockHeader(1, { blockNumber: BlockNumber(2), slotNumber: 
proposal.slotNumber }), + indexWithinCheckpoint: IndexWithinCheckpoint(1), + txHashes: proposal.txHashes, + }, + }); + + // Mock getBlockHeaderByArchive to return a header so retryUntil succeeds + blockSource.getBlockHeaderByArchive.mockResolvedValue(makeBlockHeader()); + blockSource.getBlocksForSlot.mockResolvedValue(blocks); + + // Checkpoint validation should fail: proposal points to block 2 but last block in slot is block 3 + const attestations = await validatorClient.attestToCheckpointProposal(checkpointProposal, sender); + expect(attestations).toBeUndefined(); + expect(addCheckpointAttestationsSpy).not.toHaveBeenCalled(); + }); + it('should wait for previous block to sync', async () => { epochCache.filterInCommittee.mockResolvedValue([EthAddress.fromString(validatorAccounts[0].address)]); blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index c936a20dea51..873dc9a15b50 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -643,6 +643,12 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return { isValid: false, reason: 'no_blocks_for_slot' }; } + // Ensure the last block for this slot matches the archive in the checkpoint proposal + if (!blocks.at(-1)?.archive.root.equals(proposal.archive)) { + this.log.warn(`Last block archive mismatch for checkpoint proposal`, proposalInfo); + return { isValid: false, reason: 'last_block_archive_mismatch' }; + } + this.log.debug(`Found ${blocks.length} blocks for slot ${slot}`, { ...proposalInfo, blockNumbers: blocks.map(b => b.number), From 25a11bf6a6812f0ece44e90d65bae256406681e8 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 15:10:53 -0300 Subject: [PATCH 30/62] feat(archiver): return L2 block data to avoid fetching full block The block proposal handler requires access to a 
block header, along with its checkpoint number. However, the checkpoint number is NOT part of the block header, and it's a bit painful to add (since the block header goes into circuits). Instead, the checkpoint number for a given block is only returned as part of the full L2Block, including txs. This PR adds an intermediate struct `BlockData` (similar to `CheckpointData` from #20467) that contains the block header plus checkpoint number, archive root, index within checkpoint, etc. --- .../archiver/src/modules/data_source_base.ts | 17 +++++- .../archiver/src/store/block_store.ts | 54 ++++++++++++++---- .../archiver/src/store/kv_archiver_store.ts | 24 +++++++- .../archiver/src/test/mock_l2_block_source.ts | 29 ++++++++++ .../aztec-node/src/aztec-node/server.ts | 17 +++++- .../aztec-node/src/sentinel/sentinel.test.ts | 2 +- .../aztec-node/src/sentinel/sentinel.ts | 8 +-- .../src/sequencer/sequencer.test.ts | 20 ++++++- .../src/sequencer/sequencer.ts | 22 ++++---- yarn-project/stdlib/src/block/block_data.ts | 26 +++++++++ yarn-project/stdlib/src/block/index.ts | 1 + .../stdlib/src/block/l2_block_source.ts | 15 +++++ .../stdlib/src/interfaces/archiver.test.ts | 18 +++++- .../stdlib/src/interfaces/archiver.ts | 3 + .../stdlib/src/interfaces/aztec-node.test.ts | 8 ++- .../src/block_proposal_handler.ts | 49 +++++++---------- .../validator-client/src/validator.test.ts | 55 ++++++++----------- 17 files changed, 270 insertions(+), 98 deletions(-) create mode 100644 yarn-project/stdlib/src/block/block_data.ts diff --git a/yarn-project/archiver/src/modules/data_source_base.ts b/yarn-project/archiver/src/modules/data_source_base.ts index 7be5a1b801d8..2cd512830e74 100644 --- a/yarn-project/archiver/src/modules/data_source_base.ts +++ b/yarn-project/archiver/src/modules/data_source_base.ts @@ -4,7 +4,14 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; import { isDefined } from '@aztec/foundation/types'; import type { FunctionSelector } from '@aztec/stdlib/abi'; 
import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { type BlockHash, CheckpointedL2Block, CommitteeAttestation, L2Block, type L2Tips } from '@aztec/stdlib/block'; +import { + type BlockData, + type BlockHash, + CheckpointedL2Block, + CommitteeAttestation, + L2Block, + type L2Tips, +} from '@aztec/stdlib/block'; import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; @@ -129,6 +136,14 @@ export abstract class ArchiverDataSourceBase return this.store.getBlockHeaderByArchive(archive); } + public getBlockData(number: BlockNumber): Promise { + return this.store.getBlockData(number); + } + + public getBlockDataByArchive(archive: Fr): Promise { + return this.store.getBlockDataByArchive(archive); + } + public async getL2Block(number: BlockNumber): Promise { // If the number provided is -ve, then return the latest block. if (number < 0) { diff --git a/yarn-project/archiver/src/store/block_store.ts b/yarn-project/archiver/src/store/block_store.ts index 732fa7e13c3b..636df8cc299a 100644 --- a/yarn-project/archiver/src/store/block_store.ts +++ b/yarn-project/archiver/src/store/block_store.ts @@ -9,6 +9,7 @@ import { isDefined } from '@aztec/foundation/types'; import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncSingleton, Range } from '@aztec/kv-store'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { + type BlockData, BlockHash, Body, CheckpointedL2Block, @@ -655,6 +656,32 @@ export class BlockStore { } } + /** + * Gets block metadata (without tx data) by block number. + * @param blockNumber - The number of the block to return. + * @returns The requested block data. 
+ */ + async getBlockData(blockNumber: BlockNumber): Promise { + const blockStorage = await this.#blocks.getAsync(blockNumber); + if (!blockStorage || !blockStorage.header) { + return undefined; + } + return this.getBlockDataFromBlockStorage(blockStorage); + } + + /** + * Gets block metadata (without tx data) by archive root. + * @param archive - The archive root of the block to return. + * @returns The requested block data. + */ + async getBlockDataByArchive(archive: Fr): Promise { + const blockNumber = await this.#blockArchiveIndex.getAsync(archive.toString()); + if (blockNumber === undefined) { + return undefined; + } + return this.getBlockData(BlockNumber(blockNumber)); + } + /** * Gets an L2 block. * @param blockNumber - The number of the block to return. @@ -759,15 +786,24 @@ export class BlockStore { } } + private getBlockDataFromBlockStorage(blockStorage: BlockStorage): BlockData { + return { + header: BlockHeader.fromBuffer(blockStorage.header), + archive: AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive), + blockHash: Fr.fromBuffer(blockStorage.blockHash), + checkpointNumber: CheckpointNumber(blockStorage.checkpointNumber), + indexWithinCheckpoint: IndexWithinCheckpoint(blockStorage.indexWithinCheckpoint), + }; + } + private async getBlockFromBlockStorage( blockNumber: number, blockStorage: BlockStorage, ): Promise { - const header = BlockHeader.fromBuffer(blockStorage.header); - const archive = AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive); - const blockHash = blockStorage.blockHash; - header.setHash(Fr.fromBuffer(blockHash)); - const blockHashString = bufferToHex(blockHash); + const { header, archive, blockHash, checkpointNumber, indexWithinCheckpoint } = + this.getBlockDataFromBlockStorage(blockStorage); + header.setHash(blockHash); + const blockHashString = bufferToHex(blockStorage.blockHash); const blockTxsBuffer = await this.#blockTxs.getAsync(blockHashString); if (blockTxsBuffer === undefined) { this.#log.warn(`Could not find body 
for block ${header.globalVariables.blockNumber} ${blockHash}`); @@ -786,13 +822,7 @@ txEffects.push(deserializeIndexedTxEffect(txEffect).data); } const body = new Body(txEffects); - const block = new L2Block( - archive, - header, - body, - CheckpointNumber(blockStorage.checkpointNumber!), - IndexWithinCheckpoint(blockStorage.indexWithinCheckpoint), - ); + const block = new L2Block(archive, header, body, checkpointNumber, indexWithinCheckpoint); if (block.number !== blockNumber) { throw new Error( diff --git a/yarn-project/archiver/src/store/kv_archiver_store.ts b/yarn-project/archiver/src/store/kv_archiver_store.ts index 2be54f985f2d..ed31527c65ed 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.ts @@ -6,7 +6,13 @@ import { createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store'; import { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { BlockHash, CheckpointedL2Block, L2Block, type ValidateCheckpointResult } from '@aztec/stdlib/block'; +import { + type BlockData, + BlockHash, + CheckpointedL2Block, + L2Block, + type ValidateCheckpointResult, +} from '@aztec/stdlib/block'; import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, @@ -369,6 +375,22 @@ export class KVArchiverDataStore implements ContractDataSource { return this.#blockStore.getBlockHeaderByArchive(archive); } + /** + * Gets block metadata (without tx data) by block number. + * @param blockNumber - The block number to return. + */ + getBlockData(blockNumber: BlockNumber): Promise { + return this.#blockStore.getBlockData(blockNumber); + } + + /** + * Gets block metadata (without tx data) by archive root. + * @param archive - The archive root of the block to return. 
+ */ + getBlockDataByArchive(archive: Fr): Promise { + return this.#blockStore.getBlockDataByArchive(archive); + } + /** * Gets a tx effect. * @param txHash - The hash of the tx corresponding to the tx effect. diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 3d06e42e7391..d0a44894ec4e 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -8,6 +8,7 @@ import { createLogger } from '@aztec/foundation/log'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { + type BlockData, BlockHash, CheckpointedL2Block, L2Block, @@ -255,6 +256,34 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { return Promise.resolve(block?.header); } + public async getBlockData(number: BlockNumber): Promise { + const block = this.l2Blocks[number - 1]; + if (!block) { + return undefined; + } + return { + header: block.header, + archive: block.archive, + blockHash: await block.hash(), + checkpointNumber: block.checkpointNumber, + indexWithinCheckpoint: block.indexWithinCheckpoint, + }; + } + + public async getBlockDataByArchive(archive: Fr): Promise { + const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); + if (!block) { + return undefined; + } + return { + header: block.header, + archive: block.archive, + blockHash: await block.hash(), + checkpointNumber: block.checkpointNumber, + indexWithinCheckpoint: block.indexWithinCheckpoint, + }; + } + getBlockHeader(number: number | 'latest'): Promise { return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? 
number - 1 : -1)?.header); } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 0765a48e40ab..30e0b38bbaf6 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -33,7 +33,14 @@ import { } from '@aztec/slasher'; import { CollectionLimitsConfig, PublicSimulatorConfig } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { BlockHash, type BlockParameter, type DataInBlock, L2Block, type L2BlockSource } from '@aztec/stdlib/block'; +import { + type BlockData, + BlockHash, + type BlockParameter, + type DataInBlock, + L2Block, + type L2BlockSource, +} from '@aztec/stdlib/block'; import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, @@ -1106,6 +1113,14 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return await this.blockSource.getBlockHeaderByArchive(archive); } + public getBlockData(number: BlockNumber): Promise { + return this.blockSource.getBlockData(number); + } + + public getBlockDataByArchive(archive: Fr): Promise { + return this.blockSource.getBlockDataByArchive(archive); + } + /** * Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. 
diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts index 23bd50f032cd..add05d7e7525 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts @@ -589,7 +589,7 @@ describe('sentinel', () => { ts, nowMs: ts * 1000n, }); - archiver.getL2Block.calledWith(blockNumber).mockResolvedValue(mockBlock); + archiver.getBlockHeader.calledWith(blockNumber).mockResolvedValue(mockBlock.header); archiver.getL1Constants.mockResolvedValue(l1Constants); epochCache.getL1Constants.mockReturnValue(l1Constants); diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.ts b/yarn-project/aztec-node/src/sentinel/sentinel.ts index 44e4dc80d022..4be3a6972772 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.ts @@ -139,15 +139,15 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme return; } const blockNumber = event.block.number; - const block = await this.archiver.getL2Block(blockNumber); - if (!block) { - this.logger.error(`Failed to get block ${blockNumber}`, { block }); + const header = await this.archiver.getBlockHeader(blockNumber); + if (!header) { + this.logger.error(`Failed to get block header ${blockNumber}`); return; } // TODO(palla/slash): We should only be computing proven performance if this is // a full proof epoch and not a partial one, otherwise we'll end up with skewed stats. 
- const epoch = getEpochAtSlot(block.header.getSlot(), this.epochCache.getL1Constants()); + const epoch = getEpochAtSlot(header.getSlot(), this.epochCache.getL1Constants()); this.logger.debug(`Computing proven performance for epoch ${epoch}`); const performance = await this.computeProvenPerformance(epoch); this.logger.info(`Computed proven performance for epoch ${epoch}`, performance); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 0aa7c921015a..9329b90fba22 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -1,7 +1,13 @@ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants'; import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; import type { RollupContract } from '@aztec/ethereum/contracts'; -import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { + BlockNumber, + CheckpointNumber, + EpochNumber, + IndexWithinCheckpoint, + SlotNumber, +} from '@aztec/foundation/branded-types'; import { omit, times, timesParallel } from '@aztec/foundation/collection'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; @@ -12,6 +18,7 @@ import type { P2P } from '@aztec/p2p'; import type { SlasherClientInterface } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { + type BlockData, CommitteeAttestation, CommitteeAttestationsAndSigners, GENESIS_CHECKPOINT_HEADER_HASH, @@ -31,7 +38,8 @@ import { type WorldStateSynchronizerStatus, } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; -import { GlobalVariables, type Tx } from '@aztec/stdlib/tx'; +import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; +import { BlockHeader, 
GlobalVariables, type Tx } from '@aztec/stdlib/tx'; import type { FullNodeCheckpointsBuilder, ValidatorClient } from '@aztec/validator-client'; import { expect } from '@jest/globals'; @@ -235,7 +243,13 @@ describe('sequencer', () => { checkpointBuilder.setBlockProvider(() => block); l2BlockSource = mock({ - getL2Block: mockFn().mockResolvedValue(L2Block.empty()), + getBlockData: mockFn().mockResolvedValue({ + header: BlockHeader.empty(), + archive: AppendOnlyTreeSnapshot.empty(), + blockHash: Fr.ZERO, + checkpointNumber: CheckpointNumber(0), + indexWithinCheckpoint: IndexWithinCheckpoint(0), + } satisfies BlockData), getBlockNumber: mockFn().mockResolvedValue(lastBlockNumber), getL2Tips: mockFn().mockResolvedValue({ proposed: { number: lastBlockNumber, hash }, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index de3dd62cd897..b34bb0c273f9 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -12,7 +12,7 @@ import type { DateProvider } from '@aztec/foundation/timer'; import type { TypedEventEmitter } from '@aztec/foundation/types'; import type { P2P } from '@aztec/p2p'; import type { SlasherClientInterface } from '@aztec/slasher'; -import type { L2Block, L2BlockSink, L2BlockSource, ValidateCheckpointResult } from '@aztec/stdlib/block'; +import type { BlockData, L2BlockSink, L2BlockSource, ValidateCheckpointResult } from '@aztec/stdlib/block'; import type { Checkpoint } from '@aztec/stdlib/checkpoint'; import { getSlotAtTimestamp, getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { @@ -301,10 +301,10 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter= slot) { + if (syncedTo.blockData && syncedTo.blockData.header.getSlot() >= slot) { this.log.warn( `Cannot propose block at next L2 slot ${slot} since that slot was taken by block ${syncedTo.blockNumber}`, - { 
...logCtx, block: syncedTo.block.header.toInspect() }, + { ...logCtx, block: syncedTo.blockData.header.toInspect() }, ); this.metrics.recordBlockProposalPrecheckFailed('slot_already_taken'); return undefined; @@ -523,18 +523,18 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter; + /** + * Gets block metadata (without tx data) by block number. + * @param number - The block number to retrieve. + * @returns The requested block data (or undefined if not found). + */ + getBlockData(number: BlockNumber): Promise; + + /** + * Gets block metadata (without tx data) by archive root. + * @param archive - The archive root to retrieve. + * @returns The requested block data (or undefined if not found). + */ + getBlockDataByArchive(archive: Fr): Promise; + /** * Gets an L2 block by block number. * @param number - The block number to return. diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 2d605b35b0b4..6235da1d6c81 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -10,7 +10,7 @@ import type { ContractArtifact } from '../abi/abi.js'; import { FunctionSelector } from '../abi/function_selector.js'; import { AztecAddress } from '../aztec-address/index.js'; import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; -import { BlockHash, CommitteeAttestation, L2Block } from '../block/index.js'; +import { type BlockData, BlockHash, CommitteeAttestation, L2Block } from '../block/index.js'; import type { L2Tips } from '../block/l2_block_source.js'; import type { ValidateCheckpointResult } from '../block/validate_block_result.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; @@ -110,6 +110,16 @@ describe('ArchiverApiSchema', () => { expect(result).toBeInstanceOf(BlockHeader); }); + it('getBlockData', async () => { + const result = await 
context.client.getBlockData(BlockNumber(1)); + expect(result).toBeUndefined(); + }); + + it('getBlockDataByArchive', async () => { + const result = await context.client.getBlockDataByArchive(Fr.random()); + expect(result).toBeUndefined(); + }); + it('getBlockHeaderByHash', async () => { const result = await context.client.getBlockHeaderByHash(BlockHash.random()); expect(result).toBeInstanceOf(BlockHeader); @@ -453,6 +463,12 @@ class MockArchiver implements ArchiverApi { getBlockHeaderByArchive(_archive: Fr): Promise { return Promise.resolve(BlockHeader.empty()); } + getBlockData(_number: BlockNumber): Promise { + return Promise.resolve(undefined); + } + getBlockDataByArchive(_archive: Fr): Promise { + return Promise.resolve(undefined); + } getL2Block(number: BlockNumber): Promise { return L2Block.random(number); } diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 51d62fd5cb6b..4caf08b4a30a 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -4,6 +4,7 @@ import type { ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; +import { BlockDataSchema } from '../block/block_data.js'; import { BlockHash } from '../block/block_hash.js'; import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import { L2Block } from '../block/l2_block.js'; @@ -104,6 +105,8 @@ export const ArchiverApiSchema: ApiSchemaFor = { getCheckpointedBlockByArchive: z.function().args(schemas.Fr).returns(CheckpointedL2Block.schema.optional()), getBlockHeaderByHash: z.function().args(BlockHash.schema).returns(BlockHeader.schema.optional()), getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), + getBlockData: z.function().args(BlockNumberSchema).returns(BlockDataSchema.optional()), + getBlockDataByArchive: z.function().args(schemas.Fr).returns(BlockDataSchema.optional()), getL2Block: 
z.function().args(BlockNumberSchema).returns(L2Block.schema.optional()), getL2BlockByHash: z.function().args(BlockHash.schema).returns(L2Block.schema.optional()), getL2BlockByArchive: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 2243efd963dc..436945dd773e 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -16,7 +16,7 @@ import times from 'lodash.times'; import type { ContractArtifact } from '../abi/abi.js'; import { AztecAddress } from '../aztec-address/index.js'; import type { DataInBlock } from '../block/in_block.js'; -import { BlockHash, type BlockParameter, CommitteeAttestation, L2Block } from '../block/index.js'; +import { type BlockData, BlockHash, type BlockParameter, CommitteeAttestation, L2Block } from '../block/index.js'; import type { L2Tips } from '../block/l2_block_source.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; import { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; @@ -637,6 +637,12 @@ class MockAztecNode implements AztecNode { getBlockHeaderByArchive(_archive: Fr): Promise { return Promise.resolve(BlockHeader.empty()); } + getBlockData(_number: BlockNumber): Promise { + return Promise.resolve(undefined); + } + getBlockDataByArchive(_archive: Fr): Promise { + return Promise.resolve(undefined); + } getCurrentMinFees(): Promise { return Promise.resolve(GasFees.empty()); } diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index fba4fcb25f3f..e1811bd36957 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -9,7 +9,7 @@ import { retryUntil } from '@aztec/foundation/retry'; import { DateProvider, 
Timer } from '@aztec/foundation/timer'; import type { P2P, PeerId } from '@aztec/p2p'; import { BlockProposalValidator } from '@aztec/p2p/msg_validators'; -import type { L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; +import type { BlockData, L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import { getEpochAtSlot, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import type { ITxProvider, ValidatorClientFullConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { @@ -18,7 +18,7 @@ import { computeInHashFromL1ToL2Messages, } from '@aztec/stdlib/messaging'; import type { BlockProposal } from '@aztec/stdlib/p2p'; -import { BlockHeader, type CheckpointGlobalVariables, type FailedTx, type Tx } from '@aztec/stdlib/tx'; +import type { CheckpointGlobalVariables, FailedTx, Tx } from '@aztec/stdlib/tx'; import { ReExFailedTxsError, ReExStateMismatchError, @@ -153,16 +153,16 @@ export class BlockProposalHandler { } // Check that the parent proposal is a block we know, otherwise reexecution would fail - const parentBlockHeader = await this.getParentBlock(proposal); - if (parentBlockHeader === undefined) { + const parentBlock = await this.getParentBlock(proposal); + if (parentBlock === undefined) { this.log.warn(`Parent block for proposal not found, skipping processing`, proposalInfo); return { isValid: false, reason: 'parent_block_not_found' }; } // Check that the parent block's slot is not greater than the proposal's slot. 
- if (parentBlockHeader !== 'genesis' && parentBlockHeader.getSlot() > slotNumber) { + if (parentBlock !== 'genesis' && parentBlock.header.getSlot() > slotNumber) { this.log.warn(`Parent block slot is greater than proposal slot, skipping processing`, { - parentBlockSlot: parentBlockHeader.getSlot().toString(), + parentBlockSlot: parentBlock.header.getSlot().toString(), proposalSlot: slotNumber.toString(), ...proposalInfo, }); @@ -171,9 +171,9 @@ export class BlockProposalHandler { // Compute the block number based on the parent block const blockNumber = - parentBlockHeader === 'genesis' + parentBlock === 'genesis' ? BlockNumber(INITIAL_L2_BLOCK_NUM) - : BlockNumber(parentBlockHeader.getBlockNumber() + 1); + : BlockNumber(parentBlock.header.getBlockNumber() + 1); // Check that this block number does not exist already const existingBlock = await this.blockSource.getBlockHeader(blockNumber); @@ -190,7 +190,7 @@ export class BlockProposalHandler { }); // Compute the checkpoint number for this block and validate checkpoint consistency - const checkpointResult = await this.computeCheckpointNumber(proposal, parentBlockHeader, proposalInfo); + const checkpointResult = this.computeCheckpointNumber(proposal, parentBlock, proposalInfo); if (checkpointResult.reason) { return { isValid: false, blockNumber, reason: checkpointResult.reason }; } @@ -260,7 +260,7 @@ export class BlockProposalHandler { return { isValid: true, blockNumber, reexecutionResult }; } - private async getParentBlock(proposal: BlockProposal): Promise<'genesis' | BlockHeader | undefined> { + private async getParentBlock(proposal: BlockProposal): Promise<'genesis' | BlockData | undefined> { const parentArchive = proposal.blockHeader.lastArchive.root; const slot = proposal.slotNumber; const config = this.checkpointsBuilder.getConfig(); @@ -276,12 +276,11 @@ export class BlockProposalHandler { try { return ( - (await this.blockSource.getBlockHeaderByArchive(parentArchive)) ?? 
+ (await this.blockSource.getBlockDataByArchive(parentArchive)) ?? (timeoutDurationMs <= 0 ? undefined : await retryUntil( - () => - this.blockSource.syncImmediate().then(() => this.blockSource.getBlockHeaderByArchive(parentArchive)), + () => this.blockSource.syncImmediate().then(() => this.blockSource.getBlockDataByArchive(parentArchive)), 'force archiver sync', timeoutDurationMs / 1000, 0.5, @@ -297,12 +296,12 @@ export class BlockProposalHandler { } } - private async computeCheckpointNumber( + private computeCheckpointNumber( proposal: BlockProposal, - parentBlockHeader: 'genesis' | BlockHeader, + parentBlock: 'genesis' | BlockData, proposalInfo: object, - ): Promise { - if (parentBlockHeader === 'genesis') { + ): CheckpointComputationResult { + if (parentBlock === 'genesis') { // First block is in checkpoint 1 if (proposal.indexWithinCheckpoint !== 0) { this.log.warn(`First block proposal has non-zero indexWithinCheckpoint`, proposalInfo); @@ -311,19 +310,9 @@ export class BlockProposalHandler { return { checkpointNumber: CheckpointNumber.INITIAL }; } - // Get the parent block to find its checkpoint number - // TODO(palla/mbps): The block header should include the checkpoint number to avoid this lookup, - // or at least the L2BlockSource should return a different struct that includes it. 
- const parentBlockNumber = parentBlockHeader.getBlockNumber(); - const parentBlock = await this.blockSource.getL2Block(parentBlockNumber); - if (!parentBlock) { - this.log.warn(`Parent block ${parentBlockNumber} not found in archiver`, proposalInfo); - return { reason: 'invalid_proposal' }; - } - if (proposal.indexWithinCheckpoint === 0) { // If this is the first block in a new checkpoint, increment the checkpoint number - if (!(proposal.blockHeader.getSlot() > parentBlockHeader.getSlot())) { + if (!(proposal.blockHeader.getSlot() > parentBlock.header.getSlot())) { this.log.warn(`Slot should be greater than parent block slot for first block in checkpoint`, proposalInfo); return { reason: 'invalid_proposal' }; } @@ -335,7 +324,7 @@ export class BlockProposalHandler { this.log.warn(`Non-sequential indexWithinCheckpoint`, proposalInfo); return { reason: 'invalid_proposal' }; } - if (proposal.blockHeader.getSlot() !== parentBlockHeader.getSlot()) { + if (proposal.blockHeader.getSlot() !== parentBlock.header.getSlot()) { this.log.warn(`Slot should be equal to parent block slot for non-first block in checkpoint`, proposalInfo); return { reason: 'invalid_proposal' }; } @@ -356,7 +345,7 @@ export class BlockProposalHandler { */ private validateNonFirstBlockInCheckpoint( proposal: BlockProposal, - parentBlock: L2Block, + parentBlock: BlockData, proposalInfo: object, ): CheckpointComputationResult | undefined { const proposalGlobals = proposal.blockHeader.globalVariables; diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index d68edcf7207f..097959351792 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -23,7 +23,7 @@ import { } from '@aztec/p2p'; import { OffenseType, WANT_TO_SLASH_EVENT } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2Block, L2BlockSink, L2BlockSource } from 
'@aztec/stdlib/block'; +import type { BlockData, L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; @@ -336,23 +336,19 @@ describe('ValidatorClient', () => { epochCache.filterInCommittee.mockResolvedValue([EthAddress.fromString(validatorAccounts[0].address)]); epochCache.isEscapeHatchOpenAtSlot.mockResolvedValue(false); - // Return parent block header when requested - blockSource.getBlockHeaderByArchive.mockResolvedValue({ - getBlockNumber: () => blockNumber - 1, - getSlot: () => SlotNumber(Number(blockHeader.globalVariables.slotNumber) - 1), - } as BlockHeader); - - // Return parent block when requested (needed for checkpoint number computation) - // The parent block has slot - 1, which is different from the proposal's slot + // Return parent block data when requested (includes checkpoint info, avoids loading full L2Block) const parentSlot = SlotNumber(Number(blockHeader.globalVariables.slotNumber) - 1); - blockSource.getL2Block.mockResolvedValue({ - checkpointNumber: CheckpointNumber(1), - indexWithinCheckpoint: IndexWithinCheckpoint(0), + blockSource.getBlockDataByArchive.mockResolvedValue({ header: { - globalVariables: blockHeader.globalVariables, + getBlockNumber: () => blockNumber - 1, getSlot: () => parentSlot, + globalVariables: blockHeader.globalVariables, }, - } as unknown as L2Block); + archive: new AppendOnlyTreeSnapshot(Fr.random(), blockNumber - 1), + blockHash: Fr.random(), + checkpointNumber: CheckpointNumber(1), + indexWithinCheckpoint: IndexWithinCheckpoint(0), + } as unknown as BlockData); blockSource.getGenesisValues.mockResolvedValue({ genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT) }); blockSource.syncImmediate.mockImplementation(() => Promise.resolve()); @@ -484,11 +480,11 @@ describe('ValidatorClient', () => { it('should wait 
for previous block to sync', async () => { epochCache.filterInCommittee.mockResolvedValue([EthAddress.fromString(validatorAccounts[0].address)]); - blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); - blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); - blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); + blockSource.getBlockDataByArchive.mockResolvedValueOnce(undefined); + blockSource.getBlockDataByArchive.mockResolvedValueOnce(undefined); + blockSource.getBlockDataByArchive.mockResolvedValueOnce(undefined); const isValid = await validatorClient.validateBlockProposal(proposal, sender); - expect(blockSource.getBlockHeaderByArchive).toHaveBeenCalledTimes(4); + expect(blockSource.getBlockDataByArchive).toHaveBeenCalledTimes(4); expect(isValid).toBe(true); }); @@ -693,23 +689,18 @@ describe('ValidatorClient', () => { nextSlot: SlotNumber(nonFirstBlockProposal.slotNumber + 1), }); - // Mock parent block header returned by getBlockHeaderByArchive - const parentBlockHeader = { - getBlockNumber: () => BlockNumber(parentBlockNumber), - getSlot: () => SlotNumber(parentSlotNumber), - globalVariables: parentGlobalVariables, - } as BlockHeader; - blockSource.getBlockHeaderByArchive.mockResolvedValue(parentBlockHeader); - - // Mock parent block returned by getL2Block - const parentBlock = { - checkpointNumber: parentCheckpointNumber, - indexWithinCheckpoint: IndexWithinCheckpoint(0), // Parent is first block in checkpoint + // Mock parent block data returned by getBlockDataByArchive + blockSource.getBlockDataByArchive.mockResolvedValue({ header: { + getBlockNumber: () => BlockNumber(parentBlockNumber), + getSlot: () => SlotNumber(parentSlotNumber), globalVariables: parentGlobalVariables, }, - } as unknown as L2Block; - blockSource.getL2Block.mockResolvedValue(parentBlock); + archive: new AppendOnlyTreeSnapshot(Fr.random(), parentBlockNumber), + blockHash: Fr.random(), + checkpointNumber: parentCheckpointNumber, + 
indexWithinCheckpoint: IndexWithinCheckpoint(0), // Parent is first block in checkpoint + } as unknown as BlockData); // Set time for the slot const genesisTime = 1n; From 4c30e23a8806cbb6b69dbab5852f20e4ac8d4c32 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 9 Feb 2026 21:26:14 +0000 Subject: [PATCH 31/62] chore(e2e): toggle mbps in e2e tests Updates the configuration on some e2e epochs and p2p tests in order to run with multiple blocks per slot. Also updates the fixed ports to be overridden via env vars so we can run multiple e2e tests in parallel without having them clash with each other. --- .../epochs_invalidate_block.parallel.test.ts | 140 ++++++++------ ...t.ts => epochs_l1_reorgs.parallel.test.ts} | 181 +++++++++++++++--- .../end-to-end/src/e2e_epochs/epochs_test.ts | 38 +++- ...tiple_validators_sentinel.parallel.test.ts | 10 +- .../end-to-end/src/e2e_p2p/reqresp/utils.ts | 44 ++++- .../end-to-end/src/fixtures/fixtures.ts | 10 + yarn-project/prover-node/src/factory.ts | 1 + yarn-project/prover-node/src/prover-node.ts | 2 +- yarn-project/sequencer-client/src/config.ts | 3 + .../src/sequencer/checkpoint_proposal_job.ts | 9 + yarn-project/stdlib/src/interfaces/configs.ts | 6 +- 11 files changed, 337 insertions(+), 107 deletions(-) rename yarn-project/end-to-end/src/e2e_epochs/{epochs_l1_reorgs.test.ts => epochs_l1_reorgs.parallel.test.ts} (72%) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts index 01ac7e29aec0..78ebee3bb157 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts @@ -7,8 +7,8 @@ import { RollupContract } from '@aztec/ethereum/contracts'; import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import type { ExtendedViemWalletClient } from '@aztec/ethereum/types'; 
import { asyncMap } from '@aztec/foundation/async-map'; -import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { times } from '@aztec/foundation/collection'; +import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { times, timesAsync } from '@aztec/foundation/collection'; import { SecretValue } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import { promiseWithResolvers } from '@aztec/foundation/promise'; @@ -16,14 +16,16 @@ import { retryUntil } from '@aztec/foundation/retry'; import { bufferToHex } from '@aztec/foundation/string'; import { timeoutPromise } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts'; -import type { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; +import type { TestContract } from '@aztec/noir-test-contracts.js/Test'; import { OffenseType } from '@aztec/slasher'; import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import { jest } from '@jest/globals'; import { privateKeyToAccount } from 'viem/accounts'; +import { getAnvilPort } from '../fixtures/fixtures.js'; import { type EndToEndContext, getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { proveInteraction } from '../test-wallet/utils.js'; import { EpochsTestContext } from './epochs_test.js'; jest.setTimeout(1000 * 60 * 10); @@ -31,17 +33,19 @@ jest.setTimeout(1000 * 60 * 10); const NODE_COUNT = 5; const VALIDATOR_COUNT = 5; +const BASE_ANVIL_PORT = getAnvilPort(); + describe('e2e_epochs/epochs_invalidate_block', () => { let context: EndToEndContext; let logger: Logger; let l1Client: ExtendedViemWalletClient; let rollupContract: RollupContract; - let anvilPort = 8545; + let anvilPortOffset = 0; let test: EpochsTestContext; let validators: (Operator & { privateKey: `0x${string}` })[]; let nodes: AztecNodeService[]; - let contract: SpamContract; + let testContract: TestContract; beforeEach(async () => { 
validators = times(VALIDATOR_COUNT, i => { @@ -51,8 +55,13 @@ describe('e2e_epochs/epochs_invalidate_block', () => { }); // Setup context with the given set of validators, mocked gossip sub network, and no anvil test watcher. + // Uses multiple-blocks-per-slot timing configuration. test = await EpochsTestContext.setup({ ethereumSlotDuration: 8, + aztecSlotDuration: 36, + blockDurationMs: 6000, + l1PublishingTime: 8, + enforceTimeTable: true, numberOfAccounts: 1, initialValidators: validators, mockGossipSubNetwork: true, @@ -62,11 +71,12 @@ describe('e2e_epochs/epochs_invalidate_block', () => { aztecTargetCommitteeSize: VALIDATOR_COUNT, archiverPollingIntervalMS: 200, anvilAccounts: 20, - anvilPort: ++anvilPort, + anvilPort: BASE_ANVIL_PORT + ++anvilPortOffset, slashingRoundSizeInEpochs: 4, slashingOffsetInRounds: 256, slasherFlavor: 'tally', minTxsPerBlock: 1, + maxTxsPerBlock: 1, }); ({ context, logger, l1Client } = test); @@ -88,8 +98,9 @@ describe('e2e_epochs/epochs_invalidate_block', () => { ); logger.warn(`Started ${NODE_COUNT} validator nodes.`, { validators: validatorNodes.map(v => v.attester) }); - // Register spam contract for sending txs. 
- contract = await test.registerSpamContract(context.wallet); + // Register test contract for lightweight txs + testContract = await test.registerTestContract(context.wallet); + logger.warn(`Test setup completed.`, { validators: validators.map(v => v.attester.toString()) }); }); @@ -98,23 +109,29 @@ describe('e2e_epochs/epochs_invalidate_block', () => { await test.teardown(); }); - it('proposer invalidates previous block while posting its own', async () => { + it('proposer invalidates previous checkpoint with multiple blocks while posting its own', async () => { const sequencers = nodes.map(node => node.getSequencer()!); - const initialBlockNumber = await nodes[0].getBlockNumber(); + const [initialCheckpointNumber, initialBlockNumber] = await nodes[0] + .getL2Tips() + .then(t => [t.checkpointed.checkpoint.number, t.checkpointed.block.number] as const); // Configure all sequencers to skip collecting attestations before starting + // Also set minBlocksForCheckpoint to ensure multi-block checkpoints logger.warn('Configuring all sequencers to skip attestation collection'); sequencers.forEach(sequencer => { - sequencer.updateConfig({ skipCollectingAttestations: true }); + sequencer.updateConfig({ skipCollectingAttestations: true, minBlocksForCheckpoint: 2 }); }); - // Send a transaction so the sequencer builds a block - logger.warn('Sending transaction to trigger block building'); - const sentTx = await contract.methods.spam(1, 1n, false).send({ from: context.accounts[0], wait: NO_WAIT }); + // Send a few transactions so the sequencer builds multiple blocks in the checkpoint + // We'll later check that the first tx at least was picked up and mined + logger.warn('Sending multiple transactions to trigger block building'); + const [sentTx] = await timesAsync(8, i => + testContract.methods.emit_nullifier(BigInt(i + 1)).send({ from: context.accounts[0], wait: NO_WAIT }), + ); - // Disable skipCollectingAttestations after the first L2 block is mined - 
test.monitor.once('checkpoint', ({ checkpointNumber }) => { - logger.warn(`Disabling skipCollectingAttestations after L2 block ${checkpointNumber} has been mined`); + // Disable skipCollectingAttestations after the first checkpoint and capture its number + test.monitor.on('checkpoint', ({ checkpointNumber }) => { + logger.warn(`Disabling skipCollectingAttestations after checkpoint ${checkpointNumber} has been mined`); sequencers.forEach(sequencer => { sequencer.updateConfig({ skipCollectingAttestations: false }); }); @@ -133,8 +150,8 @@ describe('e2e_epochs/epochs_invalidate_block', () => { toBlock: 'latest', }); - // The next proposer should invalidate the previous block and publish a new one - logger.warn('Waiting for next proposer to invalidate the previous block'); + // The next proposer should invalidate the previous checkpoint and publish a new one + logger.warn('Waiting for next proposer to invalidate the previous checkpoint'); // Wait for the CheckpointInvalidated event const checkpointInvalidatedEvents = await retryUntil( @@ -150,10 +167,10 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Verify the CheckpointInvalidated event was emitted and that the block was removed const [event] = checkpointInvalidatedEvents; logger.warn(`CheckpointInvalidated event emitted`, { event }); - expect(event.args.checkpointNumber).toBeGreaterThan(initialBlockNumber); + expect(event.args.checkpointNumber).toBeGreaterThan(initialCheckpointNumber); expect(test.rollup.address).toEqual(event.address); - // Wait for all nodes to sync the new block + // Wait for all nodes to sync the new block proposed logger.warn('Waiting for all nodes to sync'); await retryUntil( async () => { @@ -167,7 +184,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { ); // Verify the transaction was eventually included - const receipt = await waitForTx(context.aztecNode, sentTx, { timeout: 30 }); + const receipt = await waitForTx(context.aztecNode, sentTx, { timeout: 
test.L2_SLOT_DURATION_IN_S * 4 }); expect(receipt.isMined()).toBeTrue(); logger.warn(`Transaction included in block ${receipt.blockNumber}`); @@ -177,15 +194,19 @@ describe('e2e_epochs/epochs_invalidate_block', () => { const invalidBlockOffense = offenses.find(o => o.offenseType === OffenseType.PROPOSED_INSUFFICIENT_ATTESTATIONS); expect(invalidBlockOffense).toBeDefined(); + const currentCheckpoint = await test.rollup.getCheckpointNumber(); + logger.warn(`Waiting for checkpoint ${currentCheckpoint + 2} to be mined to ensure chain can progress`); + await test.waitUntilCheckpointNumber(CheckpointNumber(currentCheckpoint + 2), test.L2_SLOT_DURATION_IN_S * 8); + logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); }); - // Regression for an issue where, if the invalidator proposed another invalid block, the next proposer would + // Regression for an issue where, if the invalidator proposed another invalid checkpoint, the next proposer would // try invalidating the first one, which would fail due to mismatching attestations. 
For example: - // Slot S: Block N is proposed with invalid attestations - // Slot S+1: Block N is invalidated, and block N' (same number) is proposed instead, but also has invalid attestations - // Slot S+2: Proposer tries to invalidate block N, when they should invalidate block N' instead, and fails - it('chain progresses if a block with insufficient attestations is invalidated with an invalid one', async () => { + // Slot S: Checkpoint N is proposed with invalid attestations + // Slot S+1: Checkpoint N is invalidated, and checkpoint N' (same number) is proposed instead, but also has invalid attestations + // Slot S+2: Proposer tries to invalidate checkpoint N, when they should invalidate checkpoint N' instead, and fails + it('chain progresses if a checkpoint with insufficient attestations is invalidated with an invalid one', async () => { // Configure all sequencers to skip collecting attestations before starting and always build blocks logger.warn('Configuring all sequencers to skip attestation collection'); const sequencers = nodes.map(node => node.getSequencer()!); @@ -212,10 +233,16 @@ describe('e2e_epochs/epochs_invalidate_block', () => { }); await Promise.race([timeoutPromise(1000 * test.L2_SLOT_DURATION_IN_S * 8), invalidatePromise.promise]); - // Disable skipCollectingAttestations + // Disable skipCollectingAttestations and send txs so MBPS can produce multi-block checkpoints sequencers.forEach(sequencer => { sequencer.updateConfig({ skipCollectingAttestations: false }); }); + logger.warn('Sending transactions to enable multi-block checkpoints'); + const from = context.accounts[0]; + for (let i = 0; i < 4; i++) { + const tx = await proveInteraction(context.wallet, testContract.methods.emit_nullifier(new Fr(100 + i)), { from }); + await tx.send({ wait: NO_WAIT }); + } // Ensure chain progresses const targetCheckpointNumber = CheckpointNumber(lastInvalidatedCheckpointNumber! 
+ 2); @@ -236,11 +263,13 @@ describe('e2e_epochs/epochs_invalidate_block', () => { 0.5, ); + await test.assertMultipleBlocksPerSlot(2); + logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); }); // Regression for Joe's Q42025 London attack. Same as above but with an invalid signature instead of insufficient ones. - it('chain progresses if a block with an invalid attestation is invalidated with an invalid one', async () => { + it('chain progresses if a checkpoint with an invalid attestation is invalidated with an invalid one', async () => { // Configure all sequencers to skip collecting attestations before starting and always build blocks logger.warn('Configuring all sequencers to inject one invalid attestation'); const sequencers = nodes.map(node => node.getSequencer()!); @@ -270,7 +299,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { invalidatePromise.promise, ]); - // Disable injectFakeAttestation + // Disable injectFakeAttestations sequencers.forEach(sequencer => { sequencer.updateConfig({ injectFakeAttestation: false }); }); @@ -297,11 +326,11 @@ describe('e2e_epochs/epochs_invalidate_block', () => { logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); }); - // Here we disable invalidation checks from two of the proposers. Our goal is to get two invalid blocks + // Here we disable invalidation checks from two of the proposers. Our goal is to get two invalid checkpoints // in a row, so the third proposer invalidates the earliest one, and the chain progresses. Note that the - // second invalid block will also have invalid attestations, we are *not* testing the scenario where the - // committee is malicious (or incompetent) and attests for the descendent of an invalid block. 
- it('proposer invalidates multiple blocks', async () => { + // second invalid checkpoint will also have invalid attestations, we are *not* testing the scenario where the + // committee is malicious (or incompetent) and attests for the descendent of an invalid checkpoint. + it('proposer invalidates multiple checkpoints', async () => { const initialSlot = (await test.monitor.run()).l2SlotNumber; // Disable validation and attestation gathering for the proposers of two consecutive slots @@ -406,9 +435,9 @@ describe('e2e_epochs/epochs_invalidate_block', () => { logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); }); - it('proposer invalidates previous block without publishing its own', async () => { + it('proposer invalidates previous checkpoint without publishing its own', async () => { const sequencers = nodes.map(node => node.getSequencer()!); - const initialBlockNumber = await nodes[0].getBlockNumber(); + const initialCheckpointNumber = (await nodes[0].getL2Tips()).checkpointed.checkpoint.number; // Configure all sequencers to skip collecting attestations before starting logger.warn('Configuring all sequencers to skip attestation collection and always publish blocks'); @@ -437,8 +466,8 @@ describe('e2e_epochs/epochs_invalidate_block', () => { toBlock: 'latest', }); - // The next proposer should invalidate the previous block - logger.warn('Waiting for next proposer to invalidate the previous block'); + // The next proposer should invalidate the previous checkpoint + logger.warn('Waiting for next proposer to invalidate the previous checkpoint'); // Wait for the CheckpointInvalidated event const checkpointInvalidatedEvents = await retryUntil( @@ -454,8 +483,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Verify the CheckpointInvalidated event was emitted and that the block was removed const [event] = checkpointInvalidatedEvents; logger.warn(`CheckpointInvalidated event emitted`, { event }); - 
expect(event.args.checkpointNumber).toBeGreaterThan(initialBlockNumber); - const initialCheckpointNumber = await getCheckpointNumberForBlock(nodes[0], initialBlockNumber); + expect(event.args.checkpointNumber).toBeGreaterThan(initialCheckpointNumber); expect(await test.rollup.getCheckpointNumber()).toEqual(initialCheckpointNumber); logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); @@ -465,7 +493,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // REFACTOR: Remove code duplication with above test (and others?) it('proposer invalidates previous block with shuffled attestations', async () => { const sequencers = nodes.map(node => node.getSequencer()!); - const initialBlockNumber = await nodes[0].getBlockNumber(); + const initialCheckpointNumber = (await nodes[0].getL2Tips()).checkpointed.checkpoint.number; // Configure all sequencers to shuffle attestations before starting logger.warn('Configuring all sequencers to shuffle attestations and always publish blocks'); @@ -494,8 +522,8 @@ describe('e2e_epochs/epochs_invalidate_block', () => { toBlock: 'latest', }); - // The next proposer should invalidate the previous block - logger.warn('Waiting for next proposer to invalidate the previous block'); + // The next proposer should invalidate the previous checkpoint + logger.warn('Waiting for next proposer to invalidate the previous checkpoint'); // Wait for the CheckpointInvalidated event const checkpointInvalidatedEvents = await retryUntil( @@ -511,8 +539,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Verify the CheckpointInvalidated event was emitted and that the block was removed const [event] = checkpointInvalidatedEvents; logger.warn(`CheckpointInvalidated event emitted`, { event }); - expect(event.args.checkpointNumber).toBeGreaterThan(initialBlockNumber); - const initialCheckpointNumber = await getCheckpointNumberForBlock(nodes[0], initialBlockNumber); + 
expect(event.args.checkpointNumber).toBeGreaterThan(initialCheckpointNumber); expect(await test.rollup.getCheckpointNumber()).toEqual(initialCheckpointNumber); logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); @@ -520,7 +547,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { it('committee member invalidates a block if proposer does not come through', async () => { const sequencers = nodes.map(node => node.getSequencer()!); - const initialBlockNumber = await nodes[0].getBlockNumber(); + const initialCheckpointNumber = await nodes[0].getL2Tips().then(t => t.checkpointed.checkpoint.number); // Configure all sequencers to skip collecting attestations before starting logger.warn('Configuring all sequencers to skip attestation collection and invalidation as proposer'); @@ -560,8 +587,8 @@ describe('e2e_epochs/epochs_invalidate_block', () => { toBlock: 'latest', }); - // Some committee member should invalidate the previous block - logger.warn('Waiting for committee member to invalidate the previous block'); + // Some committee member should invalidate the previous checkpoint + logger.warn('Waiting for committee member to invalidate the previous checkpoint'); // Wait for the CheckpointInvalidated event const checkpointInvalidatedEvents = await retryUntil( @@ -577,7 +604,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Verify the CheckpointInvalidated event was emitted const [event] = checkpointInvalidatedEvents; logger.warn(`CheckpointInvalidated event emitted`, { event }); - expect(event.args.checkpointNumber).toBeGreaterThan(initialBlockNumber); + expect(event.args.checkpointNumber).toBeGreaterThan(initialCheckpointNumber); // And check that the invalidation happened at least after the specified timeout. 
// We use the checkpoint header timestamp (L2 timestamp) since that's what the sequencer uses @@ -585,20 +612,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { const invalidSlotTimestamp = getTimestampForSlot(invalidCheckpointSlotNumber!, test.constants); const { timestamp: invalidationTimestamp } = await l1Client.getBlock({ blockNumber: event.blockNumber }); expect(invalidationTimestamp).toBeGreaterThanOrEqual(invalidSlotTimestamp + BigInt(invalidationDelay)); + logger.warn(`Test succeeded '${expect.getState().currentTestName}'`); }); }); - -async function getCheckpointNumberForBlock( - node: AztecNodeService, - blockNumber: BlockNumber, -): Promise { - if (blockNumber === 0) { - return CheckpointNumber(0); - } - const block = await node.getBlock(blockNumber); - if (!block) { - throw new Error(`Block ${blockNumber} not found`); - } - return block.checkpointNumber; -} diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts similarity index 72% rename from yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts rename to yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts index 063287d2ec1b..776a9374a093 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts @@ -1,6 +1,7 @@ import type { Archiver } from '@aztec/archiver'; import type { AztecNodeService } from '@aztec/aztec-node'; import { AztecAddress } from '@aztec/aztec.js/addresses'; +import { NO_WAIT } from '@aztec/aztec.js/contracts'; import { Fr } from '@aztec/aztec.js/fields'; import type { Logger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; @@ -15,6 +16,7 @@ import { AbortError } from '@aztec/foundation/error'; import { retryUntil } from '@aztec/foundation/retry'; import { hexToBuffer } from '@aztec/foundation/string'; import { 
executeTimeout } from '@aztec/foundation/timer'; +import type { TestContract } from '@aztec/noir-test-contracts.js/Test'; import type { ProverNode } from '@aztec/prover-node'; import { jest } from '@jest/globals'; @@ -23,9 +25,10 @@ import { keccak256, parseTransaction } from 'viem'; import { sendL1ToL2Message } from '../fixtures/l1_to_l2_messaging.js'; import type { EndToEndContext } from '../fixtures/utils.js'; +import { proveInteraction } from '../test-wallet/utils.js'; import { EpochsTestContext } from './epochs_test.js'; -jest.setTimeout(1000 * 60 * 10); +jest.setTimeout(1000 * 60 * 20); describe('e2e_epochs/epochs_l1_reorgs', () => { let context: EndToEndContext; @@ -41,18 +44,44 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { let L2_SLOT_DURATION_IN_S: number; let test: EpochsTestContext; + let contract: TestContract; + let from: AztecAddress; + + // Number of txs to send at the start of each blocks test to trigger multi-block checkpoints. + const TX_COUNT = 8; + + /** Pre-proves and sends txs to generate L2 activity for multi-block checkpoints. */ + const sendTransactions = async (count: number, offset = 0) => { + logger.warn(`Pre-proving ${count} transactions`); + const txs = await timesAsync(count, i => + proveInteraction(context.wallet, contract.methods.emit_nullifier(new Fr(offset + i + 1)), { from }), + ); + const txHashes = await Promise.all(txs.map(tx => tx.send({ wait: NO_WAIT }))); + logger.warn(`Sent ${txHashes.length} transactions`); + return txHashes; + }; beforeEach(async () => { test = await EpochsTestContext.setup({ + numberOfAccounts: 1, maxSpeedUpAttempts: 0, // Do not speed up l1 txs, we dont want them to land cancelTxOnTimeout: false, - aztecEpochDuration: 8, // Bump epoch duration, epoch 0 is finishing before we had a chance to do anything - ethereumSlotDuration: process.env.L1_BLOCK_TIME ? 
parseInt(process.env.L1_BLOCK_TIME) : 4, // Got to speed these tests up for CI + aztecEpochDuration: 4, + ethereumSlotDuration: 4, + aztecSlotDuration: 36, + blockDurationMs: 8000, + l1PublishingTime: 2, + minTxsPerBlock: 0, + maxTxsPerBlock: 1, + enforceTimeTable: true, + aztecProofSubmissionEpochs: 1, }); ({ proverDelayer, sequencerDelayer, context, logger, monitor, L1_BLOCK_TIME_IN_S, L2_SLOT_DURATION_IN_S } = test); node = context.aztecNode; archiver = (node as AztecNodeService).getBlockSource() as Archiver; proverNode = context.proverNode!; + from = context.accounts[0]; + contract = await test.registerTestContract(context.wallet); }); afterEach(async () => { @@ -75,6 +104,12 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { const getProvenCheckpointNumber = (node: AztecNode) => node.getL2Tips().then(tips => tips.proven.checkpoint.number); it('prunes L2 blocks if a proof is removed due to an L1 reorg', async () => { + // Send txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT); + + // Capture initial chain state + const initialProvenCheckpoint = (await monitor.run(true)).provenCheckpointNumber; + // Wait until we have proven something and the nodes have caught up const epochDurationSeconds = test.constants.epochDuration * test.constants.slotDuration; logger.warn(`Waiting for initial proof to land`); @@ -82,7 +117,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { signal => { return new Promise<{ provenCheckpointNumber: number; l1BlockNumber: number }>((res, rej) => { const handleMsg = (...[ev]: ChainMonitorEventMap['checkpoint-proven']) => { - if (ev.provenCheckpointNumber !== 0) { + if (ev.provenCheckpointNumber > initialProvenCheckpoint) { res(ev); monitor.off('checkpoint-proven', handleMsg); } @@ -104,15 +139,18 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // And remove the proof from L1 await context.cheatCodes.eth.reorgTo(provenBlockEvent.l1BlockNumber - 1); - expect((await 
monitor.run(true)).provenCheckpointNumber).toEqual(0); + expect((await monitor.run(true)).provenCheckpointNumber).toEqual(initialProvenCheckpoint); - // Wait until the end of the proof submission window for the first epoch - await test.waitUntilLastSlotOfProofSubmissionWindow(0); + // Wait until the end of the proof submission window for the epoch of the proven checkpoint + const provenCheckpointEpoch = await test.rollup.getEpochNumberForCheckpoint( + CheckpointNumber(provenBlockEvent.provenCheckpointNumber), + ); + await test.waitUntilLastSlotOfProofSubmissionWindow(provenCheckpointEpoch); // Ensure that a new node sees the reorg logger.warn(`Syncing new node to test reorg`); const newNode = await executeTimeout(() => test.createNonValidatorNode(), 10_000, `new node sync`); - expect(await newNode.getProvenBlockNumber()).toEqual(0); + expect(await getProvenCheckpointNumber(newNode)).toEqual(initialProvenCheckpoint); // Latest checkpointed block seen by the node may be from the current checkpoint, or one less if it was *just* mined. 
// This is because the call to createNonValidatorNode will block until the initial sync is completed, @@ -123,53 +161,97 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // And check that the old node has processed the reorg as well logger.warn(`Testing old node after reorg`); - await retryUntil(() => node.getProvenBlockNumber().then(b => b === 0), 'prune', L2_SLOT_DURATION_IN_S * 4, 0.1); + await retryUntil( + () => getProvenCheckpointNumber(node).then(cp => cp === initialProvenCheckpoint), + 'prune', + L2_SLOT_DURATION_IN_S * 4, + 0.1, + ); expect(await getCheckpointNumber(node)).toBeWithin(monitor.checkpointNumber - 1, monitor.checkpointNumber + 1); + // Verify multi-block checkpoints were built + await test.assertMultipleBlocksPerSlot(2); + logger.warn(`Test succeeded`); await newNode.stop(); }); it('does not prune if a second proof lands within the submission window after the first one is reorged out', async () => { + // Send txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT); + + // Capture initial chain state + const initialProvenCheckpoint = (await monitor.run(true)).provenCheckpointNumber; + const targetProvenCheckpoint = CheckpointNumber(initialProvenCheckpoint + 1); + // Wait until we have proven something and the nodes have caught up + // Use a longer timeout since we need to wait for the epoch to complete (~288s) plus proving time + const epochDurationSeconds = test.constants.epochDuration * test.constants.slotDuration; logger.warn(`Waiting for initial proof to land`); - const provenCheckpoint = await test.waitUntilProvenCheckpointNumber(CheckpointNumber(1)); - const provenBlock = Number(provenCheckpoint); - await retryUntil(() => node.getProvenBlockNumber().then(p => p >= provenBlock), 'node sync', 10, 0.1); + const provenCheckpoint = await test.waitUntilProvenCheckpointNumber( + targetProvenCheckpoint, + epochDurationSeconds * 2, + ); + await retryUntil(() => getProvenCheckpointNumber(node).then(cp => cp >= provenCheckpoint), 
'node sync', 10, 0.1); // Stop the prover node await proverNode.stop(); // Remove the proof from L1 but do not change the block number await context.cheatCodes.eth.reorgWithReplacement(1); - await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toEqual(0); + await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toEqual(initialProvenCheckpoint); // Create another prover node so it submits a proof and wait until it is submitted + // Use a longer timeout to allow the new prover to sync and generate a proof const newProverNode = await test.createProverNode(); - const provenCheckpointRetry = await test.waitUntilProvenCheckpointNumber(CheckpointNumber(1)); - await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toBeGreaterThanOrEqual(1); + const provenCheckpointRetry = await test.waitUntilProvenCheckpointNumber( + targetProvenCheckpoint, + epochDurationSeconds, + ); + await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toBeGreaterThanOrEqual( + targetProvenCheckpoint, + ); // Check that the node has followed along logger.warn(`Testing old node`); - const provenBlockRetry = Number(provenCheckpointRetry); - await retryUntil(() => node.getProvenBlockNumber().then(b => b >= provenBlockRetry), 'proof sync', 10, 0.1); + await retryUntil( + () => getProvenCheckpointNumber(node).then(cp => cp >= provenCheckpointRetry), + 'proof sync', + 10, + 0.1, + ); expect(await getCheckpointNumber(node)).toBeWithin(monitor.checkpointNumber - 1, monitor.checkpointNumber + 1); + // Verify multi-block checkpoints were built + await test.assertMultipleBlocksPerSlot(2); + logger.warn(`Test succeeded`); await newProverNode.stop(); }); it('restores L2 blocks if a proof is added due to an L1 reorg', async () => { + // Send txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT); + + // Capture initial chain state + const initialProvenCheckpoint = (await 
monitor.run(true)).provenCheckpointNumber; + const initialCheckpoint = monitor.checkpointNumber; + // Next proof shall not land proverDelayer.cancelNextTx(); // Expect pending chain to advance, so there's something to be pruned - await retryUntil(() => node.getBlockNumber().then(b => b > 1), 'node sync', 60, 0.1); + await retryUntil(() => getCheckpointNumber(node).then(cp => cp > initialCheckpoint), 'node sync', 60, 0.1); - // Wait until the end of the proof submission window for the first epoch - await test.waitUntilLastSlotOfProofSubmissionWindow(0); + // Wait until the end of the proof submission window for the first unproven epoch + const firstUnprovenCheckpoint = CheckpointNumber(initialProvenCheckpoint + 1); + await test.waitUntilCheckpointNumber(firstUnprovenCheckpoint, 60); + const epochToWaitFor = await test.rollup.getEpochNumberForCheckpoint(firstUnprovenCheckpoint); + await test.waitUntilLastSlotOfProofSubmissionWindow(epochToWaitFor); await monitor.run(true); - logger.warn(`End of epoch 0 submission window (L1 block ${await monitor.run(true).then(m => m.l1BlockNumber)}).`); + logger.warn( + `End of epoch ${epochToWaitFor} submission window (L1 block ${await monitor.run(true).then(m => m.l1BlockNumber)}).`, + ); // Grab the prover's tx to submit it later as part of a reorg and stop the prover const [proofTx] = proverDelayer.getCancelledTxs(); @@ -179,9 +261,14 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // Wait for the node to prune const syncTimeout = L2_SLOT_DURATION_IN_S * 2; - await retryUntil(() => node.getBlockNumber().then(b => b <= 1), 'node prune', syncTimeout, 0.1); - expect(monitor.provenCheckpointNumber).toEqual(0); - expect(await node.getProvenBlockNumber()).toEqual(0); + await retryUntil( + () => getCheckpointNumber(node).then(cp => cp <= initialProvenCheckpoint + 1), + 'node prune', + syncTimeout, + 0.1, + ); + expect(monitor.provenCheckpointNumber).toEqual(initialProvenCheckpoint); + expect(await 
getProvenCheckpointNumber(node)).toEqual(initialProvenCheckpoint); // But not all is lost, for a reorg gets the proof back on chain! logger.warn(`Reorging proof back (L1 block ${await monitor.run(true).then(m => m.l1BlockNumber)}).`); @@ -191,8 +278,8 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // Monitor should update to see the proof const { checkpointNumber, provenCheckpointNumber } = await monitor.run(true); - expect(checkpointNumber).toBeGreaterThan(1); - expect(provenCheckpointNumber).toBeGreaterThan(0); + expect(checkpointNumber).toBeGreaterThan(initialCheckpoint); + expect(provenCheckpointNumber).toBeGreaterThan(initialProvenCheckpoint); // And so the node undoes its reorg await retryUntil( @@ -208,18 +295,30 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { 0.1, ); + // Verify multi-block checkpoints were built + await test.assertMultipleBlocksPerSlot(2); + logger.warn(`Test succeeded`); }); it('prunes blocks from pending chain removed from L1 due to an L1 reorg', async () => { + // Send txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT); + + // Capture initial chain state + const initialCheckpoint = (await monitor.run(true)).checkpointNumber; + // Wait until CHECKPOINT_NUMBER is mined and node synced, and stop the sequencer - const CHECKPOINT_NUMBER = CheckpointNumber(3); - await test.waitUntilCheckpointNumber(CHECKPOINT_NUMBER, L2_SLOT_DURATION_IN_S * (CHECKPOINT_NUMBER + 4)); + const CHECKPOINT_NUMBER = CheckpointNumber(initialCheckpoint + 3); + await test.waitUntilCheckpointNumber(CHECKPOINT_NUMBER, L2_SLOT_DURATION_IN_S * 7); expect(monitor.checkpointNumber).toEqual(CHECKPOINT_NUMBER); const l1BlockNumber = monitor.l1BlockNumber; // Wait for node to sync to the checkpoint. 
await retryUntil(() => getCheckpointNumber(node).then(b => b === CHECKPOINT_NUMBER), 'node sync', 10, 0.1); + // Verify multi-block checkpoints were built before we do the reorg + await test.assertMultipleBlocksPerSlot(2); + logger.warn(`Reached checkpoint ${CHECKPOINT_NUMBER}. Stopping block production.`); await context.aztecNodeAdmin.setConfig({ minTxsPerBlock: 100 }); @@ -237,14 +336,23 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { }); it('sees new blocks added in an L1 reorg', async () => { + // Send txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT); + + // Capture initial chain state + const initialCheckpoint = (await monitor.run(true)).checkpointNumber; + // Wait until the checkpoint *before* CHECKPOINT_NUMBER is mined and node synced - const CHECKPOINT_NUMBER = CheckpointNumber(3); + const CHECKPOINT_NUMBER = CheckpointNumber(initialCheckpoint + 3); const prevCheckpointNumber = CheckpointNumber(CHECKPOINT_NUMBER - 1); - await test.waitUntilCheckpointNumber(prevCheckpointNumber, L2_SLOT_DURATION_IN_S * (CHECKPOINT_NUMBER + 4)); + await test.waitUntilCheckpointNumber(prevCheckpointNumber, L2_SLOT_DURATION_IN_S * 7); expect(monitor.checkpointNumber).toEqual(prevCheckpointNumber); // Wait for node to sync to the checkpoint await retryUntil(() => getCheckpointNumber(node).then(b => b === prevCheckpointNumber), 'node sync', 5, 0.1); + // Verify multi-block checkpoints were built before we do the reorg + await test.assertMultipleBlocksPerSlot(2); + // Cancel the next tx to be mined and pause the sequencer sequencerDelayer.cancelNextTx(); await retryUntil(() => sequencerDelayer.getCancelledTxs().length, 'next block', L2_SLOT_DURATION_IN_S * 2, 0.1); @@ -308,6 +416,9 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { ); it('updates L1 to L2 messages changed due to an L1 reorg', async () => { + // Send L2 txs to trigger multi-block checkpoints + await sendTransactions(TX_COUNT, 100); + // Send 3 messages and wait for archiver sync 
logger.warn(`Sending 3 cross chain messages`); const msgs = await timesAsync(3, async (i: number) => { @@ -335,9 +446,16 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { await retryUntil(() => node.isL1ToL2MessageSynced(newMsg.msgHash), 'new message sync', L1_BLOCK_TIME_IN_S * 6, 1); expect(await node.isL1ToL2MessageSynced(msgs[0].msgHash)).toBe(true); expect(await node.isL1ToL2MessageSynced(msgs.at(-1)!.msgHash)).toBe(false); + + // Verify multi-block checkpoints were built + await test.assertMultipleBlocksPerSlot(2); }); it('handles missed message inserted by an L1 reorg', async () => { + // Send L2 txs to trigger multi-block checkpoints and wait for them to land in a checkpoint + await sendTransactions(TX_COUNT, 200); + await test.waitUntilCheckpointNumber(CheckpointNumber(2), L2_SLOT_DURATION_IN_S * 4); + // Send a message and wait for node to sync it logger.warn(`Sending first cross chain message`); const firstMsg = await sendMessage(); @@ -369,6 +487,9 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { logger.warn(`Reorged-in second message on L1 block ${secondMsg.txReceipt.blockNumber}. 
Sending third message.`); const thirdMsg = await sendMessage(); await retryUntil(() => node.isL1ToL2MessageSynced(thirdMsg.msgHash), '3rd msg sync', L1_BLOCK_TIME_IN_S * 3, 1); + + // Verify multi-block checkpoints were built + await test.assertMultipleBlocksPerSlot(2); }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index 951e686db373..d7818b85be44 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -1,3 +1,4 @@ +import type { Archiver } from '@aztec/archiver'; import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import { getTimestampRangeForEpoch } from '@aztec/aztec.js/block'; import { getContractInstanceFromInstantiationParams } from '@aztec/aztec.js/contracts'; @@ -319,7 +320,10 @@ export class EpochsTestContext { this.logger.info(`Waiting until last slot of submission window for epoch ${epochNumber} at ${date}`, { oneSlotBefore, }); - await waitUntilL1Timestamp(this.l1Client, oneSlotBefore); + // Use a timeout that accounts for the full proof submission window + const proofSubmissionWindowDuration = + this.constants.proofSubmissionEpochs * this.epochDuration * this.L2_SLOT_DURATION_IN_S; + await waitUntilL1Timestamp(this.l1Client, oneSlotBefore, undefined, proofSubmissionWindowDuration * 2); } /** Waits for the aztec node to sync to the target block number. */ @@ -394,6 +398,38 @@ export class EpochsTestContext { expect(result).toBe(expectedSuccess); } + /** Verifies at least one checkpoint has the target number of blocks (for MBPS validation). 
*/ + public async assertMultipleBlocksPerSlot(targetBlockCount: number) { + const archiver = (this.context.aztecNode as AztecNodeService).getBlockSource() as Archiver; + const checkpoints = await archiver.getCheckpoints(CheckpointNumber(1), 50); + + this.logger.warn(`Retrieved ${checkpoints.length} checkpoints from archiver`, { + checkpoints: checkpoints.map(pc => pc.checkpoint.getStats()), + }); + + let expectedBlockNumber = checkpoints[0].checkpoint.blocks[0].number; + let targetFound = false; + + for (const checkpoint of checkpoints) { + const blockCount = checkpoint.checkpoint.blocks.length; + targetFound = targetFound || blockCount >= targetBlockCount; + + this.logger.verbose(`Checkpoint ${checkpoint.checkpoint.number} has ${blockCount} blocks`, { + checkpoint: checkpoint.checkpoint.getStats(), + }); + + for (let i = 0; i < blockCount; i++) { + const block = checkpoint.checkpoint.blocks[i]; + expect(block.indexWithinCheckpoint).toBe(i); + expect(block.checkpointNumber).toBe(checkpoint.checkpoint.number); + expect(block.number).toBe(expectedBlockNumber); + expectedBlockNumber++; + } + } + + expect(targetFound).toBe(true); + } + public watchSequencerEvents( sequencers: SequencerClient[], getMetadata: (i: number) => Record = () => ({}), diff --git a/yarn-project/end-to-end/src/e2e_p2p/multiple_validators_sentinel.parallel.test.ts b/yarn-project/end-to-end/src/e2e_p2p/multiple_validators_sentinel.parallel.test.ts index 7dc521775561..8747f2b7251d 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/multiple_validators_sentinel.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/multiple_validators_sentinel.parallel.test.ts @@ -10,17 +10,18 @@ import 'jest-extended'; import os from 'os'; import path from 'path'; +import { getBootNodeUdpPort } from '../fixtures/fixtures.js'; import { createNodes, createNonValidatorNode } from '../fixtures/setup_p2p_test.js'; import { P2PNetworkTest } from './p2p_network.js'; const NUM_NODES = 2; const VALIDATORS_PER_NODE = 3; 
const NUM_VALIDATORS = NUM_NODES * VALIDATORS_PER_NODE; -const BOOT_NODE_UDP_PORT = 4500; +const BOOT_NODE_UDP_PORT = getBootNodeUdpPort(); const SLOT_COUNT = 3; const EPOCH_DURATION = 2; -const ETHEREUM_SLOT_DURATION = 4; -const AZTEC_SLOT_DURATION = 8; +const ETHEREUM_SLOT_DURATION = 8; +const AZTEC_SLOT_DURATION = 36; const DATA_DIR = fs.mkdtempSync(path.join(os.tmpdir(), 'validators-sentinel-')); @@ -46,6 +47,9 @@ describe('e2e_p2p_multiple_validators_sentinel', () => { aztecTargetCommitteeSize: NUM_VALIDATORS, aztecSlotDuration: AZTEC_SLOT_DURATION, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + blockDurationMs: 6000, + l1PublishingTime: 8, + enforceTimeTable: true, aztecProofSubmissionEpochs: 1024, // effectively do not reorg listenAddress: '127.0.0.1', minTxsPerBlock: 0, diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts index b5860060ba5f..05fe3e3429be 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts @@ -3,24 +3,24 @@ import { createLogger } from '@aztec/aztec.js/log'; import { waitForTx } from '@aztec/aztec.js/node'; import { Tx } from '@aztec/aztec.js/tx'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { SlotNumber } from '@aztec/foundation/branded-types'; +import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { timesAsync } from '@aztec/foundation/collection'; import { retryUntil } from '@aztec/foundation/retry'; -import { jest } from '@jest/globals'; +import { expect, jest } from '@jest/globals'; import fs from 'fs'; import os from 'os'; import path from 'path'; -import { shouldCollectMetrics } from '../../fixtures/fixtures.js'; +import { getBootNodeUdpPort, shouldCollectMetrics } from '../../fixtures/fixtures.js'; import { createNodes } from '../../fixtures/setup_p2p_test.js'; -import { P2PNetworkTest, SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES, 
WAIT_FOR_TX_TIMEOUT } from '../p2p_network.js'; +import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from '../p2p_network.js'; import { prepareTransactions } from '../shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds export const NUM_VALIDATORS = 6; -export const NUM_TXS_PER_NODE = 2; -export const BOOT_NODE_UDP_PORT = 4500; +export const NUM_TXS_PER_NODE = 4; +export const BOOT_NODE_UDP_PORT = getBootNodeUdpPort(); export const createReqrespDataDir = () => fs.mkdtempSync(path.join(os.tmpdir(), 'reqresp-')); @@ -38,8 +38,14 @@ export async function createReqrespTest(options: ReqrespOptions = {}): Promise

= 2; + + for (let i = 0; i < blockCount; i++) { + const block = published.checkpoint.blocks[i]; + expect(block.indexWithinCheckpoint).toBe(i); + expect(block.checkpointNumber).toBe(published.checkpoint.number); + expect(block.number).toBe(expectedBlockNumber); + expectedBlockNumber++; + } + } + + expect(mbpsFound).toBe(true); return nodes; } diff --git a/yarn-project/end-to-end/src/fixtures/fixtures.ts b/yarn-project/end-to-end/src/fixtures/fixtures.ts index edf72bd67b53..6b2002c6ffcd 100644 --- a/yarn-project/end-to-end/src/fixtures/fixtures.ts +++ b/yarn-project/end-to-end/src/fixtures/fixtures.ts @@ -7,6 +7,16 @@ export const shouldCollectMetrics = () => { return undefined; }; +/** Returns the boot node UDP port from environment variable or default value. */ +export function getBootNodeUdpPort(): number { + return process.env.BOOT_NODE_UDP_PORT ? parseInt(process.env.BOOT_NODE_UDP_PORT, 10) : 4500; +} + +/** Returns the anvil port from environment variable or default value. */ +export function getAnvilPort(): number { + return process.env.ANVIL_PORT ? 
parseInt(process.env.ANVIL_PORT, 10) : 8545; +} + export const TEST_PEER_CHECK_INTERVAL_MS = 1000; export const TEST_MAX_PENDING_TX_POOL_COUNT = 10_000; // Number of max pending TXs ~ 1.56GB diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index ac79e73e22ff..db2c855ddf7f 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -231,5 +231,6 @@ export async function createProverNode( proverNodeConfig, telemetry, delayer, + dateProvider, ); } diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 47f704581168..25f7bbb3eb60 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -56,7 +56,6 @@ type DataStoreOptions = Pick & Pick = new Map(); private config: ProverNodeOptions; @@ -81,6 +80,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable config: Partial = {}, protected readonly telemetryClient: TelemetryClient = getTelemetryClient(), private delayer?: Delayer, + private readonly dateProvider: DateProvider = new DateProvider(), ) { this.config = { proverNodePollingIntervalMs: 1_000, diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 60ce42919779..a6004e0f88c9 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -205,6 +205,9 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'Skip pushing proposed blocks to archiver (default: true)', ...booleanConfigHelper(DefaultSequencerConfig.skipPushProposedBlocksToArchiver), }, + minBlocksForCheckpoint: { + description: 'Minimum number of blocks required for a checkpoint proposal (test only)', + }, ...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowList']), }; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts 
b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index e7b84bb9cddc..7987730d579b 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -248,6 +248,15 @@ export class CheckpointProposalJob implements Traceable { return undefined; } + const minBlocksForCheckpoint = this.config.minBlocksForCheckpoint; + if (minBlocksForCheckpoint !== undefined && blocksInCheckpoint.length < minBlocksForCheckpoint) { + this.log.warn( + `Checkpoint has fewer blocks than minimum (${blocksInCheckpoint.length} < ${minBlocksForCheckpoint}), skipping proposal`, + { slot: this.slot, blocksBuilt: blocksInCheckpoint.length, minBlocksForCheckpoint }, + ); + return undefined; + } + // Assemble and broadcast the checkpoint proposal, including the last block that was not // broadcasted yet, and wait to collect the committee attestations. this.setStateFn(SequencerState.ASSEMBLING_CHECKPOINT, this.slot); diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index 2b94ba64f1db..d51c3b0383ea 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -69,6 +69,8 @@ export interface SequencerConfig { buildCheckpointIfEmpty?: boolean; /** Skip pushing proposed blocks to archiver (default: false) */ skipPushProposedBlocksToArchiver?: boolean; + /** Minimum number of blocks required for a checkpoint proposal (test only, defaults to undefined = no minimum) */ + minBlocksForCheckpoint?: number; } export const SequencerConfigSchema = zodFor()( @@ -103,6 +105,7 @@ export const SequencerConfigSchema = zodFor()( blockDurationMs: z.number().positive().optional(), buildCheckpointIfEmpty: z.boolean().optional(), skipPushProposedBlocksToArchiver: z.boolean().optional(), + minBlocksForCheckpoint: z.number().positive().optional(), }), ); @@ -117,7 +120,8 @@ type 
SequencerConfigOptionalKeys = | 'fakeThrowAfterProcessingTxCount' | 'l1PublishingTime' | 'txPublicSetupAllowList' - | 'minValidTxsPerBlock'; + | 'minValidTxsPerBlock' + | 'minBlocksForCheckpoint'; export type ResolvedSequencerConfig = Prettify< Required> & Pick From 761ee8ce81d3c318f94a4767f857e57c350c5af0 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 20:12:59 -0300 Subject: [PATCH 32/62] chore(e2e): reenable block building test (#20504) Flags as flake just in case, but seems to be working properly. --- .test_patterns.yml | 5 +++++ yarn-project/end-to-end/src/e2e_block_building.test.ts | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.test_patterns.yml b/.test_patterns.yml index 18b68fcee5bb..32666e514a5a 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -309,6 +309,11 @@ tests: owners: - *palla + - regex: "src/e2e_block_building.test.ts" + error_regex: "✕ processes txs until hitting timetable" + owners: + - *palla + # http://ci.aztec-labs.com/e8228a36afda93b8 # Test passed but there was an error on stopping - regex: "playground/scripts/run_test.sh" diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 9452b0dc2826..21aa382720ac 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -79,8 +79,7 @@ describe('e2e_block_building', () => { afterAll(() => teardown()); - // TODO(palla/mbps): We've seen these errors on syncing world state if we abort a tx processing halfway through. - it.skip('processes txs until hitting timetable', async () => { + it('processes txs until hitting timetable', async () => { // We send enough txs so they are spread across multiple blocks, but not // so many so that we don't end up hitting a reorg or timing out the tx wait(). 
const TX_COUNT = 16; From 06d81c8871f64fd659abfcfbb3074904917f3d8e Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 13 Feb 2026 20:14:14 -0300 Subject: [PATCH 33/62] refactor(sequencer): rename block-level metrics to checkpoint-level (#20505) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Renames sequencer metrics that were incorrectly named at the block level to checkpoint level (proposal success, precheck failed, rewards) - Adds new checkpoint-level metrics: build duration, block count, tx count, total mana - Removes old block-level metric constants (`SEQUENCER_BLOCK_PROPOSAL_SUCCESS_COUNT`, `SEQUENCER_BLOCK_PROPOSAL_PRECHECK_FAILED_COUNT`, `SEQUENCER_CURRENT_BLOCK_REWARDS`) - Fixes attestation metric descriptions to say "checkpoint" instead of "block" - Updates grafana alert for new naming. ## Test plan - [x] `yarn build` passes - [x] `yarn format` and `yarn lint` clean - [x] `yarn workspace @aztec/sequencer-client test src/sequencer/checkpoint_proposal_job.test.ts` passes (26/26) - [x] `yarn workspace @aztec/sequencer-client test src/sequencer/sequencer.test.ts` passes (22/22) 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Opus 4.6 --- spartan/metrics/grafana/alerts/rules.yaml | 2 +- .../src/sequencer/checkpoint_proposal_job.ts | 13 ++++- .../sequencer-client/src/sequencer/metrics.ts | 52 ++++++++++++++----- .../src/sequencer/sequencer.ts | 8 +-- yarn-project/telemetry-client/src/metrics.ts | 50 ++++++++++++++---- 5 files changed, 94 insertions(+), 31 deletions(-) diff --git a/spartan/metrics/grafana/alerts/rules.yaml b/spartan/metrics/grafana/alerts/rules.yaml index cbea5beb931a..bfe88fa23d11 100644 --- a/spartan/metrics/grafana/alerts/rules.yaml +++ b/spartan/metrics/grafana/alerts/rules.yaml @@ -273,7 +273,7 @@ groups: datasourceUid: spartan-metrics-prometheus model: editorMode: code - expr: sum by (k8s_namespace_name, aztec_error_type) 
(increase(aztec_sequencer_block_proposal_precheck_failed_count{k8s_namespace_name=~".*(fisherman|mainnet).*"}[$__rate_interval])) + expr: sum by (k8s_namespace_name, aztec_error_type) (increase(aztec_sequencer_checkpoint_precheck_failed_count{k8s_namespace_name=~".*(fisherman|mainnet).*"}[$__rate_interval])) instant: true intervalMs: 60000 legendFormat: __auto diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index e7b84bb9cddc..3336ba871168 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -129,7 +129,7 @@ export class CheckpointProposalJob implements Traceable { await Promise.all(votesPromises); if (checkpoint) { - this.metrics.recordBlockProposalSuccess(); + this.metrics.recordCheckpointProposalSuccess(); } // Do not post anything to L1 if we are fishermen, but do perform L1 fee analysis @@ -221,6 +221,7 @@ export class CheckpointProposalJob implements Traceable { let blocksInCheckpoint: L2Block[] = []; let blockPendingBroadcast: { block: L2Block; txs: Tx[] } | undefined = undefined; + const checkpointBuildTimer = new Timer(); try { // Main loop: build blocks for the checkpoint @@ -253,6 +254,14 @@ export class CheckpointProposalJob implements Traceable { this.setStateFn(SequencerState.ASSEMBLING_CHECKPOINT, this.slot); const checkpoint = await checkpointBuilder.completeCheckpoint(); + // Record checkpoint-level build metrics + this.metrics.recordCheckpointBuild( + checkpointBuildTimer.ms(), + blocksInCheckpoint.length, + checkpoint.getStats().txCount, + Number(checkpoint.header.totalManaUsed.toBigInt()), + ); + // Do not collect attestations nor publish to L1 in fisherman mode if (this.config.fishermanMode) { this.log.info( @@ -826,7 +835,7 @@ export class CheckpointProposalJob implements Traceable { slot: this.slot, feeAnalysisId: 
feeAnalysis?.id, }); - this.metrics.recordBlockProposalFailed('block_build_failed'); + this.metrics.recordCheckpointProposalFailed('block_build_failed'); } this.publisher.clearPendingRequests(); diff --git a/yarn-project/sequencer-client/src/sequencer/metrics.ts b/yarn-project/sequencer-client/src/sequencer/metrics.ts index 2758074d3bdd..a6d774d8be3c 100644 --- a/yarn-project/sequencer-client/src/sequencer/metrics.ts +++ b/yarn-project/sequencer-client/src/sequencer/metrics.ts @@ -18,7 +18,6 @@ import { type Hex, formatUnits } from 'viem'; import type { SequencerState } from './utils.js'; -// TODO(palla/mbps): Review all metrics and add any missing ones per checkpoint export class SequencerMetrics { public readonly tracer: Tracer; private meter: Meter; @@ -40,11 +39,16 @@ export class SequencerMetrics { private filledSlots: UpDownCounter; private blockProposalFailed: UpDownCounter; - private blockProposalSuccess: UpDownCounter; - private blockProposalPrecheckFailed: UpDownCounter; + private checkpointProposalSuccess: UpDownCounter; + private checkpointPrecheckFailed: UpDownCounter; + private checkpointProposalFailed: UpDownCounter; private checkpointSuccess: UpDownCounter; private slashingAttempts: UpDownCounter; private checkpointAttestationDelay: Histogram; + private checkpointBuildDuration: Histogram; + private checkpointBlockCount: Gauge; + private checkpointTxCount: Gauge; + private checkpointTotalMana: Gauge; // Fisherman fee analysis metrics private fishermanWouldBeIncluded: UpDownCounter; @@ -83,7 +87,7 @@ export class SequencerMetrics { this.checkpointAttestationDelay = this.meter.createHistogram(Metrics.SEQUENCER_CHECKPOINT_ATTESTATION_DELAY); - this.rewards = this.meter.createGauge(Metrics.SEQUENCER_CURRENT_BLOCK_REWARDS); + this.rewards = this.meter.createGauge(Metrics.SEQUENCER_CURRENT_SLOT_REWARDS); this.slots = createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_SLOT_COUNT); @@ -106,16 +110,16 @@ export class SequencerMetrics { 
Metrics.SEQUENCER_BLOCK_PROPOSAL_FAILED_COUNT, ); - this.blockProposalSuccess = createUpDownCounterWithDefault( + this.checkpointProposalSuccess = createUpDownCounterWithDefault( this.meter, - Metrics.SEQUENCER_BLOCK_PROPOSAL_SUCCESS_COUNT, + Metrics.SEQUENCER_CHECKPOINT_PROPOSAL_SUCCESS_COUNT, ); this.checkpointSuccess = createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_CHECKPOINT_SUCCESS_COUNT); - this.blockProposalPrecheckFailed = createUpDownCounterWithDefault( + this.checkpointPrecheckFailed = createUpDownCounterWithDefault( this.meter, - Metrics.SEQUENCER_BLOCK_PROPOSAL_PRECHECK_FAILED_COUNT, + Metrics.SEQUENCER_CHECKPOINT_PRECHECK_FAILED_COUNT, { [Attributes.ERROR_TYPE]: [ 'slot_already_taken', @@ -126,6 +130,16 @@ export class SequencerMetrics { }, ); + this.checkpointProposalFailed = createUpDownCounterWithDefault( + this.meter, + Metrics.SEQUENCER_CHECKPOINT_PROPOSAL_FAILED_COUNT, + ); + + this.checkpointBuildDuration = this.meter.createHistogram(Metrics.SEQUENCER_CHECKPOINT_BUILD_DURATION); + this.checkpointBlockCount = this.meter.createGauge(Metrics.SEQUENCER_CHECKPOINT_BLOCK_COUNT); + this.checkpointTxCount = this.meter.createGauge(Metrics.SEQUENCER_CHECKPOINT_TX_COUNT); + this.checkpointTotalMana = this.meter.createGauge(Metrics.SEQUENCER_CHECKPOINT_TOTAL_MANA); + this.slashingAttempts = createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_SLASHING_ATTEMPTS_COUNT); // Fisherman fee analysis metrics @@ -258,18 +272,30 @@ export class SequencerMetrics { }); } - recordBlockProposalSuccess() { - this.blockProposalSuccess.add(1); + recordCheckpointProposalSuccess() { + this.checkpointProposalSuccess.add(1); } - recordBlockProposalPrecheckFailed( + recordCheckpointPrecheckFailed( checkType: 'slot_already_taken' | 'rollup_contract_check_failed' | 'slot_mismatch' | 'block_number_mismatch', ) { - this.blockProposalPrecheckFailed.add(1, { - [Attributes.ERROR_TYPE]: checkType, + this.checkpointPrecheckFailed.add(1, { [Attributes.ERROR_TYPE]: 
checkType }); + } + + recordCheckpointProposalFailed(reason?: string) { + this.checkpointProposalFailed.add(1, { + ...(reason && { [Attributes.ERROR_TYPE]: reason }), }); } + /** Records aggregate metrics for a completed checkpoint build. */ + recordCheckpointBuild(durationMs: number, blockCount: number, txCount: number, totalMana: number) { + this.checkpointBuildDuration.record(Math.ceil(durationMs)); + this.checkpointBlockCount.record(blockCount); + this.checkpointTxCount.record(txCount); + this.checkpointTotalMana.record(totalMana); + } + recordSlashingAttempt(actionCount: number) { this.slashingAttempts.add(actionCount); } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index de3dd62cd897..0f981bcda8dc 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -306,7 +306,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter Date: Fri, 13 Feb 2026 22:57:21 +0000 Subject: [PATCH 34/62] feat(archiver): add l2 tips cache We were hitting a bug in the archiver where getL2Tips failed when called during a reorg, since blocks were removed inbetween the getBlockNumber and getBlock calls. An easy fix is adding a retry. But why make it easy? This PR adds a cache for L2 tips for the archiver that gets updated only during write operations and within store transactions. This should ensure we don't get rugged while computing tips, and also reduce the load on the block store, since getL2Tips gets called constantly by all subsystems on the archiver (from their respective blockstreams). Builds on top of #20503 so we can use getBlockData instead of getBlock for accessing checkpoint numbers. 
--- .../archiver/src/archiver-sync.test.ts | 6 + yarn-project/archiver/src/archiver.ts | 120 ++---------------- yarn-project/archiver/src/factory.ts | 7 + yarn-project/archiver/src/index.ts | 1 + .../src/modules/data_store_updater.ts | 61 ++++++--- .../archiver/src/modules/l1_synchronizer.ts | 8 +- .../archiver/src/store/kv_archiver_store.ts | 5 + .../archiver/src/store/l2_tips_cache.ts | 89 +++++++++++++ 8 files changed, 166 insertions(+), 131 deletions(-) create mode 100644 yarn-project/archiver/src/store/l2_tips_cache.ts diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index 11cd821b297d..76a12af1f559 100644 --- a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -30,6 +30,7 @@ import { Archiver, type ArchiverEmitter } from './archiver.js'; import type { ArchiverInstrumentation } from './modules/instrumentation.js'; import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; import { KVArchiverDataStore } from './store/kv_archiver_store.js'; +import { L2TipsCache } from './store/l2_tips_cache.js'; import { FakeL1State } from './test/fake_l1_state.js'; describe('Archiver Sync', () => { @@ -116,6 +117,9 @@ describe('Archiver Sync', () => { // Create event emitter shared by archiver and synchronizer const events = new EventEmitter() as ArchiverEmitter; + // Create L2 tips cache shared by archiver and synchronizer + const l2TipsCache = new L2TipsCache(archiverStore.blockStore); + // Create the L1 synchronizer synchronizer = new ArchiverL1Synchronizer( publicClient, @@ -132,6 +136,7 @@ describe('Archiver Sync', () => { l1Constants, events, instrumentation.tracer, + l2TipsCache, syncLogger, ); @@ -147,6 +152,7 @@ describe('Archiver Sync', () => { l1Constants, synchronizer, events, + l2TipsCache, ); }); diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts index 9eefe8d2d66c..4aec6f3c9e69 100644 --- 
a/yarn-project/archiver/src/archiver.ts +++ b/yarn-project/archiver/src/archiver.ts @@ -1,5 +1,4 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; -import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { EpochCache } from '@aztec/epoch-cache'; import { BlockTagTooOldError, RollupContract } from '@aztec/ethereum/contracts'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; @@ -15,8 +14,6 @@ import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/runni import { DateProvider } from '@aztec/foundation/timer'; import { type ArchiverEmitter, - type CheckpointId, - GENESIS_CHECKPOINT_HEADER_HASH, L2Block, type L2BlockSink, type L2Tips, @@ -41,6 +38,7 @@ import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js'; import type { ArchiverInstrumentation } from './modules/instrumentation.js'; import type { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; import type { KVArchiverDataStore } from './store/kv_archiver_store.js'; +import { L2TipsCache } from './store/l2_tips_cache.js'; /** Export ArchiverEmitter for use in factory and tests. */ export type { ArchiverEmitter }; @@ -83,6 +81,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra /** Helper to handle updates to the store */ private readonly updater: ArchiverDataStoreUpdater; + /** In-memory cache for L2 chain tips. 
*/ + private readonly l2TipsCache: L2TipsCache; + public readonly tracer: Tracer; /** @@ -122,6 +123,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, synchronizer: ArchiverL1Synchronizer, events: ArchiverEmitter, + l2TipsCache?: L2TipsCache, private readonly log: Logger = createLogger('archiver'), ) { super(dataStore, l1Constants); @@ -130,7 +132,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra this.initialSyncPromise = promiseWithResolvers(); this.synchronizer = synchronizer; this.events = events; - this.updater = new ArchiverDataStoreUpdater(this.dataStore); + this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore); + this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache); // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync // are done as fast as possible. This then gets updated once the initial sync completes. @@ -391,111 +394,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra return true; } - public async getL2Tips(): Promise { - const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([ - this.getBlockNumber(), - this.getProvenBlockNumber(), - this.getCheckpointedL2BlockNumber(), - this.getFinalizedL2BlockNumber(), - ] as const); - - const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1); - - // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks - const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] = - await Promise.all([ - latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined, - provenBlockNumber > beforeInitialblockNumber ? 
this.getCheckpointedBlock(provenBlockNumber) : undefined, - finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined, - checkpointedBlockNumber > beforeInitialblockNumber - ? this.getCheckpointedBlock(checkpointedBlockNumber) - : undefined, - ] as const); - - if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) { - throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`); - } - - // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number. - if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const finalizedBlockHeaderHash = - (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? 
GENESIS_BLOCK_HEADER_HASH; - - // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks - const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([ - provenCheckpointedBlock !== undefined - ? await this.getCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1) - : [undefined], - finalizedCheckpointedBlock !== undefined - ? await this.getCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1) - : [undefined], - checkpointedBlock !== undefined ? await this.getCheckpoints(checkpointedBlock?.checkpointNumber, 1) : [undefined], - ]); - - const initialcheckpointId: CheckpointId = { - number: CheckpointNumber.ZERO, - hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(), - }; - - const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => { - if (checkpoint === undefined) { - return initialcheckpointId; - } - return { - number: checkpoint.checkpoint.number, - hash: checkpoint.checkpoint.hash().toString(), - }; - }; - - const l2Tips: L2Tips = { - proposed: { - number: latestBlockNumber, - hash: latestBlockHeaderHash.toString(), - }, - proven: { - block: { - number: provenBlockNumber, - hash: provenBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(provenBlockCheckpoint), - }, - finalized: { - block: { - number: finalizedBlockNumber, - hash: finalizedBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(finalizedBlockCheckpoint), - }, - checkpointed: { - block: { - number: checkpointedBlockNumber, - hash: checkpointedBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(checkpointedBlockCheckpoint), - }, - }; - - return l2Tips; + public getL2Tips(): Promise { + return this.l2TipsCache.getL2Tips(); } public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise { @@ -532,7 +432,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, 
l1BlockHash: targetL1BlockHash }); if (targetL2BlockNumber < currentProvenBlock) { this.log.info(`Clearing proven L2 block number`); - await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO); + await this.updater.setProvenCheckpointNumber(CheckpointNumber.ZERO); } // TODO(palla/reorg): Set the finalized block when we add support for it. // if (targetL2BlockNumber < currentFinalizedBlock) { diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index eeb5090c406e..dc0ca5552d85 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -25,6 +25,7 @@ import { type ArchiverConfig, mapArchiverConfig } from './config.js'; import { ArchiverInstrumentation } from './modules/instrumentation.js'; import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './store/kv_archiver_store.js'; +import { L2TipsCache } from './store/l2_tips_cache.js'; export const ARCHIVER_STORE_NAME = 'archiver'; @@ -128,6 +129,9 @@ export async function createArchiver( // Create the event emitter that will be shared by archiver and synchronizer const events = new EventEmitter() as ArchiverEmitter; + // Create L2 tips cache shared by archiver and synchronizer + const l2TipsCache = new L2TipsCache(archiverStore.blockStore); + // Create the L1 synchronizer const synchronizer = new ArchiverL1Synchronizer( publicClient, @@ -144,6 +148,8 @@ export async function createArchiver( l1Constants, events, instrumentation.tracer, + l2TipsCache, + undefined, // log (use default) ); const archiver = new Archiver( @@ -158,6 +164,7 @@ export async function createArchiver( l1Constants, synchronizer, events, + l2TipsCache, ); await archiver.start(opts.blockUntilSync); diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index 224884764f17..51aa5f45706c 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts 
@@ -8,5 +8,6 @@ export * from './config.js'; export { type L1PublishedData } from './structs/published.js'; export { KVArchiverDataStore, ARCHIVER_DB_VERSION } from './store/kv_archiver_store.js'; export { ContractInstanceStore } from './store/contract_instance_store.js'; +export { L2TipsCache } from './store/l2_tips_cache.js'; export { retrieveCheckpointsFromRollup, retrieveL2ProofVerifiedEvents } from './l1/data_retrieval.js'; diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts index 1df274146880..dd2e6becd57a 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -25,6 +25,7 @@ import type { UInt64 } from '@aztec/stdlib/types'; import groupBy from 'lodash.groupby'; import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; +import type { L2TipsCache } from '../store/l2_tips_cache.js'; /** Operation type for contract data updates. */ enum Operation { @@ -44,7 +45,10 @@ type ReconcileCheckpointsResult = { export class ArchiverDataStoreUpdater { private readonly log = createLogger('archiver:store_updater'); - constructor(private store: KVArchiverDataStore) {} + constructor( + private store: KVArchiverDataStore, + private l2TipsCache?: L2TipsCache, + ) {} /** * Adds proposed blocks to the store with contract class/instance extraction from logs. @@ -56,11 +60,11 @@ export class ArchiverDataStoreUpdater { * @param pendingChainValidationStatus - Optional validation status to set. * @returns True if the operation is successful. 
*/ - public addProposedBlocks( + public async addProposedBlocks( blocks: L2Block[], pendingChainValidationStatus?: ValidateCheckpointResult, ): Promise { - return this.store.transactionAsync(async () => { + const result = await this.store.transactionAsync(async () => { await this.store.addProposedBlocks(blocks); const opResults = await Promise.all([ @@ -72,8 +76,10 @@ export class ArchiverDataStoreUpdater { ...blocks.map(block => this.addContractDataToDb(block)), ]); + await this.l2TipsCache?.refresh(); return opResults.every(Boolean); }); + return result; } /** @@ -87,11 +93,11 @@ export class ArchiverDataStoreUpdater { * @param pendingChainValidationStatus - Optional validation status to set. * @returns Result with information about any pruned blocks. */ - public addCheckpoints( + public async addCheckpoints( checkpoints: PublishedCheckpoint[], pendingChainValidationStatus?: ValidateCheckpointResult, ): Promise { - return this.store.transactionAsync(async () => { + const result = await this.store.transactionAsync(async () => { // Before adding checkpoints, check for conflicts with local blocks if any const { prunedBlocks, lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints); @@ -111,8 +117,10 @@ export class ArchiverDataStoreUpdater { ...newBlocks.map(block => this.addContractDataToDb(block)), ]); + await this.l2TipsCache?.refresh(); return { prunedBlocks, lastAlreadyInsertedBlockNumber }; }); + return result; } /** @@ -197,8 +205,8 @@ export class ArchiverDataStoreUpdater { * @returns The removed blocks. * @throws Error if any block to be removed is checkpointed. 
*/ - public removeUncheckpointedBlocksAfter(blockNumber: BlockNumber): Promise { - return this.store.transactionAsync(async () => { + public async removeUncheckpointedBlocksAfter(blockNumber: BlockNumber): Promise { + const result = await this.store.transactionAsync(async () => { // Verify we're only removing uncheckpointed blocks const lastCheckpointedBlockNumber = await this.store.getCheckpointedL2BlockNumber(); if (blockNumber < lastCheckpointedBlockNumber) { @@ -207,8 +215,11 @@ export class ArchiverDataStoreUpdater { ); } - return await this.removeBlocksAfter(blockNumber); + const result = await this.removeBlocksAfter(blockNumber); + await this.l2TipsCache?.refresh(); + return result; }); + return result; } /** @@ -238,17 +249,31 @@ export class ArchiverDataStoreUpdater { * @returns True if the operation is successful. */ public async removeCheckpointsAfter(checkpointNumber: CheckpointNumber): Promise { - const { blocksRemoved = [] } = await this.store.removeCheckpointsAfter(checkpointNumber); - - const opResults = await Promise.all([ - // Prune rolls back to the last proven block, which is by definition valid - this.store.setPendingChainValidationStatus({ valid: true }), - // Remove contract data for all blocks being removed - ...blocksRemoved.map(block => this.removeContractDataFromDb(block)), - this.store.deleteLogs(blocksRemoved), - ]); + return await this.store.transactionAsync(async () => { + const { blocksRemoved = [] } = await this.store.removeCheckpointsAfter(checkpointNumber); + + const opResults = await Promise.all([ + // Prune rolls back to the last proven block, which is by definition valid + this.store.setPendingChainValidationStatus({ valid: true }), + // Remove contract data for all blocks being removed + ...blocksRemoved.map(block => this.removeContractDataFromDb(block)), + this.store.deleteLogs(blocksRemoved), + ]); - return opResults.every(Boolean); + await this.l2TipsCache?.refresh(); + return opResults.every(Boolean); + }); + } + + /** + * 
Updates the proven checkpoint number and refreshes the L2 tips cache. + * @param checkpointNumber - The checkpoint number to set as proven. + */ + public async setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise { + await this.store.transactionAsync(async () => { + await this.store.setProvenCheckpointNumber(checkpointNumber); + await this.l2TipsCache?.refresh(); + }); } /** Extracts and stores contract data from a single block. */ diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index 7d8992c09616..c2dbca60d559 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -28,6 +28,7 @@ import { retrievedToPublishedCheckpoint, } from '../l1/data_retrieval.js'; import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; +import type { L2TipsCache } from '../store/l2_tips_cache.js'; import type { InboxMessage } from '../structs/inbox_message.js'; import { ArchiverDataStoreUpdater } from './data_store_updater.js'; import type { ArchiverInstrumentation } from './instrumentation.js'; @@ -77,9 +78,10 @@ export class ArchiverL1Synchronizer implements Traceable { private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, private readonly events: ArchiverEmitter, tracer: Tracer, + l2TipsCache?: L2TipsCache, private readonly log: Logger = createLogger('archiver:l1-sync'), ) { - this.updater = new ArchiverDataStoreUpdater(this.store); + this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache); this.tracer = tracer; } @@ -550,7 +552,7 @@ export class ArchiverL1Synchronizer implements Traceable { if (provenCheckpointNumber === 0) { const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber(); if (localProvenCheckpointNumber !== provenCheckpointNumber) { - await this.store.setProvenCheckpointNumber(provenCheckpointNumber); + await 
this.updater.setProvenCheckpointNumber(provenCheckpointNumber); this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber }); } } @@ -582,7 +584,7 @@ export class ArchiverL1Synchronizer implements Traceable { ) { const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber(); if (localProvenCheckpointNumber !== provenCheckpointNumber) { - await this.store.setProvenCheckpointNumber(provenCheckpointNumber); + await this.updater.setProvenCheckpointNumber(provenCheckpointNumber); this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber }); const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber; const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1Constants); diff --git a/yarn-project/archiver/src/store/kv_archiver_store.ts b/yarn-project/archiver/src/store/kv_archiver_store.ts index ed31527c65ed..24447c10ae5a 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.ts @@ -80,6 +80,11 @@ export class KVArchiverDataStore implements ContractDataSource { this.#contractInstanceStore = new ContractInstanceStore(db); } + /** Returns the underlying block store. Used by L2TipsCache. */ + get blockStore(): BlockStore { + return this.#blockStore; + } + /** Opens a new transaction to the underlying store and runs all operations within it. 
*/ public transactionAsync(callback: () => Promise): Promise { return this.db.transactionAsync(callback); diff --git a/yarn-project/archiver/src/store/l2_tips_cache.ts b/yarn-project/archiver/src/store/l2_tips_cache.ts new file mode 100644 index 000000000000..64a0192e7624 --- /dev/null +++ b/yarn-project/archiver/src/store/l2_tips_cache.ts @@ -0,0 +1,89 @@ +import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; +import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { type BlockData, type CheckpointId, GENESIS_CHECKPOINT_HEADER_HASH, type L2Tips } from '@aztec/stdlib/block'; + +import type { BlockStore } from './block_store.js'; + +/** + * In-memory cache for L2 chain tips (proposed, checkpointed, proven, finalized). + * Populated from the BlockStore on first access, then kept up-to-date by the ArchiverDataStoreUpdater. + * Refresh calls should happen within the store transaction that mutates block data to ensure consistency. + */ +export class L2TipsCache { + #tipsPromise: Promise | undefined; + + constructor(private blockStore: BlockStore) {} + + /** Returns the cached L2 tips. Loads from the block store on first call. */ + public getL2Tips(): Promise { + return (this.#tipsPromise ??= this.loadFromStore()); + } + + /** Reloads the L2 tips from the block store. Should be called within the store transaction that mutates data. 
*/ + public async refresh(): Promise { + this.#tipsPromise = this.loadFromStore(); + await this.#tipsPromise; + } + + private async loadFromStore(): Promise { + const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([ + this.blockStore.getLatestBlockNumber(), + this.blockStore.getProvenBlockNumber(), + this.blockStore.getCheckpointedL2BlockNumber(), + this.blockStore.getFinalizedL2BlockNumber(), + ]); + + const genesisBlockHeader = { + blockHash: GENESIS_BLOCK_HEADER_HASH, + checkpointNumber: CheckpointNumber.ZERO, + } as const; + const beforeInitialBlockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1); + + const getBlockData = (blockNumber: BlockNumber) => + blockNumber > beforeInitialBlockNumber ? this.blockStore.getBlockData(blockNumber) : genesisBlockHeader; + + const [latestBlockData, provenBlockData, checkpointedBlockData, finalizedBlockData] = await Promise.all( + [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber].map(getBlockData), + ); + + if (!latestBlockData || !provenBlockData || !finalizedBlockData || !checkpointedBlockData) { + throw new Error('Failed to load block data for L2 tips'); + } + + const [provenCheckpointId, finalizedCheckpointId, checkpointedCheckpointId] = await Promise.all([ + this.getCheckpointIdForBlock(provenBlockData), + this.getCheckpointIdForBlock(finalizedBlockData), + this.getCheckpointIdForBlock(checkpointedBlockData), + ]); + + return { + proposed: { number: latestBlockNumber, hash: latestBlockData.blockHash.toString() }, + proven: { + block: { number: provenBlockNumber, hash: provenBlockData.blockHash.toString() }, + checkpoint: provenCheckpointId, + }, + finalized: { + block: { number: finalizedBlockNumber, hash: finalizedBlockData.blockHash.toString() }, + checkpoint: finalizedCheckpointId, + }, + checkpointed: { + block: { number: checkpointedBlockNumber, hash: checkpointedBlockData.blockHash.toString() }, + checkpoint: 
checkpointedCheckpointId, + }, + }; + } + + private async getCheckpointIdForBlock(blockData: Pick): Promise { + const checkpointData = await this.blockStore.getCheckpointData(blockData.checkpointNumber); + if (!checkpointData) { + return { + number: CheckpointNumber.ZERO, + hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(), + }; + } + return { + number: checkpointData.checkpointNumber, + hash: checkpointData.header.hash().toString(), + }; + } +} From 5f7ad8f206222b80f9da2d1bace3d3e938dd2b1d Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Sun, 15 Feb 2026 15:12:42 +0000 Subject: [PATCH 35/62] More metrics --- spartan/scripts/deploy_network.sh | 2 ++ spartan/terraform/deploy-aztec-infra/main.tf | 1 + spartan/terraform/deploy-aztec-infra/variables.tf | 6 ++++++ .../archiver/src/modules/instrumentation.ts | 12 ++++++++++-- .../archiver/src/modules/l1_synchronizer.ts | 2 +- yarn-project/end-to-end/src/spartan/n_tps.test.ts | 5 +++-- yarn-project/foundation/src/config/env_var.ts | 1 + .../eviction/fee_payer_balance_eviction_rule.ts | 2 +- .../eviction/invalid_txs_after_mining_rule.ts | 5 +---- .../eviction/invalid_txs_after_reorg_rule.ts | 4 ++-- .../eviction/low_priority_eviction_rule.ts | 11 +++++------ yarn-project/sequencer-client/src/config.ts | 6 ++++++ .../src/sequencer/checkpoint_proposal_job.ts | 15 +++++++++++++++ yarn-project/stdlib/src/interfaces/configs.ts | 3 +++ yarn-project/telemetry-client/src/metrics.ts | 5 +++++ 15 files changed, 62 insertions(+), 18 deletions(-) diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index abbdaf060174..9d8103c0794b 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -97,6 +97,7 @@ SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-0} SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-8} SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-} SEQ_BUILD_CHECKPOINT_IF_EMPTY=${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-} 
+SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT=${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT:0} PROVER_REPLICAS=${PROVER_REPLICAS:-4} PROVER_AGENTS_PER_PROVER=${PROVER_AGENTS_PER_PROVER:-1} R2_ACCESS_KEY_ID=${R2_ACCESS_KEY_ID:-} @@ -506,6 +507,7 @@ SEQ_MIN_TX_PER_BLOCK = ${SEQ_MIN_TX_PER_BLOCK} SEQ_MAX_TX_PER_BLOCK = ${SEQ_MAX_TX_PER_BLOCK} SEQ_BLOCK_DURATION_MS = ${SEQ_BLOCK_DURATION_MS:-null} SEQ_BUILD_CHECKPOINT_IF_EMPTY = ${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-null} +SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT = ${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT} PROVER_MNEMONIC = "${LABS_INFRA_MNEMONIC}" PROVER_PUBLISHER_MNEMONIC_START_INDEX = ${PROVER_PUBLISHER_MNEMONIC_START_INDEX} PROVER_PUBLISHERS_PER_PROVER = ${PUBLISHERS_PER_PROVER} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index dccea9a87427..347939595c5b 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -220,6 +220,7 @@ locals { "validator.node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE "validator.node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS "validator.node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS + "validator.node.env.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT" = var.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT } # Note: nonsensitive() is required here because helm_releases is used in for_each, diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index f3f27dc0cde8..9947d2379f43 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -343,6 +343,12 @@ variable "SEQ_MAX_TX_PER_BLOCK" { default = "8" } +variable "SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT" { + description = "Percentage probability of skipping checkpoint publishing" + type = string + default = "0" +} + variable "SEQ_BLOCK_DURATION_MS" { description = "Duration per block in milliseconds when building 
multiple blocks per slot" type = string diff --git a/yarn-project/archiver/src/modules/instrumentation.ts b/yarn-project/archiver/src/modules/instrumentation.ts index 57f7c6413f75..f0a18a2d17d7 100644 --- a/yarn-project/archiver/src/modules/instrumentation.ts +++ b/yarn-project/archiver/src/modules/instrumentation.ts @@ -13,10 +13,13 @@ import { createUpDownCounterWithDefault, } from '@aztec/telemetry-client'; +import type { CheckpointData } from '../store/block_store.js'; + export class ArchiverInstrumentation { public readonly tracer: Tracer; private blockHeight: Gauge; + private checkpointHeight: Gauge; private txCount: UpDownCounter; private l1BlockHeight: Gauge; private proofsSubmittedDelay: Histogram; @@ -47,6 +50,8 @@ export class ArchiverInstrumentation { this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT); + this.checkpointHeight = meter.createGauge(Metrics.ARCHIVER_CHECKPOINT_HEIGHT); + this.l1BlockHeight = meter.createGauge(Metrics.ARCHIVER_L1_BLOCK_HEIGHT); this.txCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_TOTAL_TXS); @@ -105,6 +110,7 @@ export class ArchiverInstrumentation { public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) { this.syncDurationPerBlock.record(Math.ceil(syncTimePerBlock)); this.blockHeight.record(Math.max(...blocks.map(b => b.number))); + this.checkpointHeight.record(Math.max(...blocks.map(b => b.checkpointNumber))); this.syncBlockCount.add(blocks.length); for (const block of blocks) { @@ -127,8 +133,10 @@ export class ArchiverInstrumentation { this.pruneDuration.record(Math.ceil(duration)); } - public updateLastProvenBlock(blockNumber: number) { - this.blockHeight.record(blockNumber, { [Attributes.STATUS]: 'proven' }); + public updateLastProvenCheckpoint(checkpoint: CheckpointData) { + const lastBlockNumberInCheckpoint = checkpoint.startBlock + checkpoint.numBlocks - 1; + this.blockHeight.record(lastBlockNumberInCheckpoint, { [Attributes.STATUS]: 'proven' }); + 
this.checkpointHeight.record(checkpoint.checkpointNumber, { [Attributes.STATUS]: 'proven' }); } public processProofsVerified(logs: { proverId: string; l2BlockNumber: bigint; delay: bigint }[]) { diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index c2dbca60d559..bc73f3bb35c2 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -599,7 +599,7 @@ export class ArchiverL1Synchronizer implements Traceable { slotNumber: provenSlotNumber, epochNumber: provenEpochNumber, }); - this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint); + this.instrumentation.updateLastProvenCheckpoint(localCheckpointForDestinationProvenCheckpointNumber); } else { this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`); } diff --git a/yarn-project/end-to-end/src/spartan/n_tps.test.ts b/yarn-project/end-to-end/src/spartan/n_tps.test.ts index f43b0cd42830..95405e1b9e84 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps.test.ts @@ -345,8 +345,9 @@ describe('sustained N TPS test', () => { let lowValueTxs = 0; const lowValueSendTx = async (wallet: TestWallet) => { lowValueTxs++; - const feeAmount = Number(randomBigInt(10n)) + 1; + //const feeAmount = Number(randomBigInt(100n)) + 1; //const feeAmount = 1; + const feeAmount = Math.floor(lowValueTxs / 1000) + 1; const fee = new GasFees(0, feeAmount); logger.info('Sending low value tx ' + lowValueTxs + ' with fee ' + feeAmount); @@ -359,7 +360,7 @@ describe('sustained N TPS test', () => { let highValueTxs = 0; const highValueSendTx = async (wallet: TestWallet) => { highValueTxs++; - const feeAmount = Number(randomBigInt(10n)) + 11; + const feeAmount = Number(randomBigInt(10n)) + 1000; const fee = new GasFees(0, feeAmount); logger.info('Sending high value tx ' + highValueTxs + ' with fee ' + feeAmount); diff 
--git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 3ed05d7dbb28..014ff224754e 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -215,6 +215,7 @@ export type EnvVar = | 'SEQ_BUILD_CHECKPOINT_IF_EMPTY' | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_COMMITTEE_MEMBER' | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_NON_COMMITTEE_MEMBER' + | 'SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT' | 'SLASH_MIN_PENALTY_PERCENTAGE' | 'SLASH_MAX_PENALTY_PERCENTAGE' | 'SLASH_VALIDATORS_ALWAYS' diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts index 3bc9a84d8a58..969fd127b1d1 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts @@ -68,7 +68,7 @@ export class FeePayerBalanceEvictionRule implements EvictionRule { if (txsToEvict.length > 0) { await pool.deleteTxs(txsToEvict, this.name); - this.log.verbose(`Evicted ${txsToEvict.length} txs due to insufficient fee payer balance`, { + this.log.debug(`Evicted ${txsToEvict.length} txs due to insufficient fee payer balance`, { txHashes: txsToEvict, }); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts index 3cf9ac3e5e45..366eb8e02b21 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts @@ -35,22 +35,19 @@ export class InvalidTxsAfterMiningRule implements EvictionRule { for (const meta of pendingTxs) { // Evict pending txs that share nullifiers with mined txs if (meta.nullifiers.some(nullifier => 
minedNullifiers.has(nullifier))) { - this.log.verbose(`Evicting tx ${meta.txHash} from pool due to a duplicate nullifier with a mined tx`); txsToEvict.push(meta.txHash); continue; } // Evict pending txs with an expiration timestamp less than or equal to the mined block timestamp if (meta.includeByTimestamp <= timestamp) { - this.log.verbose( - `Evicting tx ${meta.txHash} from pool due to the tx being expired (includeByTimestamp: ${meta.includeByTimestamp}, mined block timestamp: ${timestamp})`, - ); txsToEvict.push(meta.txHash); continue; } } if (txsToEvict.length > 0) { + this.log.info(`Evicted ${txsToEvict.length} invalid txs after block mined`); await pool.deleteTxs(txsToEvict, this.name); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts index 3a9476c18cef..72462a8a687f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts @@ -72,7 +72,7 @@ export class InvalidTxsAfterReorgRule implements EvictionRule { } if (txsToEvict.length > 0) { - this.log.verbose(`Evicting ${txsToEvict.length} txs from pool due to referencing pruned blocks`); + this.log.info(`Evicting ${txsToEvict.length} txs from pool due to referencing pruned blocks`); await pool.deleteTxs(txsToEvict, this.name); } @@ -81,7 +81,7 @@ export class InvalidTxsAfterReorgRule implements EvictionRule { this.log.verbose(`Kept ${keptCount} txs that did not reference pruned blocks`); } - this.log.info(`Evicted ${txsToEvict.length} invalid txs after reorg`, { txHashes: txsToEvict }); + this.log.debug(`Evicted ${txsToEvict.length} invalid txs after reorg`, { txHashes: txsToEvict }); return { reason: 'reorg_invalid_txs', diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts 
b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts index 261a14ff3730..047695aaf681 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts @@ -48,19 +48,18 @@ export class LowPriorityEvictionRule implements EvictionRule { }; } - this.log.verbose( - `Evicting low priority txs. Pending tx count above limit: ${currentTxCount} > ${this.maxPoolSize}`, - ); + this.log.info(`Evicting low priority txs. Pending tx count above limit: ${currentTxCount} > ${this.maxPoolSize}`); const numberToEvict = currentTxCount - this.maxPoolSize; const txsToEvict = pool.getLowestPriorityPending(numberToEvict); + const toEvictSet = new Set(txsToEvict); + const numNewTxsEvicted = context.newTxHashes.filter(newTxHash => toEvictSet.has(newTxHash)).length; if (txsToEvict.length > 0) { + this.log.info(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`); await pool.deleteTxs(txsToEvict, this.name); } - const numNewTxsEvicted = context.newTxHashes.filter(newTxHash => txsToEvict.includes(newTxHash)).length; - - this.log.verbose(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`, { + this.log.debug(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`, { txHashes: txsToEvict, }); diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index a6004e0f88c9..982616eba126 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -55,6 +55,7 @@ export const DefaultSequencerConfig: ResolvedSequencerConfig = { fishermanMode: false, shuffleAttestationOrdering: false, skipPushProposedBlocksToArchiver: false, + skipPublishingCheckpointsPercent: 0, }; /** @@ -208,6 +209,11 @@ export const sequencerConfigMappings: ConfigMappingsType 
= { minBlocksForCheckpoint: { description: 'Minimum number of blocks required for a checkpoint proposal (test only)', }, + skipPublishingCheckpointsPercent: { + env: 'SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT', + description: 'Percent probability (0 - 100) of sequencer skipping checkpoint publishing (testing only)', + ...numberConfigHelper(DefaultSequencerConfig.skipPublishingCheckpointsPercent), + }, ...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowList']), }; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 1b0f345a2493..fcc773ab3a5d 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -336,6 +336,21 @@ export class CheckpointProposalJob implements Traceable { const aztecSlotDuration = this.l1Constants.slotDuration; const slotStartBuildTimestamp = this.getSlotStartBuildTimestamp(); const txTimeoutAt = new Date((slotStartBuildTimestamp + aztecSlotDuration) * 1000); + + // If we have been configured to potentially skip publishing checkpoint then roll the dice here + if ( + this.config.skipPublishingCheckpointsPercent !== undefined && + this.config.skipPublishingCheckpointsPercent > 0 + ) { + const result = Math.max(0, randomInt(100)); + if (result < this.config.skipPublishingCheckpointsPercent) { + this.log.warn( + `Skipping publishing proposal for checkpoint ${checkpoint.number}. 
Configured percentage: ${this.config.skipPublishingCheckpointsPercent}, generated value: ${result}`, + ); + return checkpoint; + } + } + await this.publisher.enqueueProposeCheckpoint(checkpoint, attestations, attestationsSignature, { txTimeoutAt, forcePendingCheckpointNumber: this.invalidateCheckpoint?.forcePendingCheckpointNumber, diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index d51c3b0383ea..5149006d2f65 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -71,6 +71,8 @@ export interface SequencerConfig { skipPushProposedBlocksToArchiver?: boolean; /** Minimum number of blocks required for a checkpoint proposal (test only, defaults to undefined = no minimum) */ minBlocksForCheckpoint?: number; + /** Skip publishing checkpoint proposals probability (for testing checkpoint prunes only) */ + skipPublishingCheckpointsPercent?: number; } export const SequencerConfigSchema = zodFor()( @@ -106,6 +108,7 @@ export const SequencerConfigSchema = zodFor()( buildCheckpointIfEmpty: z.boolean().optional(), skipPushProposedBlocksToArchiver: z.boolean().optional(), minBlocksForCheckpoint: z.number().positive().optional(), + skipPublishingCheckpointsPercent: z.number().gte(0).lte(100).optional(), }), ); diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index bddee3fbe308..5f6b94abe1b3 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -273,6 +273,11 @@ export const ARCHIVER_BLOCK_HEIGHT: MetricDefinition = { description: 'The height of the latest block processed by the archiver', valueType: ValueType.INT, }; +export const ARCHIVER_CHECKPOINT_HEIGHT: MetricDefinition = { + name: 'aztec.archiver.checkpoint_height', + description: 'The height of the latest checkpoint processed by the archiver', + valueType: ValueType.INT, +}; export const 
ARCHIVER_ROLLUP_PROOF_DELAY: MetricDefinition = { name: 'aztec.archiver.rollup_proof_delay', description: 'Time after a block is submitted until its proof is published', From 879a554b09ec1464358744ac327e5b86ac288f60 Mon Sep 17 00:00:00 2001 From: danielntmd Date: Mon, 16 Feb 2026 02:48:49 +0000 Subject: [PATCH 36/62] fix: set PXE sync chain tip to proposed for scenario bot - fixes a timeout issue where the default chart pointed to checkpointed instead of proposed. - also bumps timeout for proving benchmarks --- .github/workflows/weekly-proving-bench.yml | 4 ++-- yarn-project/end-to-end/src/spartan/utils/bot.ts | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/weekly-proving-bench.yml b/.github/workflows/weekly-proving-bench.yml index f342710184e5..f9f561f6d88e 100644 --- a/.github/workflows/weekly-proving-bench.yml +++ b/.github/workflows/weekly-proving-bench.yml @@ -47,7 +47,7 @@ jobs: fi - name: Run real proving benchmarks - timeout-minutes: 150 + timeout-minutes: 180 env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -57,7 +57,7 @@ jobs: GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} RUN_ID: ${{ github.run_id }} - AWS_SHUTDOWN_TIME: 150 + AWS_SHUTDOWN_TIME: 180 NO_SPOT: 1 run: | ./.github/ci3.sh network-proving-bench prove-n-tps-real prove-n-tps-real "aztecprotocol/aztec:${{ steps.nightly-tag.outputs.nightly_tag }}" diff --git a/yarn-project/end-to-end/src/spartan/utils/bot.ts b/yarn-project/end-to-end/src/spartan/utils/bot.ts index 6314d2850292..abd275cd80a6 100644 --- a/yarn-project/end-to-end/src/spartan/utils/bot.ts +++ b/yarn-project/end-to-end/src/spartan/utils/bot.ts @@ -35,6 +35,7 @@ export async function installTransferBot({ replicas = 1, txIntervalSeconds = 10, followChain = 'CHECKPOINTED', + pxeSyncChainTip = 'proposed', mnemonic = process.env.LABS_INFRA_MNEMONIC ??
'test test test test test test test test test test test junk', mnemonicStartIndex, botPrivateKey = process.env.BOT_TRANSFERS_L2_PRIVATE_KEY ?? '0xcafe01', @@ -49,6 +50,7 @@ export async function installTransferBot({ replicas?: number; txIntervalSeconds?: number; followChain?: string; + pxeSyncChainTip?: string; mnemonic?: string; mnemonicStartIndex?: number | string; botPrivateKey?: string; @@ -67,6 +69,7 @@ export async function installTransferBot({ 'bot.replicaCount': replicas, 'bot.txIntervalSeconds': txIntervalSeconds, 'bot.followChain': followChain, + 'bot.pxeSyncChainTip': pxeSyncChainTip, 'bot.botPrivateKey': botPrivateKey, 'bot.nodeUrl': resolvedNodeUrl, 'bot.mnemonic': mnemonic, From 2aea1d33972dd9c17dae86b8e5c26bab8b147806 Mon Sep 17 00:00:00 2001 From: ludamad Date: Mon, 16 Feb 2026 09:11:15 -0500 Subject: [PATCH 37/62] fix(ci): preserve both attempt logs when retrying flaky tests (#20439) ## Summary - Added `publish_log_final` in the `flake()` function so the log link is actually accessible (was a dead link before) - Added a `========== RETRY (flake detection) ==========` separator between attempts in `$tmp_file` so both the initial failure and retry output are visible under the same log link Co-authored-by: Claude Opus 4.6 --- ci3/run_test_cmd | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/ci3/run_test_cmd b/ci3/run_test_cmd index 35c37c4d0c42..a8fd10836497 100755 --- a/ci3/run_test_cmd +++ b/ci3/run_test_cmd @@ -119,6 +119,21 @@ function publish_log_final { cat $tmp_file 2>/dev/null | cache_persistent $log_key $expire } +# Finalize the current log and start a fresh one with a new unique key. +function rotate_log { + if [ "$CI_REDIS_AVAILABLE" -eq 1 ]; then + publish_log_final "$@" + fi + log_key=$(uuid) + log_info=" ($(ci_term_link $log_key))" + > $tmp_file + if [ "$live_logging" -eq 1 ]; then + kill ${publish_pid:-} &>/dev/null + live_publish_log & + publish_pid=$! 
+ fi +} + function live_publish_log { # Not replacing previous trap as we run this function in the background. trap 'kill $sleep_pid &>/dev/null; exit' SIGTERM SIGINT @@ -336,12 +351,23 @@ flake_group_id=$(echo "$test_entries" | jq -r '.flake_group_id // empty' | head if [ -z "$owners" ]; then fail else - echo -e "${yellow}RETRYING${reset}${log_info:-}: $test_cmd" + failure_log_key=$log_key + failure_log_info=$log_info + rotate_log $((60 * 60 * 24 * 7 * 12)) + + echo -e "${yellow}RETRYING${reset}${log_info}: $test_cmd" run_test - # Test passed. Signal it as a flake, but pass. - [ $code -eq 0 ] && flake + if [ $code -eq 0 ]; then + # Publish the retry's log, then point back at the failure for flake reporting. + if [ "$CI_REDIS_AVAILABLE" -eq 1 ]; then + publish_log_final + fi + log_key=$failure_log_key + log_info=$failure_log_info + flake + fi # Otherwise we failed twice in a row, so hard fail. fail From 8d8451c555162e9967f77b99251c1ac5b4f99f4a Mon Sep 17 00:00:00 2001 From: spalladino <429604+spalladino@users.noreply.github.com> Date: Mon, 16 Feb 2026 15:35:51 +0000 Subject: [PATCH 38/62] refactor: add getCheckpointsDataForEpoch and enrich CheckpointData MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Adds a new `getCheckpointsDataForEpoch` method to `L2BlockSource` that returns `CheckpointData` (metadata-only, no full blocks), avoiding unnecessary block fetching when callers only need checkpoint out-hashes - Moves `CheckpointData` type from archiver to stdlib with richer typing: `attestations` as `CommitteeAttestation[]` (not `Buffer[]`), `startBlock` as `BlockNumber`, **precomputed `checkpointOutHash`**, and `numBlocks` renamed to `blockCount` - Adds a new index slot-to-checkpoint-number in the archiver store, so we can grab checkpoints for a given epoch without having to potentially walk the entire set of checkpoints. 
- Migrates sequencer, validator, block proposal handler, and slasher to use the new lightweight method, resolving the TODO in `block_proposal_handler.ts` about inefficient block fetching - Updates the mock block source (used in tests only) to handle mbps (in a separate commit) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Phil Windle Co-authored-by: Santiago Palladino --- .../archiver/src/modules/data_source_base.ts | 144 ++++-------- .../archiver/src/modules/instrumentation.ts | 5 +- .../archiver/src/modules/l1_synchronizer.ts | 2 +- .../archiver/src/store/block_store.ts | 67 ++++-- .../src/store/kv_archiver_store.test.ts | 6 +- .../archiver/src/store/kv_archiver_store.ts | 9 +- .../archiver/src/test/mock_archiver.ts | 5 +- .../archiver/src/test/mock_l2_block_source.ts | 207 +++++++++++------- .../epochs_proof_fails.parallel.test.ts | 10 +- .../sequencer/checkpoint_proposal_job.test.ts | 29 ++- .../checkpoint_proposal_job.timing.test.ts | 2 +- .../src/sequencer/checkpoint_proposal_job.ts | 7 +- .../src/sequencer/sequencer.test.ts | 1 + .../src/watchers/epoch_prune_watcher.test.ts | 1 + .../src/watchers/epoch_prune_watcher.ts | 6 +- .../stdlib/src/block/l2_block_source.ts | 7 + .../stdlib/src/checkpoint/checkpoint.ts | 8 +- .../stdlib/src/checkpoint/checkpoint_data.ts | 51 +++++ yarn-project/stdlib/src/checkpoint/index.ts | 1 + .../src/checkpoint/published_checkpoint.ts | 4 +- .../stdlib/src/interfaces/archiver.test.ts | 25 +++ .../stdlib/src/interfaces/archiver.ts | 2 + .../src/block_proposal_handler.ts | 21 +- .../validator-client/src/validator.test.ts | 1 + .../validator-client/src/validator.ts | 11 +- 25 files changed, 374 insertions(+), 258 deletions(-) create mode 100644 yarn-project/stdlib/src/checkpoint/checkpoint_data.ts diff --git a/yarn-project/archiver/src/modules/data_source_base.ts b/yarn-project/archiver/src/modules/data_source_base.ts index 2cd512830e74..7a8cfc85f238 100644 --- 
a/yarn-project/archiver/src/modules/data_source_base.ts +++ b/yarn-project/archiver/src/modules/data_source_base.ts @@ -1,18 +1,12 @@ +import { range } from '@aztec/foundation/array'; import { BlockNumber, CheckpointNumber, type EpochNumber, type SlotNumber } from '@aztec/foundation/branded-types'; import type { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { isDefined } from '@aztec/foundation/types'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { - type BlockData, - type BlockHash, - CheckpointedL2Block, - CommitteeAttestation, - L2Block, - type L2Tips, -} from '@aztec/stdlib/block'; -import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type BlockData, type BlockHash, CheckpointedL2Block, L2Block, type L2Tips } from '@aztec/stdlib/block'; +import { Checkpoint, type CheckpointData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client'; @@ -24,7 +18,6 @@ import type { BlockHeader, IndexedTxEffect, TxHash, TxReceipt } from '@aztec/std import type { UInt64 } from '@aztec/stdlib/types'; import type { ArchiverDataSource } from '../interfaces.js'; -import type { CheckpointData } from '../store/block_store.js'; import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; import type { ValidateCheckpointResult } from './validation.js'; @@ -121,7 +114,7 @@ export abstract class ArchiverDataSourceBase if (!checkpointData) { return undefined; } - return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1); + return 
BlockNumber(checkpointData.startBlock + checkpointData.blockCount - 1); } public getCheckpointedBlocks(from: BlockNumber, limit: number): Promise { @@ -238,28 +231,21 @@ export abstract class ArchiverDataSourceBase public async getCheckpoints(checkpointNumber: CheckpointNumber, limit: number): Promise { const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit); - const blocks = ( - await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber))) - ).filter(isDefined); - - const fullCheckpoints: PublishedCheckpoint[] = []; - for (let i = 0; i < checkpoints.length; i++) { - const blocksForCheckpoint = blocks[i]; - const checkpoint = checkpoints[i]; - const fullCheckpoint = new Checkpoint( - checkpoint.archive, - checkpoint.header, - blocksForCheckpoint, - checkpoint.checkpointNumber, - ); - const publishedCheckpoint = new PublishedCheckpoint( - fullCheckpoint, - checkpoint.l1, - checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)), - ); - fullCheckpoints.push(publishedCheckpoint); + return Promise.all(checkpoints.map(ch => this.getPublishedCheckpointFromCheckpointData(ch))); + } + + private async getPublishedCheckpointFromCheckpointData(checkpoint: CheckpointData): Promise { + const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpoint.checkpointNumber); + if (!blocksForCheckpoint) { + throw new Error(`Blocks for checkpoint ${checkpoint.checkpointNumber} not found`); } - return fullCheckpoints; + const fullCheckpoint = new Checkpoint( + checkpoint.archive, + checkpoint.header, + blocksForCheckpoint, + checkpoint.checkpointNumber, + ); + return new PublishedCheckpoint(fullCheckpoint, checkpoint.l1, checkpoint.attestations); } public getBlocksForSlot(slotNumber: SlotNumber): Promise { @@ -267,84 +253,44 @@ export abstract class ArchiverDataSourceBase } public async getCheckpointedBlocksForEpoch(epochNumber: EpochNumber): Promise { - if (!this.l1Constants) { - throw new 
Error('L1 constants not set'); - } - - const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); - const blocks: CheckpointedL2Block[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. - let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpoint && slot(checkpoint) >= start) { - if (slot(checkpoint) <= end) { - // push the blocks on backwards - const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; - for (let i = endBlock; i >= checkpoint.startBlock; i--) { - const checkpointedBlock = await this.getCheckpointedBlock(BlockNumber(i)); - if (checkpointedBlock) { - blocks.push(checkpointedBlock); - } - } - } - checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); - } - - return blocks.reverse(); + const checkpointsData = await this.getCheckpointsDataForEpoch(epochNumber); + const blocks = await Promise.all( + checkpointsData.flatMap(checkpoint => + range(checkpoint.blockCount, checkpoint.startBlock).map(blockNumber => + this.getCheckpointedBlock(BlockNumber(blockNumber)), + ), + ), + ); + return blocks.filter(isDefined); } public async getCheckpointedBlockHeadersForEpoch(epochNumber: EpochNumber): Promise { - if (!this.l1Constants) { - throw new Error('L1 constants not set'); - } - - const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); - const blocks: BlockHeader[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
- let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpoint && slot(checkpoint) >= start) { - if (slot(checkpoint) <= end) { - // push the blocks on backwards - const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; - for (let i = endBlock; i >= checkpoint.startBlock; i--) { - const block = await this.getBlockHeader(BlockNumber(i)); - if (block) { - blocks.push(block); - } - } - } - checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); - } - return blocks.reverse(); + const checkpointsData = await this.getCheckpointsDataForEpoch(epochNumber); + const blocks = await Promise.all( + checkpointsData.flatMap(checkpoint => + range(checkpoint.blockCount, checkpoint.startBlock).map(blockNumber => + this.getBlockHeader(BlockNumber(blockNumber)), + ), + ), + ); + return blocks.filter(isDefined); } public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise { + const checkpointsData = await this.getCheckpointsDataForEpoch(epochNumber); + return Promise.all( + checkpointsData.map(data => this.getPublishedCheckpointFromCheckpointData(data).then(p => p.checkpoint)), + ); + } + + /** Returns checkpoint data for all checkpoints whose slot falls within the given epoch. */ + public getCheckpointsDataForEpoch(epochNumber: EpochNumber): Promise { if (!this.l1Constants) { throw new Error('L1 constants not set'); } const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); - const checkpoints: Checkpoint[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
- let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpointData && slot(checkpointData) >= start) { - if (slot(checkpointData) <= end) { - // push the checkpoints on backwards - const [checkpoint] = await this.getCheckpoints(checkpointData.checkpointNumber, 1); - checkpoints.push(checkpoint.checkpoint); - } - checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1)); - } - - return checkpoints.reverse(); + return this.store.getCheckpointDataForSlotRange(start, end); } public async getBlock(number: BlockNumber): Promise { diff --git a/yarn-project/archiver/src/modules/instrumentation.ts b/yarn-project/archiver/src/modules/instrumentation.ts index f0a18a2d17d7..fbf91cf16a1a 100644 --- a/yarn-project/archiver/src/modules/instrumentation.ts +++ b/yarn-project/archiver/src/modules/instrumentation.ts @@ -1,5 +1,6 @@ import { createLogger } from '@aztec/foundation/log'; import type { L2Block } from '@aztec/stdlib/block'; +import type { CheckpointData } from '@aztec/stdlib/checkpoint'; import { Attributes, type Gauge, @@ -13,8 +14,6 @@ import { createUpDownCounterWithDefault, } from '@aztec/telemetry-client'; -import type { CheckpointData } from '../store/block_store.js'; - export class ArchiverInstrumentation { public readonly tracer: Tracer; @@ -134,7 +133,7 @@ export class ArchiverInstrumentation { } public updateLastProvenCheckpoint(checkpoint: CheckpointData) { - const lastBlockNumberInCheckpoint = checkpoint.startBlock + checkpoint.numBlocks - 1; + const lastBlockNumberInCheckpoint = checkpoint.startBlock + checkpoint.blockCount - 1; this.blockHeight.record(lastBlockNumberInCheckpoint, { [Attributes.STATUS]: 'proven' }); this.checkpointHeight.record(checkpoint.checkpointNumber, { [Attributes.STATUS]: 'proven' }); } diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts 
b/yarn-project/archiver/src/modules/l1_synchronizer.ts index bc73f3bb35c2..22b1ed5aba29 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -590,7 +590,7 @@ export class ArchiverL1Synchronizer implements Traceable { const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1Constants); const lastBlockNumberInCheckpoint = localCheckpointForDestinationProvenCheckpointNumber.startBlock + - localCheckpointForDestinationProvenCheckpointNumber.numBlocks - + localCheckpointForDestinationProvenCheckpointNumber.blockCount - 1; this.events.emit(L2BlockSourceEvents.L2BlockProven, { diff --git a/yarn-project/archiver/src/store/block_store.ts b/yarn-project/archiver/src/store/block_store.ts index 636df8cc299a..a9ec9a501c85 100644 --- a/yarn-project/archiver/src/store/block_store.ts +++ b/yarn-project/archiver/src/store/block_store.ts @@ -19,7 +19,7 @@ import { deserializeValidateCheckpointResult, serializeValidateCheckpointResult, } from '@aztec/stdlib/block'; -import { L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type CheckpointData, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; @@ -62,23 +62,14 @@ type BlockStorage = { type CheckpointStorage = { header: Buffer; archive: Buffer; + checkpointOutHash: Buffer; checkpointNumber: number; startBlock: number; - numBlocks: number; + blockCount: number; l1: Buffer; attestations: Buffer[]; }; -export type CheckpointData = { - checkpointNumber: CheckpointNumber; - header: CheckpointHeader; - archive: AppendOnlyTreeSnapshot; - startBlock: number; - numBlocks: number; - l1: L1PublishedData; - attestations: Buffer[]; -}; - export type RemoveCheckpointsResult = { blocksRemoved: L2Block[] | undefined 
}; /** @@ -91,6 +82,9 @@ export class BlockStore { /** Map checkpoint number to checkpoint data */ #checkpoints: AztecAsyncMap; + /** Map slot number to checkpoint number, for looking up checkpoints by slot range. */ + #slotToCheckpoint: AztecAsyncMap; + /** Map block hash to list of tx hashes */ #blockTxs: AztecAsyncMap; @@ -131,6 +125,7 @@ export class BlockStore { this.#lastProvenCheckpoint = db.openSingleton('archiver_last_proven_l2_checkpoint'); this.#pendingChainValidationStatus = db.openSingleton('archiver_pending_chain_validation_status'); this.#checkpoints = db.openMap('archiver_checkpoints'); + this.#slotToCheckpoint = db.openMap('archiver_slot_to_checkpoint'); } /** @@ -274,7 +269,7 @@ export class BlockStore { // If we have a previous checkpoint then we need to get the previous block number if (previousCheckpointData !== undefined) { - previousBlockNumber = BlockNumber(previousCheckpointData.startBlock + previousCheckpointData.numBlocks - 1); + previousBlockNumber = BlockNumber(previousCheckpointData.startBlock + previousCheckpointData.blockCount - 1); previousBlock = await this.getBlock(previousBlockNumber); if (previousBlock === undefined) { // We should be able to get the required previous block @@ -338,12 +333,16 @@ export class BlockStore { await this.#checkpoints.set(checkpoint.checkpoint.number, { header: checkpoint.checkpoint.header.toBuffer(), archive: checkpoint.checkpoint.archive.toBuffer(), + checkpointOutHash: checkpoint.checkpoint.getCheckpointOutHash().toBuffer(), l1: checkpoint.l1.toBuffer(), attestations: checkpoint.attestations.map(attestation => attestation.toBuffer()), checkpointNumber: checkpoint.checkpoint.number, startBlock: checkpoint.checkpoint.blocks[0].number, - numBlocks: checkpoint.checkpoint.blocks.length, + blockCount: checkpoint.checkpoint.blocks.length, }); + + // Update slot-to-checkpoint index + await this.#slotToCheckpoint.set(checkpoint.checkpoint.header.slotNumber, checkpoint.checkpoint.number); } await 
this.#lastSynchedL1Block.set(checkpoints[checkpoints.length - 1].l1.blockNumber); @@ -426,7 +425,7 @@ export class BlockStore { if (!targetCheckpoint) { throw new Error(`Target checkpoint ${checkpointNumber} not found in store`); } - lastBlockToKeep = BlockNumber(targetCheckpoint.startBlock + targetCheckpoint.numBlocks - 1); + lastBlockToKeep = BlockNumber(targetCheckpoint.startBlock + targetCheckpoint.blockCount - 1); } // Remove all blocks after lastBlockToKeep (both checkpointed and uncheckpointed) @@ -434,6 +433,11 @@ export class BlockStore { // Remove all checkpoints after the target for (let c = latestCheckpointNumber; c > checkpointNumber; c = CheckpointNumber(c - 1)) { + const checkpointStorage = await this.#checkpoints.getAsync(c); + if (checkpointStorage) { + const slotNumber = CheckpointHeader.fromBuffer(checkpointStorage.header).slotNumber; + await this.#slotToCheckpoint.delete(slotNumber); + } await this.#checkpoints.delete(c); this.#log.debug(`Removed checkpoint ${c}`); } @@ -462,17 +466,32 @@ export class BlockStore { return checkpoints; } - private checkpointDataFromCheckpointStorage(checkpointStorage: CheckpointStorage) { - const data: CheckpointData = { + /** Returns checkpoint data for all checkpoints whose slot falls within the given range (inclusive). 
*/ + async getCheckpointDataForSlotRange(startSlot: SlotNumber, endSlot: SlotNumber): Promise { + const result: CheckpointData[] = []; + for await (const [, checkpointNumber] of this.#slotToCheckpoint.entriesAsync({ + start: startSlot, + end: endSlot + 1, + })) { + const checkpointStorage = await this.#checkpoints.getAsync(checkpointNumber); + if (checkpointStorage) { + result.push(this.checkpointDataFromCheckpointStorage(checkpointStorage)); + } + } + return result; + } + + private checkpointDataFromCheckpointStorage(checkpointStorage: CheckpointStorage): CheckpointData { + return { header: CheckpointHeader.fromBuffer(checkpointStorage.header), archive: AppendOnlyTreeSnapshot.fromBuffer(checkpointStorage.archive), + checkpointOutHash: Fr.fromBuffer(checkpointStorage.checkpointOutHash), checkpointNumber: CheckpointNumber(checkpointStorage.checkpointNumber), - startBlock: checkpointStorage.startBlock, - numBlocks: checkpointStorage.numBlocks, + startBlock: BlockNumber(checkpointStorage.startBlock), + blockCount: checkpointStorage.blockCount, l1: L1PublishedData.fromBuffer(checkpointStorage.l1), - attestations: checkpointStorage.attestations, + attestations: checkpointStorage.attestations.map(buf => CommitteeAttestation.fromBuffer(buf)), }; - return data; } async getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise { @@ -484,7 +503,7 @@ export class BlockStore { const blocksForCheckpoint = await toArray( this.#blocks.entriesAsync({ start: checkpoint.startBlock, - end: checkpoint.startBlock + checkpoint.numBlocks, + end: checkpoint.startBlock + checkpoint.blockCount, }), ); @@ -557,7 +576,7 @@ export class BlockStore { if (!checkpointStorage) { throw new CheckpointNotFoundError(provenCheckpointNumber); } else { - return BlockNumber(checkpointStorage.startBlock + checkpointStorage.numBlocks - 1); + return BlockNumber(checkpointStorage.startBlock + checkpointStorage.blockCount - 1); } } @@ -922,7 +941,7 @@ export class BlockStore { if (!checkpoint) { 
return BlockNumber(INITIAL_L2_BLOCK_NUM - 1); } - return BlockNumber(checkpoint.startBlock + checkpoint.numBlocks - 1); + return BlockNumber(checkpoint.startBlock + checkpoint.blockCount - 1); } async getLatestL2BlockNumber(): Promise { diff --git a/yarn-project/archiver/src/store/kv_archiver_store.test.ts b/yarn-project/archiver/src/store/kv_archiver_store.test.ts index f0100cc2cce0..d05044ded8d2 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.test.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.test.ts @@ -1474,7 +1474,7 @@ describe('KVArchiverDataStore', () => { expect(checkpoints.length).toBe(1); expect(checkpoints[0].checkpointNumber).toBe(1); expect(checkpoints[0].startBlock).toBe(1); - expect(checkpoints[0].numBlocks).toBe(2); + expect(checkpoints[0].blockCount).toBe(2); }); it('returns multiple checkpoints in order', async () => { @@ -1504,7 +1504,7 @@ describe('KVArchiverDataStore', () => { expect(checkpoints.length).toBe(3); expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2, 3]); expect(checkpoints.map(c => c.startBlock)).toEqual([1, 3, 6]); - expect(checkpoints.map(c => c.numBlocks)).toEqual([2, 3, 1]); + expect(checkpoints.map(c => c.blockCount)).toEqual([2, 3, 1]); }); it('respects the from parameter', async () => { @@ -1586,7 +1586,7 @@ describe('KVArchiverDataStore', () => { const data = checkpoints[0]; expect(data.checkpointNumber).toBe(1); expect(data.startBlock).toBe(1); - expect(data.numBlocks).toBe(3); + expect(data.blockCount).toBe(3); expect(data.l1.blockNumber).toBe(42n); expect(data.header.equals(checkpoint.checkpoint.header)).toBe(true); expect(data.archive.equals(checkpoint.checkpoint.archive)).toBe(true); diff --git a/yarn-project/archiver/src/store/kv_archiver_store.ts b/yarn-project/archiver/src/store/kv_archiver_store.ts index 24447c10ae5a..d46075e2a588 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.ts @@ -13,7 +13,7 @@ 
import { L2Block, type ValidateCheckpointResult, } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import type { CheckpointData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, @@ -31,7 +31,7 @@ import type { UInt64 } from '@aztec/stdlib/types'; import { join } from 'path'; import type { InboxMessage } from '../structs/inbox_message.js'; -import { BlockStore, type CheckpointData, type RemoveCheckpointsResult } from './block_store.js'; +import { BlockStore, type RemoveCheckpointsResult } from './block_store.js'; import { ContractClassStore } from './contract_class_store.js'; import { ContractInstanceStore } from './contract_instance_store.js'; import { LogStore } from './log_store.js'; @@ -645,6 +645,11 @@ export class KVArchiverDataStore implements ContractDataSource { return this.#blockStore.getCheckpointData(checkpointNumber); } + /** Returns checkpoint data for all checkpoints whose slot falls within the given range (inclusive). */ + getCheckpointDataForSlotRange(startSlot: SlotNumber, endSlot: SlotNumber): Promise { + return this.#blockStore.getCheckpointDataForSlotRange(startSlot, endSlot); + } + /** * Gets all blocks that have the given slot number. * @param slotNumber - The slot number to search for. diff --git a/yarn-project/archiver/src/test/mock_archiver.ts b/yarn-project/archiver/src/test/mock_archiver.ts index a613dabf011b..bcdcc3928d96 100644 --- a/yarn-project/archiver/src/test/mock_archiver.ts +++ b/yarn-project/archiver/src/test/mock_archiver.ts @@ -56,8 +56,9 @@ export class MockPrefilledArchiver extends MockArchiver { } const fromBlock = this.l2Blocks.length; - // TODO: Add L2 blocks and checkpoints separately once archiver has the apis for that. 
- this.addProposedBlocks(this.prefilled.slice(fromBlock, fromBlock + numBlocks).flatMap(c => c.blocks)); + const checkpointsToAdd = this.prefilled.slice(fromBlock, fromBlock + numBlocks); + this.addProposedBlocks(checkpointsToAdd.flatMap(c => c.blocks)); + this.checkpointList.push(...checkpointsToAdd); return Promise.resolve(); } } diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index d0a44894ec4e..ff4a0fe4af52 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -16,9 +16,11 @@ import { type L2Tips, type ValidateCheckpointResult, } from '@aztec/stdlib/block'; -import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { Checkpoint, type CheckpointData, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { EmptyL1RollupConstants, type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; +import { computeCheckpointOutHash } from '@aztec/stdlib/messaging'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { type BlockHeader, TxExecutionResult, TxHash, TxReceipt, TxStatus } from '@aztec/stdlib/tx'; import type { UInt64 } from '@aztec/stdlib/types'; @@ -27,6 +29,7 @@ import type { UInt64 } from '@aztec/stdlib/types'; */ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { protected l2Blocks: L2Block[] = []; + protected checkpointList: Checkpoint[] = []; private provenBlockNumber: number = 0; private finalizedBlockNumber: number = 0; @@ -34,14 +37,30 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { private log = createLogger('archiver:mock_l2_block_source'); + /** Creates blocks grouped into single-block checkpoints. 
*/ public async createBlocks(numBlocks: number) { - for (let i = 0; i < numBlocks; i++) { - const blockNum = this.l2Blocks.length + 1; - const block = await L2Block.random(BlockNumber(blockNum), { slotNumber: SlotNumber(blockNum) }); - this.l2Blocks.push(block); + await this.createCheckpoints(numBlocks, 1); + } + + /** Creates checkpoints, each containing `blocksPerCheckpoint` blocks. */ + public async createCheckpoints(numCheckpoints: number, blocksPerCheckpoint: number = 1) { + for (let c = 0; c < numCheckpoints; c++) { + const checkpointNum = CheckpointNumber(this.checkpointList.length + 1); + const startBlockNum = this.l2Blocks.length + 1; + const slotNumber = SlotNumber(Number(checkpointNum)); + const checkpoint = await Checkpoint.random(checkpointNum, { + numBlocks: blocksPerCheckpoint, + startBlockNumber: startBlockNum, + slotNumber, + checkpointNumber: checkpointNum, + }); + this.checkpointList.push(checkpoint); + this.l2Blocks.push(...checkpoint.blocks); } - this.log.verbose(`Created ${numBlocks} blocks in the mock L2 block source`); + this.log.verbose( + `Created ${numCheckpoints} checkpoints with ${blocksPerCheckpoint} blocks each in the mock L2 block source`, + ); } public addProposedBlocks(blocks: L2Block[]) { @@ -51,6 +70,16 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { public removeBlocks(numBlocks: number) { this.l2Blocks = this.l2Blocks.slice(0, -numBlocks); + const maxBlockNum = this.l2Blocks.length; + // Remove any checkpoint whose last block is beyond the remaining blocks. + this.checkpointList = this.checkpointList.filter(c => { + const lastBlockNum = c.blocks[0].number + c.blocks.length - 1; + return lastBlockNum <= maxBlockNum; + }); + // Keep tip numbers consistent with remaining blocks. 
+ this.checkpointedBlockNumber = Math.min(this.checkpointedBlockNumber, maxBlockNum); + this.provenBlockNumber = Math.min(this.provenBlockNumber, maxBlockNum); + this.finalizedBlockNumber = Math.min(this.finalizedBlockNumber, maxBlockNum); this.log.verbose(`Removed ${numBlocks} blocks from the mock L2 block source`); } @@ -66,7 +95,33 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { } public setCheckpointedBlockNumber(checkpointedBlockNumber: number) { + const prevCheckpointed = this.checkpointedBlockNumber; this.checkpointedBlockNumber = checkpointedBlockNumber; + // Auto-create single-block checkpoints for newly checkpointed blocks that don't have one yet. + // This handles blocks added via addProposedBlocks that are now being marked as checkpointed. + const newCheckpoints: Checkpoint[] = []; + for (let blockNum = prevCheckpointed + 1; blockNum <= checkpointedBlockNumber; blockNum++) { + const block = this.l2Blocks[blockNum - 1]; + if (!block) { + continue; + } + if (this.checkpointList.some(c => c.blocks.some(b => b.number === block.number))) { + continue; + } + const checkpointNum = CheckpointNumber(this.checkpointList.length + newCheckpoints.length + 1); + const checkpoint = new Checkpoint( + block.archive, + CheckpointHeader.random({ slotNumber: block.header.globalVariables.slotNumber }), + [block], + checkpointNum, + ); + newCheckpoints.push(checkpoint); + } + // Insert new checkpoints in order by number. 
+ if (newCheckpoints.length > 0) { + this.checkpointList.push(...newCheckpoints); + this.checkpointList.sort((a, b) => a.number - b.number); + } } /** @@ -113,13 +168,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { if (!block) { return Promise.resolve(undefined); } - const checkpointedBlock = new CheckpointedL2Block( - CheckpointNumber.fromBlockNumber(number), - block, - new L1PublishedData(BigInt(number), BigInt(number), `0x${number.toString(16).padStart(64, '0')}`), - [], - ); - return Promise.resolve(checkpointedBlock); + return Promise.resolve(this.toCheckpointedBlock(block)); } public async getCheckpointedBlocks(from: BlockNumber, limit: number): Promise { @@ -168,44 +217,22 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { } public getCheckpoints(from: CheckpointNumber, limit: number) { - // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. - const blocks = this.l2Blocks.slice(from - 1, from - 1 + limit); - return Promise.all( - blocks.map(async block => { - // Create a checkpoint from the block - manually construct since L2Block doesn't have toCheckpoint() - const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); - checkpoint.blocks = [block]; - return new PublishedCheckpoint( - checkpoint, - new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), - [], - ); - }), + const checkpoints = this.checkpointList.slice(from - 1, from - 1 + limit); + return Promise.resolve( + checkpoints.map(checkpoint => new PublishedCheckpoint(checkpoint, this.mockL1DataForCheckpoint(checkpoint), [])), ); } - public async getCheckpointByArchive(archive: Fr): Promise { - // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. 
- const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); - if (!block) { - return undefined; - } - // Create a checkpoint from the block - manually construct since L2Block doesn't have toCheckpoint() - const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); - checkpoint.blocks = [block]; - return checkpoint; + public getCheckpointByArchive(archive: Fr): Promise { + const checkpoint = this.checkpointList.find(c => c.archive.root.equals(archive)); + return Promise.resolve(checkpoint); } public async getCheckpointedBlockByHash(blockHash: BlockHash): Promise { for (const block of this.l2Blocks) { const hash = await block.hash(); if (hash.equals(blockHash)) { - return CheckpointedL2Block.fromFields({ - checkpointNumber: CheckpointNumber.fromBlockNumber(block.number), - block, - l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), - attestations: [], - }); + return this.toCheckpointedBlock(block); } } return undefined; @@ -216,14 +243,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { if (!block) { return Promise.resolve(undefined); } - return Promise.resolve( - CheckpointedL2Block.fromFields({ - checkpointNumber: CheckpointNumber.fromBlockNumber(block.number), - block, - l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), - attestations: [], - }), - ); + return Promise.resolve(this.toCheckpointedBlock(block)); } public async getL2BlockByHash(blockHash: BlockHash): Promise { @@ -289,42 +309,36 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { } getCheckpointsForEpoch(epochNumber: EpochNumber): Promise { - // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. 
- const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration; - const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration }); - const blocks = this.l2Blocks.filter(b => { - const slot = b.header.globalVariables.slotNumber; - return slot >= start && slot <= end; - }); - // Create checkpoints from blocks - manually construct since L2Block doesn't have toCheckpoint() - return Promise.all( - blocks.map(async block => { - const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); - checkpoint.blocks = [block]; - return checkpoint; - }), - ); + return Promise.resolve(this.getCheckpointsInEpoch(epochNumber)); } - getCheckpointedBlocksForEpoch(epochNumber: EpochNumber): Promise { - const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration; - const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration }); - const blocks = this.l2Blocks.filter(b => { - const slot = b.header.globalVariables.slotNumber; - return slot >= start && slot <= end; - }); + getCheckpointsDataForEpoch(epochNumber: EpochNumber): Promise { + const checkpoints = this.getCheckpointsInEpoch(epochNumber); return Promise.resolve( - blocks.map(block => - CheckpointedL2Block.fromFields({ - checkpointNumber: CheckpointNumber.fromBlockNumber(block.number), - block, - l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), + checkpoints.map( + (checkpoint): CheckpointData => ({ + checkpointNumber: checkpoint.number, + header: checkpoint.header, + archive: checkpoint.archive, + checkpointOutHash: computeCheckpointOutHash( + checkpoint.blocks.map(b => b.body.txEffects.map(tx => tx.l2ToL1Msgs)), + ), + startBlock: checkpoint.blocks[0].number, + blockCount: checkpoint.blocks.length, attestations: [], + l1: this.mockL1DataForCheckpoint(checkpoint), }), ), ); } + getCheckpointedBlocksForEpoch(epochNumber: EpochNumber): Promise { + const checkpoints = this.getCheckpointsInEpoch(epochNumber); + return 
Promise.resolve( + checkpoints.flatMap(checkpoint => checkpoint.blocks.map(block => this.toCheckpointedBlock(block))), + ); + } + getBlocksForSlot(slotNumber: SlotNumber): Promise { const blocks = this.l2Blocks.filter(b => b.header.globalVariables.slotNumber === slotNumber); return Promise.resolve(blocks); @@ -413,7 +427,10 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { const makeTipId = (blockId: typeof latestBlockId) => ({ block: blockId, - checkpoint: { number: CheckpointNumber.fromBlockNumber(blockId.number), hash: blockId.hash }, + checkpoint: { + number: this.findCheckpointNumberForBlock(blockId.number) ?? CheckpointNumber(0), + hash: blockId.hash, + }, }); return { @@ -501,4 +518,38 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { getPendingChainValidationStatus(): Promise { return Promise.resolve({ valid: true }); } + + /** Returns checkpoints whose slot falls within the given epoch. */ + private getCheckpointsInEpoch(epochNumber: EpochNumber): Checkpoint[] { + const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration; + const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration }); + return this.checkpointList.filter(c => c.header.slotNumber >= start && c.header.slotNumber <= end); + } + + /** Creates a mock L1PublishedData for a checkpoint. */ + private mockL1DataForCheckpoint(checkpoint: Checkpoint): L1PublishedData { + return new L1PublishedData(BigInt(checkpoint.number), BigInt(checkpoint.number), Buffer32.random().toString()); + } + + /** Creates a CheckpointedL2Block from a block using stored checkpoint info. */ + private toCheckpointedBlock(block: L2Block): CheckpointedL2Block { + const checkpoint = this.checkpointList.find(c => c.blocks.some(b => b.number === block.number)); + const checkpointNumber = checkpoint?.number ?? 
block.checkpointNumber; + return new CheckpointedL2Block( + checkpointNumber, + block, + new L1PublishedData( + BigInt(block.number), + BigInt(block.number), + `0x${block.number.toString(16).padStart(64, '0')}`, + ), + [], + ); + } + + /** Finds the checkpoint number for a block, or undefined if the block is not in any checkpoint. */ + private findCheckpointNumberForBlock(blockNumber: BlockNumber): CheckpointNumber | undefined { + const checkpoint = this.checkpointList.find(c => c.blocks.some(b => b.number === blockNumber)); + return checkpoint?.number; + } } diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts index c228d7694280..2b4efa87d32d 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts @@ -56,13 +56,11 @@ describe('e2e_epochs/epochs_proof_fails', () => { // Here we cause a re-org by not publishing the proof for epoch 0 until after the end of epoch 1 // The proof will be rejected and a re-org will take place - // Ensure that there was at least one block mined in epoch 0, otherwise this test fails, since it + // Ensure that there was at least one checkpoint mined in epoch 0, otherwise this test fails, since it // relies on the proof for epoch zero not landing in time, which will never happen if there is - // nothing to prove on epoch zero. This is flakey because startup times change continuously. - // Also note that there should always be at least a checkpoint before we start since setup - // enforces it (search the comment "waiting for an empty block 1 to be mined" in `setup`). - const firstCheckpointNumber = (await test.monitor.run()).checkpointNumber; - expect(firstCheckpointNumber).toBeGreaterThanOrEqual(CheckpointNumber(1)); + // nothing to prove on epoch zero. 
We need to wait for the checkpoint L1 tx to be mined, not just + // for the block to appear in the node's world state, since the propose tx may still be in-flight. + await test.waitUntilCheckpointNumber(CheckpointNumber(1)); const firstCheckpoint = await rollup.getCheckpoint(CheckpointNumber(1)); const firstCheckpointEpoch = getEpochAtSlot(firstCheckpoint.slotNumber, test.constants); expect(firstCheckpointEpoch).toEqual(EpochNumber(0)); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts index f71618d90b37..c3523b9ce36c 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts @@ -24,7 +24,7 @@ import { type P2P, P2PClientState } from '@aztec/p2p'; import type { SlasherClientInterface } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { CommitteeAttestation, L2Block, type L2BlockSink, type L2BlockSource } from '@aztec/stdlib/block'; -import { Checkpoint } from '@aztec/stdlib/checkpoint'; +import { Checkpoint, type CheckpointData, L1PublishedData } from '@aztec/stdlib/checkpoint'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { GasFees } from '@aztec/stdlib/gas'; import { @@ -216,7 +216,7 @@ describe('CheckpointProposalJob', () => { l1ToL2MessageSource.getL1ToL2Messages.mockResolvedValue(Array(4).fill(Fr.ZERO)); l2BlockSource = mock(); - l2BlockSource.getCheckpointsForEpoch.mockResolvedValue([]); + l2BlockSource.getCheckpointsDataForEpoch.mockResolvedValue([]); blockSink = mock(); blockSink.addBlock.mockResolvedValue(undefined); @@ -369,6 +369,7 @@ describe('CheckpointProposalJob', () => { it('passes previous checkpoint out hashes when there are earlier checkpoints in the epoch', async () => { // Create two previous checkpoints in the same epoch const previousCheckpoints 
= await timesAsync(2, i => Checkpoint.random(CheckpointNumber(i + 1))); + const previousCheckpointsData: CheckpointData[] = previousCheckpoints.map(c => toCheckpointData(c)); // Update job to be for checkpoint 3 checkpointNumber = CheckpointNumber(3); @@ -383,7 +384,7 @@ describe('CheckpointProposalJob', () => { ); // Mock l2BlockSource to return the previous checkpoints - l2BlockSource.getCheckpointsForEpoch.mockResolvedValue(previousCheckpoints); + l2BlockSource.getCheckpointsDataForEpoch.mockResolvedValue(previousCheckpointsData); // Build block successfully const { txs, block } = await setupTxsAndBlock(p2p, globalVariables, 1, chainId); @@ -419,8 +420,12 @@ describe('CheckpointProposalJob', () => { }), ); - // Mock l2BlockSource to return all three checkpoints - l2BlockSource.getCheckpointsForEpoch.mockResolvedValue([previousCheckpoint, currentCheckpoint, futureCheckpoint]); + // Mock l2BlockSource to return all three checkpoints as data + l2BlockSource.getCheckpointsDataForEpoch.mockResolvedValue([ + toCheckpointData(previousCheckpoint), + toCheckpointData(currentCheckpoint), + toCheckpointData(futureCheckpoint), + ]); // Build block successfully const { txs, block } = await setupTxsAndBlock(p2p, globalVariables, 1, chainId); @@ -1114,3 +1119,17 @@ class TestCheckpointProposalJob extends CheckpointProposalJob { return super.buildSingleBlock(checkpointBuilder, opts); } } + +/** Creates a CheckpointData from a Checkpoint for testing. */ +function toCheckpointData(checkpoint: Checkpoint): CheckpointData { + return { + checkpointNumber: checkpoint.number, + header: checkpoint.header, + archive: checkpoint.archive, + checkpointOutHash: checkpoint.getCheckpointOutHash(), + startBlock: BlockNumber(checkpoint.blocks[0]?.number ?? 
1), + blockCount: checkpoint.blocks.length, + attestations: [], + l1: L1PublishedData.random(), + }; +} diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index 00f39be89c90..6b373ad5a3dd 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -422,7 +422,7 @@ describe('CheckpointProposalJob Timing Tests', () => { l1ToL2MessageSource.getL1ToL2Messages.mockResolvedValue(Array(4).fill(Fr.ZERO)); l2BlockSource = mock(); - l2BlockSource.getCheckpointsForEpoch.mockResolvedValue([]); + l2BlockSource.getCheckpointsDataForEpoch.mockResolvedValue([]); blockSink = mock(); blockSink.addBlock.mockResolvedValue(undefined); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index fcc773ab3a5d..032804d9cd04 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -186,10 +186,9 @@ export class CheckpointProposalJob implements Traceable { const inHash = computeInHashFromL1ToL2Messages(l1ToL2Messages); // Collect the out hashes of all the checkpoints before this one in the same epoch - const previousCheckpoints = (await this.l2BlockSource.getCheckpointsForEpoch(this.epoch)).filter( - c => c.number < this.checkpointNumber, - ); - const previousCheckpointOutHashes = previousCheckpoints.map(c => c.getCheckpointOutHash()); + const previousCheckpointOutHashes = (await this.l2BlockSource.getCheckpointsDataForEpoch(this.epoch)) + .filter(c => c.checkpointNumber < this.checkpointNumber) + .map(c => c.checkpointOutHash); // Get the fee asset price modifier from the oracle const feeAssetPriceModifier = await 
this.publisher.getFeeAssetPriceModifier(); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 9329b90fba22..cb625f07002d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -271,6 +271,7 @@ describe('sequencer', () => { getPendingChainValidationStatus: mockFn().mockResolvedValue({ valid: true }), getCheckpointedBlocksForEpoch: mockFn().mockResolvedValue([]), getCheckpointsForEpoch: mockFn().mockResolvedValue([]), + getCheckpointsDataForEpoch: mockFn().mockResolvedValue([]), }); l1ToL2MessageSource = mock({ diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts index 24355f8545d8..dd52040bdef7 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts @@ -253,6 +253,7 @@ describe('EpochPruneWatcher', () => { class MockL2BlockSource { public readonly events = new EventEmitter(); public getCheckpointsForEpoch = () => []; + public getCheckpointsDataForEpoch = () => []; constructor() {} } diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts index f10c4b39378c..0de0f6b27f65 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts @@ -132,9 +132,9 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter const blocksByCheckpoint = chunkBy(sortedBlocks, b => b.checkpointNumber); // Get prior checkpoints in the epoch (in case this was a partial prune) to extract the out hashes - const priorCheckpointOutHashes = (await this.l2BlockSource.getCheckpointsForEpoch(epochNumber)) - .filter(c => c.number < sortedBlocks[0].checkpointNumber) - .map(c => 
c.getCheckpointOutHash()); + const priorCheckpointOutHashes = (await this.l2BlockSource.getCheckpointsDataForEpoch(epochNumber)) + .filter(c => c.checkpointNumber < sortedBlocks[0].checkpointNumber) + .map(c => c.checkpointOutHash); let previousCheckpointOutHashes: Fr[] = [...priorCheckpointOutHashes]; const fork = await this.checkpointsBuilder.getFork( diff --git a/yarn-project/stdlib/src/block/l2_block_source.ts b/yarn-project/stdlib/src/block/l2_block_source.ts index 778c70c97228..f1daf550d7eb 100644 --- a/yarn-project/stdlib/src/block/l2_block_source.ts +++ b/yarn-project/stdlib/src/block/l2_block_source.ts @@ -13,6 +13,7 @@ import type { TypedEventEmitter } from '@aztec/foundation/types'; import { z } from 'zod'; import type { Checkpoint } from '../checkpoint/checkpoint.js'; +import type { CheckpointData } from '../checkpoint/checkpoint_data.js'; import type { PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; import type { L1RollupConstants } from '../epoch-helpers/index.js'; import { CheckpointHeader } from '../rollup/checkpoint_header.js'; @@ -99,6 +100,12 @@ export interface L2BlockSource { */ getCheckpointsForEpoch(epochNumber: EpochNumber): Promise; + /** + * Gets lightweight checkpoint metadata for a given epoch, without fetching full block data. + * @param epochNumber - Epoch for which we want checkpoint data + */ + getCheckpointsDataForEpoch(epochNumber: EpochNumber): Promise; + /** * Gets a block header by its hash. * @param blockHash - The block hash to retrieve. diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint.ts b/yarn-project/stdlib/src/checkpoint/checkpoint.ts index 9e633345f89c..2c95d3c0be4a 100644 --- a/yarn-project/stdlib/src/checkpoint/checkpoint.ts +++ b/yarn-project/stdlib/src/checkpoint/checkpoint.ts @@ -94,9 +94,11 @@ export class Checkpoint { return this.header.hash(); } - // Returns the out hash computed from all l2-to-l1 messages in this checkpoint. 
- // Note: This value is different from the out hash in the header, which is the **accumulated** out hash over all - // checkpoints up to and including this one in the epoch. + /** + * Returns the out hash computed from all l2-to-l1 messages in this checkpoint. + * Note: This value is different from the out hash in the header, which is the **accumulated** out hash over all + * checkpoints up to and including this one in the epoch. + */ public getCheckpointOutHash(): Fr { const msgs = this.blocks.map(block => block.body.txEffects.map(txEffect => txEffect.l2ToL1Msgs)); return computeCheckpointOutHash(msgs); diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts b/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts new file mode 100644 index 000000000000..32dd799181ee --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts @@ -0,0 +1,51 @@ +import { + BlockNumber, + BlockNumberSchema, + CheckpointNumber, + CheckpointNumberSchema, +} from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { schemas } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +import { CommitteeAttestation } from '../block/proposal/committee_attestation.js'; +import { CheckpointHeader } from '../rollup/checkpoint_header.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { L1PublishedData } from './published_checkpoint.js'; + +/** Lightweight checkpoint metadata without full block data. 
*/ +export type CheckpointData = { + checkpointNumber: CheckpointNumber; + header: CheckpointHeader; + archive: AppendOnlyTreeSnapshot; + checkpointOutHash: Fr; + startBlock: BlockNumber; + blockCount: number; + attestations: CommitteeAttestation[]; + l1: L1PublishedData; +}; + +export const CheckpointDataSchema = z + .object({ + checkpointNumber: CheckpointNumberSchema, + header: CheckpointHeader.schema, + archive: AppendOnlyTreeSnapshot.schema, + checkpointOutHash: schemas.Fr, + startBlock: BlockNumberSchema, + blockCount: schemas.Integer, + attestations: z.array(CommitteeAttestation.schema), + l1: L1PublishedData.schema, + }) + .transform( + (obj): CheckpointData => ({ + checkpointNumber: obj.checkpointNumber, + header: obj.header, + archive: obj.archive, + checkpointOutHash: obj.checkpointOutHash, + startBlock: obj.startBlock, + blockCount: obj.blockCount, + attestations: obj.attestations, + l1: obj.l1, + }), + ); diff --git a/yarn-project/stdlib/src/checkpoint/index.ts b/yarn-project/stdlib/src/checkpoint/index.ts index 6c189e5a5ddc..d86f88c87bbb 100644 --- a/yarn-project/stdlib/src/checkpoint/index.ts +++ b/yarn-project/stdlib/src/checkpoint/index.ts @@ -1,3 +1,4 @@ export * from './checkpoint.js'; +export * from './checkpoint_data.js'; export * from './checkpoint_info.js'; export * from './published_checkpoint.js'; diff --git a/yarn-project/stdlib/src/checkpoint/published_checkpoint.ts b/yarn-project/stdlib/src/checkpoint/published_checkpoint.ts index d5afc5c2e3e0..67c2104fe05b 100644 --- a/yarn-project/stdlib/src/checkpoint/published_checkpoint.ts +++ b/yarn-project/stdlib/src/checkpoint/published_checkpoint.ts @@ -55,9 +55,11 @@ export class L1PublishedData { export class PublishedCheckpoint { constructor( + /** The checkpoint itself. */ public checkpoint: Checkpoint, + /** Info on when this checkpoint was published on L1. */ public l1: L1PublishedData, - // The attestations for the last block in the checkpoint. 
+ /** The attestations for the last block in the checkpoint. */ public attestations: CommitteeAttestation[], ) {} diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 6235da1d6c81..2b5cb983325b 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -14,6 +14,7 @@ import { type BlockData, BlockHash, CommitteeAttestation, L2Block } from '../blo import type { L2Tips } from '../block/l2_block_source.js'; import type { ValidateCheckpointResult } from '../block/validate_block_result.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; +import type { CheckpointData } from '../checkpoint/checkpoint_data.js'; import { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; import { getContractClassFromArtifact } from '../contract/contract_class.js'; import { @@ -194,6 +195,14 @@ describe('ArchiverApiSchema', () => { expect(result).toEqual([expect.any(Checkpoint)]); }); + it('getCheckpointsDataForEpoch', async () => { + const result = await context.client.getCheckpointsDataForEpoch(EpochNumber(1)); + expect(result).toHaveLength(1); + expect(result[0].checkpointNumber).toBeDefined(); + expect(result[0].checkpointOutHash).toBeDefined(); + expect(result[0].attestations[0]).toBeInstanceOf(CommitteeAttestation); + }); + it('getCheckpointedBlock', async () => { const result = await context.client.getCheckpointedBlock(BlockNumber(1)); expect(result).toBeDefined(); @@ -501,6 +510,22 @@ class MockArchiver implements ArchiverApi { expect(epochNumber).toEqual(EpochNumber(1)); return [await Checkpoint.random(CheckpointNumber(1))]; } + async getCheckpointsDataForEpoch(epochNumber: EpochNumber): Promise { + expect(epochNumber).toEqual(EpochNumber(1)); + const checkpoint = await Checkpoint.random(CheckpointNumber(1)); + return [ + { + checkpointNumber: checkpoint.number, + header: checkpoint.header, + archive: 
checkpoint.archive, + checkpointOutHash: checkpoint.getCheckpointOutHash(), + startBlock: BlockNumber(1), + blockCount: checkpoint.blocks.length, + attestations: [CommitteeAttestation.random()], + l1: L1PublishedData.random(), + }, + ]; + } async getCheckpointedBlocksForEpoch(epochNumber: EpochNumber): Promise { expect(epochNumber).toEqual(EpochNumber(1)); const block = await L2Block.random(BlockNumber(Number(epochNumber))); diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 4caf08b4a30a..9af2b49e6fbc 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -11,6 +11,7 @@ import { L2Block } from '../block/l2_block.js'; import { type L2BlockSource, L2TipsSchema } from '../block/l2_block_source.js'; import { ValidateCheckpointResultSchema } from '../block/validate_block_result.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; +import { CheckpointDataSchema } from '../checkpoint/checkpoint_data.js'; import { PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; import { ContractClassPublicSchema, @@ -115,6 +116,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getL2SlotNumber: z.function().args().returns(schemas.SlotNumber.optional()), getL2EpochNumber: z.function().args().returns(EpochNumberSchema.optional()), getCheckpointsForEpoch: z.function().args(EpochNumberSchema).returns(z.array(Checkpoint.schema)), + getCheckpointsDataForEpoch: z.function().args(EpochNumberSchema).returns(z.array(CheckpointDataSchema)), getCheckpointedBlocksForEpoch: z.function().args(EpochNumberSchema).returns(z.array(CheckpointedL2Block.schema)), getBlocksForSlot: z.function().args(schemas.SlotNumber).returns(z.array(L2Block.schema)), getCheckpointedBlockHeadersForEpoch: z.function().args(EpochNumberSchema).returns(z.array(BlockHeader.schema)), diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts 
b/yarn-project/validator-client/src/block_proposal_handler.ts index 4be070f3a8c5..776b48ac266c 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -1,7 +1,6 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import type { EpochCache } from '@aztec/epoch-cache'; import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { chunkBy } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { TimeoutError } from '@aztec/foundation/error'; import { createLogger } from '@aztec/foundation/log'; @@ -12,11 +11,7 @@ import { BlockProposalValidator } from '@aztec/p2p/msg_validators'; import type { BlockData, L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import { getEpochAtSlot, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import type { ITxProvider, ValidatorClientFullConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; -import { - type L1ToL2MessageSource, - computeCheckpointOutHash, - computeInHashFromL1ToL2Messages, -} from '@aztec/stdlib/messaging'; +import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { BlockProposal } from '@aztec/stdlib/p2p'; import type { CheckpointGlobalVariables, FailedTx, Tx } from '@aztec/stdlib/tx'; import { @@ -218,17 +213,11 @@ export class BlockProposalHandler { // Try re-executing the transactions in the proposal if needed let reexecutionResult; if (shouldReexecute) { - // Compute the previous checkpoint out hashes for the epoch. - // TODO(leila/mbps): There can be a more efficient way to get the previous checkpoint out - // hashes without having to fetch all the blocks. 
+ // Collect the out hashes of all the checkpoints before this one in the same epoch const epoch = getEpochAtSlot(slotNumber, this.epochCache.getL1Constants()); - const checkpointedBlocks = (await this.blockSource.getCheckpointedBlocksForEpoch(epoch)) - .filter(b => b.block.number < blockNumber) - .sort((a, b) => a.block.number - b.block.number); - const blocksByCheckpoint = chunkBy(checkpointedBlocks, b => b.checkpointNumber); - const previousCheckpointOutHashes = blocksByCheckpoint.map(checkpointBlocks => - computeCheckpointOutHash(checkpointBlocks.map(b => b.block.body.txEffects.map(tx => tx.l2ToL1Msgs))), - ); + const previousCheckpointOutHashes = (await this.blockSource.getCheckpointsDataForEpoch(epoch)) + .filter(c => c.checkpointNumber < checkpointNumber) + .map(c => c.checkpointOutHash); try { this.log.verbose(`Re-executing transactions in the proposal`, proposalInfo); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index feb64eb9aba9..df6d16208a16 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -96,6 +96,7 @@ describe('ValidatorClient', () => { >[1] as any); blockSource = mock(); blockSource.getCheckpointedBlocksForEpoch.mockResolvedValue([]); + blockSource.getCheckpointsDataForEpoch.mockResolvedValue([]); blockSource.getBlocksForSlot.mockResolvedValue([]); epochCache.isEscapeHatchOpenAtSlot.mockResolvedValue(false); l1ToL2MessageSource = mock(); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 873dc9a15b50..29c5d497576e 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -662,14 +662,11 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) // Get L1-to-L2 messages for this checkpoint const l1ToL2Messages = await 
this.l1ToL2MessageSource.getL1ToL2Messages(checkpointNumber); - // Compute the previous checkpoint out hashes for the epoch. - // TODO: There can be a more efficient way to get the previous checkpoint out hashes without having to fetch the - // actual checkpoints and the blocks/txs in them. + // Collect the out hashes of all the checkpoints before this one in the same epoch const epoch = getEpochAtSlot(slot, this.epochCache.getL1Constants()); - const previousCheckpoints = (await this.blockSource.getCheckpointsForEpoch(epoch)) - .filter(b => b.number < checkpointNumber) - .sort((a, b) => a.number - b.number); - const previousCheckpointOutHashes = previousCheckpoints.map(c => c.getCheckpointOutHash()); + const previousCheckpointOutHashes = (await this.blockSource.getCheckpointsDataForEpoch(epoch)) + .filter(c => c.checkpointNumber < checkpointNumber) + .map(c => c.checkpointOutHash); // Fork world state at the block before the first block const parentBlockNumber = BlockNumber(firstBlock.number - 1); From dac73ca3ab4b8d0f338d2d2796f212295d170060 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Mon, 16 Feb 2026 09:50:02 +0000 Subject: [PATCH 39/62] feat: worker thread wallet --- .../src/spartan/n_tps_prove.test.ts | 121 ++++++++----- .../src/spartan/setup_test_wallets.ts | 40 +++++ .../src/test-wallet/wallet_worker_script.ts | 43 +++++ .../src/test-wallet/worker_wallet.ts | 165 ++++++++++++++++++ .../src/test-wallet/worker_wallet_schema.ts | 13 ++ 5 files changed, 337 insertions(+), 45 deletions(-) create mode 100644 yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts create mode 100644 yarn-project/end-to-end/src/test-wallet/worker_wallet.ts create mode 100644 yarn-project/end-to-end/src/test-wallet/worker_wallet_schema.ts diff --git a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts index fe2ffc7dd233..a98c2a43fd08 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts +++ 
b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts @@ -1,6 +1,9 @@ -import { NO_WAIT } from '@aztec/aztec.js/contracts'; +import { SchnorrAccountContract } from '@aztec/accounts/schnorr'; +import { AztecAddress } from '@aztec/aztec.js/addresses'; +import { toSendOptions } from '@aztec/aztec.js/contracts'; import { SponsoredFeePaymentMethod } from '@aztec/aztec.js/fee'; import { type AztecNode, createAztecNodeClient } from '@aztec/aztec.js/node'; +import { AccountManager } from '@aztec/aztec.js/wallet'; import { RollupCheatCodes } from '@aztec/aztec/testing'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { EthCheatCodesWithState } from '@aztec/ethereum/test'; @@ -13,6 +16,7 @@ import { sleep } from '@aztec/foundation/sleep'; import { DateProvider } from '@aztec/foundation/timer'; import { BenchmarkingContract } from '@aztec/noir-test-contracts.js/Benchmarking'; import { GasFees } from '@aztec/stdlib/gas'; +import { deriveSigningKey } from '@aztec/stdlib/keys'; import { Tx, TxHash } from '@aztec/stdlib/tx'; import { jest } from '@jest/globals'; @@ -20,15 +24,10 @@ import type { ChildProcess } from 'child_process'; import { mkdir, writeFile } from 'fs/promises'; import { dirname } from 'path'; -import { getSponsoredFPCAddress } from '../fixtures/utils.js'; +import { getSponsoredFPCAddress, registerSponsoredFPC } from '../fixtures/utils.js'; import { PrometheusClient } from '../quality_of_service/prometheus_client.js'; -import type { TestWallet } from '../test-wallet/test_wallet.js'; -import { ProvenTx, proveInteraction } from '../test-wallet/utils.js'; -import { - type WalletWrapper, - createWalletAndAztecNodeClient, - deploySponsoredTestAccounts, -} from './setup_test_wallets.js'; +import type { WorkerWallet } from '../test-wallet/worker_wallet.js'; +import { type WorkerWalletWrapper, createWorkerWalletClient } from './setup_test_wallets.js'; import { ProvingMetrics } from './tx_metrics.js'; import { getExternalIP, @@ -99,9 +98,10 @@ type 
MetricsSnapshot = { /** A wallet that produces transactions in the background. */ type WalletTxProducer = { - wallet: TestWallet; - prototypeTx: ProvenTx | undefined; // Each wallet's own prototype (for fake proving) - readyTx: ProvenTx | null; + wallet: WorkerWallet; + accountAddress: AztecAddress; + prototypeTx: Tx | undefined; // Each wallet's own prototype (for fake proving) + readyTx: Tx | null; }; describe(`prove ${TARGET_TPS}TPS test`, () => { @@ -110,8 +110,9 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { const logger = createLogger(`e2e:spartan-test:prove-${TARGET_TPS}tps`); - let testWallets: WalletWrapper[]; - let wallets: TestWallet[]; + let testWallets: WorkerWalletWrapper[]; + let wallets: WorkerWallet[]; + let accountAddresses: AztecAddress[]; let producers: WalletTxProducer[]; let producerAbortController: AbortController; @@ -267,18 +268,41 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { logger.info(`Creating ${NUM_WALLETS} wallet(s)...`); testWallets = await timesAsync(NUM_WALLETS, i => { logger.info(`Creating wallet ${i + 1}/${NUM_WALLETS}`); - return createWalletAndAztecNodeClient(rpcUrl, config.REAL_VERIFIER, logger); + return createWorkerWalletClient(rpcUrl, config.REAL_VERIFIER, logger); }); - - const localTestAccounts = await Promise.all( - testWallets.map(tw => deploySponsoredTestAccounts(tw.wallet, aztecNode, logger, 0)), - ); - wallets = localTestAccounts.map(acc => acc.wallet); + wallets = testWallets.map(tw => tw.wallet); + + // Register FPC and create/deploy accounts + const fpcAddress = await getSponsoredFPCAddress(); + const sponsor = new SponsoredFeePaymentMethod(fpcAddress); + accountAddresses = []; + for (const wallet of wallets) { + const secret = Fr.random(); + const salt = Fr.random(); + // Register account inside worker (populates TestWallet.accounts map) + const address = await wallet.registerAccount(secret, salt); + // Register FPC in worker's PXE + await registerSponsoredFPC(wallet); + // Deploy via standard 
AccountManager flow (from: ZERO -> SignerlessAccount, no account lookup) + const manager = await AccountManager.create( + wallet, + secret, + new SchnorrAccountContract(deriveSigningKey(secret)), + salt, + ); + const deployMethod = await manager.getDeployMethod(); + await deployMethod.send({ + from: AztecAddress.ZERO, + fee: { paymentMethod: sponsor }, + wait: { timeout: 2400 }, + }); + logger.info(`Account deployed at ${address}`); + accountAddresses.push(address); + } logger.info('Deploying benchmark contract...'); - const sponsor = new SponsoredFeePaymentMethod(await getSponsoredFPCAddress()); - benchmarkContract = await BenchmarkingContract.deploy(localTestAccounts[0].wallet).send({ - from: localTestAccounts[0].recipientAddress, + benchmarkContract = await BenchmarkingContract.deploy(wallets[0]).send({ + from: accountAddresses[0], fee: { paymentMethod: sponsor }, }); @@ -288,9 +312,12 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { beforeEach(async () => { logger.info(`Creating ${wallets.length} tx producers`); producers = await Promise.all( - wallets.map(async wallet => { - const proto = config.REAL_VERIFIER ? undefined : await createTx(wallet, benchmarkContract, logger); - return { wallet, prototypeTx: proto, readyTx: null }; + wallets.map(async (wallet, i) => { + const accountAddress = accountAddresses[i]; + const proto = config.REAL_VERIFIER + ? 
undefined + : await createTx(wallet, accountAddress, benchmarkContract, logger); + return { wallet, accountAddress, prototypeTx: proto, readyTx: null }; }), ); @@ -362,7 +389,8 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { // consume tx const tx = producer.readyTx; producer.readyTx = null; - sentTxs.push(await tx.send({ wait: NO_WAIT })); + await aztecNode.sendTx(tx); + sentTxs.push(tx.getTxHash()); logger.info(`Sent tx ${i + 1}/${txsToSend}`); @@ -486,48 +514,51 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { }); async function createTx( - wallet: TestWallet, + wallet: WorkerWallet, + accountAddress: AztecAddress, benchmarkContract: BenchmarkingContract, logger: Logger, -): Promise { +): Promise { logger.info('Creating prototype transaction...'); const sponsor = new SponsoredFeePaymentMethod(await getSponsoredFPCAddress()); - const tx = await proveInteraction(wallet, benchmarkContract.methods.sha256_hash_1024(Array(1024).fill(42)), { - from: (await wallet.getAccounts())[0].item, + const options = { + from: accountAddress, fee: { paymentMethod: sponsor, gasSettings: { maxPriorityFeesPerGas: GasFees.empty() } }, - }); + }; + const interaction = benchmarkContract.methods.sha256_hash_1024(Array(1024).fill(42)); + const execPayload = await interaction.request(options); + const tx = await wallet.proveTx(execPayload, toSendOptions(options)); logger.info('Prototype transaction created'); return tx; } -async function cloneTx(tx: ProvenTx, aztecNode: AztecNode): Promise { - const clonedTxData = Tx.clone(tx, false); +async function cloneTx(tx: Tx, aztecNode: AztecNode): Promise { + const clonedTx = Tx.clone(tx, false); // Fetch current minimum fees and apply 50% buffer for safety const currentFees = await aztecNode.getCurrentMinFees(); const paddedFees = currentFees.mul(1.5); // Update gas settings with current fees - (clonedTxData.data.constants.txContext.gasSettings as any).maxFeesPerGas = paddedFees; + (clonedTx.data.constants.txContext.gasSettings as 
any).maxFeesPerGas = paddedFees; // Randomize nullifiers to avoid conflicts - if (clonedTxData.data.forRollup) { - for (let i = 0; i < clonedTxData.data.forRollup.end.nullifiers.length; i++) { - if (clonedTxData.data.forRollup.end.nullifiers[i].isZero()) { + if (clonedTx.data.forRollup) { + for (let i = 0; i < clonedTx.data.forRollup.end.nullifiers.length; i++) { + if (clonedTx.data.forRollup.end.nullifiers[i].isZero()) { continue; } - clonedTxData.data.forRollup.end.nullifiers[i] = Fr.random(); + clonedTx.data.forRollup.end.nullifiers[i] = Fr.random(); } - } else if (clonedTxData.data.forPublic) { - for (let i = 0; i < clonedTxData.data.forPublic.nonRevertibleAccumulatedData.nullifiers.length; i++) { - if (clonedTxData.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i].isZero()) { + } else if (clonedTx.data.forPublic) { + for (let i = 0; i < clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers.length; i++) { + if (clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i].isZero()) { continue; } - clonedTxData.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i] = Fr.random(); + clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i] = Fr.random(); } } - const clonedTx = new ProvenTx((tx as any).node, clonedTxData, tx.offchainEffects, tx.stats); await clonedTx.recomputeHash(); return clonedTx; } @@ -548,7 +579,7 @@ async function startProducing( try { const tx = config.REAL_VERIFIER - ? await createTx(producer.wallet, benchmarkContract, logger) + ? 
await createTx(producer.wallet, producer.accountAddress, benchmarkContract, logger) : await cloneTx(producer.prototypeTx!, aztecNode); producer.readyTx = tx; diff --git a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts index a0af523c70b7..c7567e794750 100644 --- a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts +++ b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts @@ -20,6 +20,7 @@ import { getBBConfig } from '../fixtures/get_bb_config.js'; import { getSponsoredFPCAddress, registerSponsoredFPC } from '../fixtures/utils.js'; import { TestWallet } from '../test-wallet/test_wallet.js'; import { proveInteraction } from '../test-wallet/utils.js'; +import { WorkerWallet } from '../test-wallet/worker_wallet.js'; export interface TestAccounts { aztecNode: AztecNode; @@ -397,3 +398,42 @@ export async function createWalletAndAztecNodeClient( }, }; } + +export type WorkerWalletWrapper = { + wallet: WorkerWallet; + aztecNode: AztecNode; + cleanup: () => Promise; +}; + +export async function createWorkerWalletClient( + nodeUrl: string, + proverEnabled: boolean, + logger: Logger, +): Promise { + const aztecNode = createAztecNodeClient(nodeUrl); + const [bbConfig, acvmConfig] = await Promise.all([getBBConfig(logger), getACVMConfig(logger)]); + + // Strip cleanup functions — they can't be structured-cloned for worker transfer + const { cleanup: bbCleanup, ...bbPaths } = bbConfig ?? {}; + const { cleanup: acvmCleanup, ...acvmPaths } = acvmConfig ?? 
{}; + + const pxeConfig = { + dataDirectory: undefined, + dataStoreMapSizeKb: 1024 * 1024, + ...bbPaths, + ...acvmPaths, + proverEnabled, + }; + + const wallet = await WorkerWallet.create(nodeUrl, pxeConfig); + + return { + wallet, + aztecNode, + async cleanup() { + await wallet.stop(); + await bbCleanup?.(); + await acvmCleanup?.(); + }, + }; +} diff --git a/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts b/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts new file mode 100644 index 000000000000..820c1c402e95 --- /dev/null +++ b/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts @@ -0,0 +1,43 @@ +import { createAztecNodeClient } from '@aztec/aztec.js/node'; +import { jsonStringify } from '@aztec/foundation/json-rpc'; +import type { ApiSchema } from '@aztec/foundation/schemas'; +import { parseWithOptionals, schemaHasMethod } from '@aztec/foundation/schemas'; +import { NodeListener, TransportServer } from '@aztec/foundation/transport'; + +import { workerData } from 'worker_threads'; + +import { TestWallet } from './test_wallet.js'; +import { WorkerWalletSchema } from './worker_wallet_schema.js'; + +const { nodeUrl, pxeConfig } = workerData as { nodeUrl: string; pxeConfig?: Record }; + +const node = createAztecNodeClient(nodeUrl); +const wallet = await TestWallet.create(node, pxeConfig); + +/** Handlers for methods that need custom implementation (not direct wallet passthrough). */ +const handlers: Record Promise> = { + proveTx: async (exec, opts) => { + const provenTx = await wallet.proveTx(exec, opts); + // ProvenTx has non-serializable fields (node proxy, etc.) 
— extract only Tx-compatible fields + const { data, chonkProof, contractClassLogFields, publicFunctionCalldata } = provenTx; + return { data, chonkProof, contractClassLogFields, publicFunctionCalldata }; + }, + registerAccount: async (secret, salt) => { + const manager = await wallet.createSchnorrAccount(secret, salt); + return manager.address; + }, +}; + +const schema = WorkerWalletSchema as ApiSchema; +const listener = new NodeListener(); +const server = new TransportServer<{ fn: string; args: string }>(listener, async msg => { + if (!schemaHasMethod(schema, msg.fn)) { + throw new Error(`Unknown method: ${msg.fn}`); + } + const jsonParams = JSON.parse(msg.args) as unknown[]; + const args = await parseWithOptionals(jsonParams, schema[msg.fn].parameters()); + const handler = handlers[msg.fn]; + const result = handler ? await handler(...args) : await (wallet as any)[msg.fn](...args); + return jsonStringify(result); +}); +server.start(); diff --git a/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts new file mode 100644 index 000000000000..d5f8b34c591b --- /dev/null +++ b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts @@ -0,0 +1,165 @@ +import type { CallIntent, IntentInnerHash } from '@aztec/aztec.js/authorization'; +import type { InteractionWaitOptions, SendReturn } from '@aztec/aztec.js/contracts'; +import type { + Aliased, + AppCapabilities, + BatchResults, + BatchedMethod, + ContractClassMetadata, + ContractMetadata, + PrivateEvent, + PrivateEventFilter, + ProfileOptions, + SendOptions, + SimulateOptions, + SimulateUtilityOptions, + Wallet, + WalletCapabilities, +} from '@aztec/aztec.js/wallet'; +import type { ChainInfo } from '@aztec/entrypoints/interfaces'; +import type { Fr } from '@aztec/foundation/curves/bn254'; +import { jsonStringify } from '@aztec/foundation/json-rpc'; +import type { ApiSchema } from '@aztec/foundation/schemas'; +import { NodeConnector, TransportClient } from 
'@aztec/foundation/transport'; +import type { PXEConfig } from '@aztec/pxe/config'; +import type { ContractArtifact, EventMetadataDefinition, FunctionCall } from '@aztec/stdlib/abi'; +import type { AuthWitness } from '@aztec/stdlib/auth-witness'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; +import type { ExecutionPayload, TxProfileResult, TxSimulationResult, UtilitySimulationResult } from '@aztec/stdlib/tx'; +import { Tx } from '@aztec/stdlib/tx'; + +import { Worker } from 'worker_threads'; + +import { WorkerWalletSchema } from './worker_wallet_schema.js'; + +type WorkerMsg = { fn: string; args: string }; + +/** + * Wallet implementation that offloads all work to a worker thread. + * Implements the Wallet interface by proxying calls over a transport layer + * using JSON serialization with Zod schema parsing on both ends. + */ +export class WorkerWallet implements Wallet { + private constructor( + private worker: Worker, + private client: TransportClient, + ) {} + + /** + * Creates a WorkerWallet by spawning a worker thread that creates a TestWallet internally. + * @param nodeUrl - URL of the Aztec node to connect to. + * @param pxeConfig - Optional PXE configuration overrides. + * @returns A WorkerWallet ready to use. + */ + static async create(nodeUrl: string, pxeConfig?: Partial): Promise { + const worker = new Worker(new URL('./wallet_worker_script.js', import.meta.url), { + workerData: { nodeUrl, pxeConfig }, + }); + + const connector = new NodeConnector(worker); + const client = new TransportClient(connector); + await client.open(); + + const wallet = new WorkerWallet(worker, client); + // Warmup / readiness check — blocks until the worker has finished creating the TestWallet. 
+ await wallet.getChainInfo(); + return wallet; + } + + private async callRaw(fn: string, ...args: any[]): Promise { + const argsJson = jsonStringify(args); + return (await this.client.request({ fn, args: argsJson })) as string; + } + + private async call(fn: string, ...args: any[]): Promise { + const resultJson = await this.callRaw(fn, ...args); + const methodSchema = (WorkerWalletSchema as ApiSchema)[fn]; + return methodSchema.returnType().parseAsync(JSON.parse(resultJson)); + } + + getChainInfo(): Promise { + return this.call('getChainInfo'); + } + + getContractMetadata(address: AztecAddress): Promise { + return this.call('getContractMetadata', address); + } + + getContractClassMetadata(id: Fr): Promise { + return this.call('getContractClassMetadata', id); + } + + getPrivateEvents( + eventMetadata: EventMetadataDefinition, + eventFilter: PrivateEventFilter, + ): Promise[]> { + return this.call('getPrivateEvents', eventMetadata, eventFilter); + } + + registerSender(address: AztecAddress, alias?: string): Promise { + return this.call('registerSender', address, alias); + } + + getAddressBook(): Promise[]> { + return this.call('getAddressBook'); + } + + getAccounts(): Promise[]> { + return this.call('getAccounts'); + } + + registerContract( + instance: ContractInstanceWithAddress, + artifact?: ContractArtifact, + secretKey?: Fr, + ): Promise { + return this.call('registerContract', instance, artifact, secretKey); + } + + simulateTx(exec: ExecutionPayload, opts: SimulateOptions): Promise { + return this.call('simulateTx', exec, opts); + } + + simulateUtility(call: FunctionCall, opts: SimulateUtilityOptions): Promise { + return this.call('simulateUtility', call, opts); + } + + profileTx(exec: ExecutionPayload, opts: ProfileOptions): Promise { + return this.call('profileTx', exec, opts); + } + + sendTx( + exec: ExecutionPayload, + opts: SendOptions, + ): Promise> { + return this.call('sendTx', exec, opts); + } + + proveTx(exec: ExecutionPayload, opts: Omit): Promise { 
+ return this.call('proveTx', exec, opts); + } + + /** Registers an account inside the worker's TestWallet, populating its accounts map. */ + registerAccount(secret: Fr, salt: Fr): Promise { + return this.call('registerAccount', secret, salt); + } + + createAuthWit(from: AztecAddress, messageHashOrIntent: IntentInnerHash | CallIntent): Promise { + return this.call('createAuthWit', from, messageHashOrIntent); + } + + requestCapabilities(manifest: AppCapabilities): Promise { + return this.call('requestCapabilities', manifest); + } + + batch(methods: T): Promise> { + return this.call('batch', methods); + } + + /** Shuts down the worker thread and closes the transport. */ + async stop(): Promise { + this.client.close(); + await this.worker.terminate(); + } +} diff --git a/yarn-project/end-to-end/src/test-wallet/worker_wallet_schema.ts b/yarn-project/end-to-end/src/test-wallet/worker_wallet_schema.ts new file mode 100644 index 000000000000..7e2a47c4d8bf --- /dev/null +++ b/yarn-project/end-to-end/src/test-wallet/worker_wallet_schema.ts @@ -0,0 +1,13 @@ +import { ExecutionPayloadSchema, SendOptionsSchema, WalletSchema } from '@aztec/aztec.js/wallet'; +import { schemas } from '@aztec/foundation/schemas'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { Tx } from '@aztec/stdlib/tx'; + +import { z } from 'zod'; + +/** Schema for the WorkerWallet API — extends WalletSchema with proveTx and registerAccount. 
*/ +export const WorkerWalletSchema = { + ...WalletSchema, + proveTx: z.function().args(ExecutionPayloadSchema, SendOptionsSchema).returns(Tx.schema), + registerAccount: z.function().args(schemas.Fr, schemas.Fr).returns(AztecAddress.schema), +}; From 80a54b50be9b0bbd6695f0126ab7c16ad2d7f65e Mon Sep 17 00:00:00 2001 From: spalladino <429604+spalladino@users.noreply.github.com> Date: Mon, 16 Feb 2026 16:39:09 +0000 Subject: [PATCH 40/62] refactor: make prover-node a subsystem of aztec-node MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changes the prover-node so, instead of a separate entrypoint, it acts as a subsystem of the aztec-node. This should help eliminate differences between the two. - Turns the prover-node from a standalone process into an optional subsystem of the aztec-node, controlled by an `enableProverNode` config flag - When enabled, `AztecNodeService.createAndSync` calls the prover-node factory passing shared subsystems (archiver, world-state, p2p, epoch cache, blob client, telemetry), so they are created once instead of duplicated - The prover-node factory creates only prover-specific sub-subsystems (broker, prover client, publisher factory, epoch monitor, L1 metrics, rollup contract) and returns a self-contained `ProverNode` - Adds owned-resources tracking to `ProverNode` so it doesn't stop shared resources on shutdown when running as a subsystem - Updates all e2e tests to create prover nodes as aztec-node subsystems instead of standalone instances - Breaks `TxSenderConfig` and `PublisherConfig` into distinct Sequencer and Prover configs, so they don't overlap with each other. 
Future work: - Remove the p2p client "prover" flavor - Rename prover-node to prover-client (following the naming of sequencer-client) and rename the current prover-client - Lift node-lib to aztec-node - Simplify config (eventually) 🤖 Generated with [Claude Code](https://claude.com/claude-code) --- yarn-project/aztec-node/package.json | 1 + .../aztec-node/src/aztec-node/config.test.ts | 14 +- .../aztec-node/src/aztec-node/config.ts | 32 +++- .../aztec-node/src/aztec-node/server.test.ts | 1 + .../aztec-node/src/aztec-node/server.ts | 70 ++++++-- yarn-project/aztec-node/tsconfig.json | 3 + .../aztec/src/cli/aztec_start_action.ts | 8 +- .../aztec/src/cli/aztec_start_options.ts | 4 - yarn-project/aztec/src/cli/cmds/start_node.ts | 56 ++++++- .../aztec/src/cli/cmds/start_prover_node.ts | 124 -------------- .../aztec/src/local-network/local-network.ts | 24 ++- .../src/composed/ha/e2e_ha_full.test.ts | 13 +- .../epochs_l1_reorgs.parallel.test.ts | 29 ++-- .../epochs_long_proving_time.test.ts | 2 +- .../src/e2e_epochs/epochs_multi_proof.test.ts | 8 +- .../e2e_epochs/epochs_partial_proof.test.ts | 2 +- .../epochs_proof_fails.parallel.test.ts | 7 +- .../epochs_proof_public_cross_chain.test.ts | 2 +- .../end-to-end/src/e2e_epochs/epochs_test.ts | 25 +-- .../epochs_upload_failed_proof.test.ts | 2 +- .../e2e_l1_publisher/e2e_l1_publisher.test.ts | 5 - .../end-to-end/src/e2e_multi_eoa.test.ts | 2 +- .../e2e_multi_validator_node.test.ts | 2 +- .../end-to-end/src/e2e_p2p/add_rollup.test.ts | 19 +-- .../fee_asset_price_oracle_gossip.test.ts | 8 +- .../src/e2e_p2p/gossip_network.test.ts | 10 +- .../e2e_p2p/valid_epoch_pruned_slash.test.ts | 2 +- .../end-to-end/src/e2e_snapshot_sync.test.ts | 29 +--- .../end-to-end/src/e2e_synching.test.ts | 10 +- .../src/fixtures/e2e_prover_test.ts | 60 +++---- yarn-project/end-to-end/src/fixtures/setup.ts | 122 ++++++-------- .../end-to-end/src/fixtures/setup_p2p_test.ts | 26 +-- yarn-project/foundation/src/config/env_var.ts | 1 + 
yarn-project/p2p/src/client/factory.ts | 6 +- yarn-project/p2p/src/client/interface.ts | 5 +- .../prover-node/src/bin/run-failed-epoch.ts | 5 +- yarn-project/prover-node/src/config.test.ts | 17 +- yarn-project/prover-node/src/config.ts | 50 +++--- yarn-project/prover-node/src/factory.ts | 135 +++++---------- yarn-project/prover-node/src/prover-node.ts | 9 +- .../src/prover-publisher-factory.ts | 4 +- .../src/client/sequencer-client.ts | 8 +- yarn-project/sequencer-client/src/config.ts | 16 +- .../sequencer-client/src/publisher/config.ts | 155 +++++++++++++----- .../src/publisher/sequencer-publisher.ts | 5 +- .../txe/src/state_machine/dummy_p2p_client.ts | 6 +- yarn-project/txe/src/state_machine/index.ts | 1 + yarn-project/yarn.lock | 1 + 48 files changed, 542 insertions(+), 604 deletions(-) delete mode 100644 yarn-project/aztec/src/cli/cmds/start_prover_node.ts diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index 77fb0c217327..be268d1753df 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -82,6 +82,7 @@ "@aztec/p2p": "workspace:^", "@aztec/protocol-contracts": "workspace:^", "@aztec/prover-client": "workspace:^", + "@aztec/prover-node": "workspace:^", "@aztec/sequencer-client": "workspace:^", "@aztec/simulator": "workspace:^", "@aztec/slasher": "workspace:^", diff --git a/yarn-project/aztec-node/src/aztec-node/config.test.ts b/yarn-project/aztec-node/src/aztec-node/config.test.ts index 29de6785796a..0cbd120fba45 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.test.ts @@ -1,7 +1,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import type { EthPrivateKey } from '@aztec/node-keystore'; import type { SharedNodeConfig } from '@aztec/node-lib/config'; -import type { SequencerClientConfig, TxSenderConfig } from '@aztec/sequencer-client/config'; +import type { SequencerClientConfig, 
SequencerTxSenderConfig } from '@aztec/sequencer-client/config'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { ValidatorClientConfig } from '@aztec/validator-client/config'; @@ -33,7 +33,7 @@ describe('createKeyStoreForValidator', () => { web3SignerUrl?: string, validatorAddresses: EthAddress[] = [], publisherAddresses: EthAddress[] = [], - ): TxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig => { + ): SequencerTxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig => { const mockValidatorPrivateKeys = validatorKeys.length > 0 ? { @@ -46,14 +46,14 @@ describe('createKeyStoreForValidator', () => { return { validatorPrivateKeys: mockValidatorPrivateKeys, - publisherPrivateKeys: mockPublisherPrivateKeys, + sequencerPublisherPrivateKeys: mockPublisherPrivateKeys, coinbase: coinbase, feeRecipient: feeRecipient, web3SignerUrl, validatorAddresses: validatorAddresses.map(addr => addr), - publisherAddresses: publisherAddresses.map(addr => addr), + sequencerPublisherAddresses: publisherAddresses.map(addr => addr), l1Contracts: { rollupAddress: EthAddress.random() }, - } as TxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig; + } as SequencerTxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig; }; beforeAll(async () => { @@ -69,11 +69,11 @@ describe('createKeyStoreForValidator', () => { it('should return undefined when validatorPrivateKeys is undefined', () => { const config = { validatorPrivateKeys: undefined, - publisherPrivateKeys: undefined, + sequencerPublisherPrivateKeys: undefined, coinbase: undefined, feeRecipient: undefined, l1Contracts: { rollupAddress: EthAddress.random() }, - } as unknown as TxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig; + } as unknown as SequencerTxSenderConfig & ValidatorClientConfig & SequencerClientConfig & SharedNodeConfig; const result = 
createKeyStoreForValidator(config); expect(result).toBeUndefined(); }); diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 6b89c57a52ac..27d433cc6db4 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -13,9 +13,14 @@ import { import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config'; import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { type ProverClientUserConfig, proverClientConfigMappings } from '@aztec/prover-client/config'; +import { + type ProverNodeConfig, + proverNodeConfigMappings, + specificProverNodeConfigMappings, +} from '@aztec/prover-node/config'; import { type SequencerClientConfig, - type TxSenderConfig, + type SequencerTxSenderConfig, sequencerClientConfigMappings, } from '@aztec/sequencer-client/config'; import { slasherConfigMappings } from '@aztec/slasher'; @@ -46,16 +51,18 @@ export type AztecNodeConfig = ArchiverConfig & SharedNodeConfig & GenesisStateConfig & NodeRPCConfig & - SlasherConfig & { + SlasherConfig & + ProverNodeConfig & { /** L1 contracts addresses */ l1Contracts: L1ContractAddresses; /** Whether the validator is disabled for this node */ disableValidator: boolean; /** Whether to skip waiting for the archiver to be fully synced before starting other services */ skipArchiverInitialSync: boolean; - /** A flag to force verification of tx Chonk proofs. Only used for testnet */ debugForceTxProofVerification: boolean; + /** Whether to enable the prover node as a subsystem. 
*/ + enableProverNode: boolean; }; export const aztecNodeConfigMappings: ConfigMappingsType = { @@ -63,6 +70,7 @@ export const aztecNodeConfigMappings: ConfigMappingsType = { ...keyStoreConfigMappings, ...archiverConfigMappings, ...sequencerClientConfigMappings, + ...proverNodeConfigMappings, ...validatorClientConfigMappings, ...proverClientConfigMappings, ...worldStateConfigMappings, @@ -72,6 +80,7 @@ export const aztecNodeConfigMappings: ConfigMappingsType = { ...genesisStateConfigMappings, ...nodeRpcConfigMappings, ...slasherConfigMappings, + ...specificProverNodeConfigMappings, l1Contracts: { description: 'The deployed L1 contract addresses', nested: l1ContractAddressesMapping, @@ -91,6 +100,11 @@ export const aztecNodeConfigMappings: ConfigMappingsType = { description: 'Whether to skip waiting for the archiver to be fully synced before starting other services.', ...booleanConfigHelper(false), }, + enableProverNode: { + env: 'ENABLE_PROVER_NODE', + description: 'Whether to enable the prover node as a subsystem.', + ...booleanConfigHelper(false), + }, }; /** @@ -101,7 +115,7 @@ export function getConfigEnvVars(): AztecNodeConfig { return getConfigFromMappings(aztecNodeConfigMappings); } -type ConfigRequiredToBuildKeyStore = TxSenderConfig & SequencerClientConfig & SharedNodeConfig & ValidatorClientConfig; +type ConfigRequiredToBuildKeyStore = SequencerClientConfig & SharedNodeConfig & ValidatorClientConfig; function createKeyStoreFromWeb3Signer(config: ConfigRequiredToBuildKeyStore): KeyStore | undefined { const validatorKeyStores: ValidatorKeyStore[] = []; @@ -120,7 +134,7 @@ function createKeyStoreFromWeb3Signer(config: ConfigRequiredToBuildKeyStore): Ke feeRecipient: config.feeRecipient ?? AztecAddress.ZERO, coinbase: config.coinbase ?? config.validatorAddresses[0], remoteSigner: config.web3SignerUrl, - publisher: config.publisherAddresses ?? [], + publisher: config.sequencerPublisherAddresses ?? 
[], }); const keyStore: KeyStore = { @@ -145,8 +159,10 @@ function createKeyStoreFromPrivateKeys(config: ConfigRequiredToBuildKeyStore): K const coinbase = config.coinbase ?? EthAddress.fromString(privateKeyToAddress(ethPrivateKeys[0])); const feeRecipient = config.feeRecipient ?? AztecAddress.ZERO; - const publisherKeys = config.publisherPrivateKeys - ? config.publisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue())) + const publisherKeys = config.sequencerPublisherPrivateKeys + ? config.sequencerPublisherPrivateKeys.map((k: { getValue: () => string }) => + ethPrivateKeySchema.parse(k.getValue()), + ) : []; validatorKeyStores.push({ @@ -168,7 +184,7 @@ function createKeyStoreFromPrivateKeys(config: ConfigRequiredToBuildKeyStore): K } export function createKeyStoreForValidator( - config: TxSenderConfig & SequencerClientConfig & SharedNodeConfig, + config: SequencerTxSenderConfig & SequencerClientConfig & SharedNodeConfig, ): KeyStore | undefined { if (config.web3SignerUrl !== undefined && config.web3SignerUrl.length > 0) { return createKeyStoreFromWeb3Signer(config); diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 11405b80c3c1..2a124abe20d4 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -173,6 +173,7 @@ describe('aztec node', () => { undefined, undefined, undefined, + undefined, 12345, rollupVersion.toNumber(), globalVariablesBuilder, diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 30e0b38bbaf6..9d9a8a6d4f24 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -17,11 +17,13 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { count } from '@aztec/foundation/string'; import { DateProvider, Timer } from 
'@aztec/foundation/timer'; import { MembershipWitness, SiblingPath } from '@aztec/foundation/trees'; -import { KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore'; +import { type KeyStore, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore'; import { trySnapshotSync, uploadSnapshot } from '@aztec/node-lib/actions'; import { createForwarderL1TxUtilsFromSigners, createL1TxUtilsFromSigners } from '@aztec/node-lib/factories'; import { type P2P, type P2PClientDeps, createP2PClient, getDefaultAllowedSetupFunctions } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; +import { type ProverNode, type ProverNodeDeps, createProverNode } from '@aztec/prover-node'; +import { createKeyStoreForProver } from '@aztec/prover-node/config'; import { GlobalVariableBuilder, SequencerClient, type SequencerPublisher } from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator/server'; import { @@ -134,6 +136,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { protected readonly l1ToL2MessageSource: L1ToL2MessageSource, protected readonly worldStateSynchronizer: WorldStateSynchronizer, protected readonly sequencer: SequencerClient | undefined, + protected readonly proverNode: ProverNode | undefined, protected readonly slasherClient: SlasherClientInterface | undefined, protected readonly validatorsSentinel: Sentinel | undefined, protected readonly epochPruneWatcher: EpochPruneWatcher | undefined, @@ -176,10 +179,12 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { publisher?: SequencerPublisher; dateProvider?: DateProvider; p2pClientDeps?: P2PClientDeps; + proverNodeDeps?: Partial; } = {}, options: { prefilledPublicData?: PublicDataTreeLeaf[]; dontStartSequencer?: boolean; + dontStartProverNode?: boolean; } = {}, ): Promise { const config = { ...inputConfig }; // Copy the config so we dont mutate the input 
object @@ -189,16 +194,29 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { const dateProvider = deps.dateProvider ?? new DateProvider(); const ethereumChain = createEthereumChain(config.l1RpcUrls, config.l1ChainId); - // Build a key store from file if given or from environment otherwise + // Build a key store from file if given or from environment otherwise. + // We keep the raw KeyStore available so we can merge with prover keys if enableProverNode is set. let keyStoreManager: KeystoreManager | undefined; const keyStoreProvided = config.keyStoreDirectory !== undefined && config.keyStoreDirectory.length > 0; if (keyStoreProvided) { const keyStores = loadKeystores(config.keyStoreDirectory!); keyStoreManager = new KeystoreManager(mergeKeystores(keyStores)); } else { - const keyStore = createKeyStoreForValidator(config); - if (keyStore) { - keyStoreManager = new KeystoreManager(keyStore); + const rawKeyStores: KeyStore[] = []; + const validatorKeyStore = createKeyStoreForValidator(config); + if (validatorKeyStore) { + rawKeyStores.push(validatorKeyStore); + } + if (config.enableProverNode) { + const proverKeyStore = createKeyStoreForProver(config); + if (proverKeyStore) { + rawKeyStores.push(proverKeyStore); + } + } + if (rawKeyStores.length > 0) { + keyStoreManager = new KeystoreManager( + rawKeyStores.length === 1 ? 
rawKeyStores[0] : mergeKeystores(rawKeyStores), + ); } } @@ -209,10 +227,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { if (keyStoreManager === undefined) { throw new Error('Failed to create key store, a requirement for running a validator'); } - if (!keyStoreProvided) { - log.warn( - 'KEY STORE CREATED FROM ENVIRONMENT, IT IS RECOMMENDED TO USE A FILE-BASED KEY STORE IN PRODUCTION ENVIRONMENTS', - ); + if (!keyStoreProvided && process.env.NODE_ENV !== 'test') { + log.warn("Keystore created from env: it's recommended to use a file-based key store for production"); } ValidatorClient.validateKeyStoreConfiguration(keyStoreManager, log); } @@ -254,7 +270,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { ); } - const blobClient = await createBlobClientWithFileStores(config, createLogger('node:blob-client:client')); + const blobClient = await createBlobClientWithFileStores(config, log.createChild('blob-client')); // attempt snapshot sync if possible await trySnapshotSync(config, log); @@ -417,11 +433,11 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { ); await slasherClient.start(); - const l1TxUtils = config.publisherForwarderAddress + const l1TxUtils = config.sequencerPublisherForwarderAddress ? 
await createForwarderL1TxUtilsFromSigners( publicClient, keyStoreManager!.createAllValidatorPublisherSigners(), - config.publisherForwarderAddress, + config.sequencerPublisherForwarderAddress, { ...config, scope: 'sequencer' }, { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider, kzg: Blob.getViemKzgInstance() }, ) @@ -466,6 +482,29 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { log.warn(`Sequencer created but not started`); } + // Create prover node subsystem if enabled + let proverNode: ProverNode | undefined; + if (config.enableProverNode) { + proverNode = await createProverNode(config, { + ...deps.proverNodeDeps, + telemetry, + dateProvider, + archiver, + worldStateSynchronizer, + p2pClient, + epochCache, + blobClient, + keyStoreManager, + }); + + if (!options.dontStartProverNode) { + await proverNode.start(); + log.info(`Prover node subsystem started`); + } else { + log.info(`Prover node subsystem created but not started`); + } + } + const globalVariableBuilder = new GlobalVariableBuilder({ ...config, rollupVersion: BigInt(config.rollupVersion), @@ -482,6 +521,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { archiver, worldStateSynchronizer, sequencer, + proverNode, slasherClient, validatorsSentinel, epochPruneWatcher, @@ -507,6 +547,11 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return this.sequencer; } + /** Returns the prover node subsystem, if enabled. 
*/ + public getProverNode(): ProverNode | undefined { + return this.proverNode; + } + public getBlockSource(): L2BlockSource { return this.blockSource; } @@ -810,6 +855,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { await tryStop(this.slasherClient); await tryStop(this.proofVerifier); await tryStop(this.sequencer); + await tryStop(this.proverNode); await tryStop(this.p2pClient); await tryStop(this.worldStateSynchronizer); await tryStop(this.blockSource); diff --git a/yarn-project/aztec-node/tsconfig.json b/yarn-project/aztec-node/tsconfig.json index e640323316a3..272c00696903 100644 --- a/yarn-project/aztec-node/tsconfig.json +++ b/yarn-project/aztec-node/tsconfig.json @@ -57,6 +57,9 @@ { "path": "../prover-client" }, + { + "path": "../prover-node" + }, { "path": "../sequencer-client" }, diff --git a/yarn-project/aztec/src/cli/aztec_start_action.ts b/yarn-project/aztec/src/cli/aztec_start_action.ts index 8217313dd09c..dcda7bc1f568 100644 --- a/yarn-project/aztec/src/cli/aztec_start_action.ts +++ b/yarn-project/aztec/src/cli/aztec_start_action.ts @@ -48,15 +48,17 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg signalHandlers.push(stop); services.node = [node, AztecNodeApiSchema]; } else { + // Route --prover-node through startNode + if (options.proverNode && !options.node) { + options.node = true; + } + if (options.node) { const { startNode } = await import('./cmds/start_node.js'); ({ config } = await startNode(options, signalHandlers, services, adminServices, userLog)); } else if (options.bot) { const { startBot } = await import('./cmds/start_bot.js'); await startBot(options, signalHandlers, services, userLog); - } else if (options.proverNode) { - const { startProverNode } = await import('./cmds/start_prover_node.js'); - ({ config } = await startProverNode(options, signalHandlers, services, userLog)); } else if (options.archiver) { const { startArchiver } = await 
import('./cmds/start_archiver.js'); ({ config } = await startArchiver(options, signalHandlers, services)); diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 46ef250c1359..d99dfab99690 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -222,12 +222,8 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { 'proverNode', omitConfigMappings(proverNodeConfigMappings, [ // filter out options passed separately - ...getKeys(archiverConfigMappings), ...getKeys(proverBrokerConfigMappings), ...getKeys(proverAgentConfigMappings), - ...getKeys(p2pConfigMappings), - ...getKeys(worldStateConfigMappings), - ...getKeys(sharedNodeConfigMappings), ]), ), ], diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index cb3e211d2b12..abed355ce4f6 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -6,13 +6,16 @@ import { getL1Config } from '@aztec/cli/config'; import { getPublicClient } from '@aztec/ethereum/client'; import { SecretValue } from '@aztec/foundation/config'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; +import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici'; import type { LogFn } from '@aztec/foundation/log'; +import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; import { type CliPXEOptions, type PXEConfig, allPxeConfigMappings } from '@aztec/pxe/config'; import { AztecNodeAdminApiSchema, AztecNodeApiSchema } from '@aztec/stdlib/interfaces/client'; -import { P2PApiSchema } from '@aztec/stdlib/interfaces/server'; +import { P2PApiSchema, ProverNodeApiSchema, type ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import { type TelemetryClientConfig, initTelemetryClient, + makeTracedFetch, 
telemetryClientConfigMappings, } from '@aztec/telemetry-client'; import { EmbeddedWallet } from '@aztec/wallets/embedded'; @@ -25,6 +28,8 @@ import { preloadCrsDataForVerifying, setupUpdateMonitor, } from '../util.js'; +import { getVersions } from '../versioning.js'; +import { startProverBroker } from './start_prover_broker.js'; export async function startNode( options: any, @@ -45,9 +50,32 @@ export async function startNode( ...relevantOptions, }; + // Prover node configuration and broker setup + // REFACTOR: Move the broker setup out of here and into the prover-node factory + let broker: ProvingJobBroker | undefined = undefined; if (options.proverNode) { - userLog(`Running a Prover Node within a Node is not yet supported`); - process.exit(1); + nodeConfig.enableProverNode = true; + if (nodeConfig.proverAgentCount === 0) { + userLog( + `Running prover node without local prover agent. Connect prover agents or pass --proverAgent.proverAgentCount`, + ); + } + if (nodeConfig.proverBrokerUrl) { + // at 1TPS we'd enqueue ~1k chonk verifier proofs and ~1k AVM proofs immediately + // set a lower connection limit such that we don't overload the server + // Keep retrying up to 30s + const fetch = makeTracedFetch( + [1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3], + false, + makeUndiciFetch(new Agent({ connections: 100 })), + ); + broker = createProvingJobBrokerClient(nodeConfig.proverBrokerUrl, getVersions(nodeConfig), fetch); + } else if (options.proverBroker) { + ({ broker } = await startProverBroker(options, signalHandlers, services, userLog)); + } else { + userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`); + process.exit(1); + } } await preloadCrsDataForVerifying(nodeConfig, userLog); @@ -101,12 +129,17 @@ export async function startNode( ...extractNamespacedOptions(options, 'sequencer'), }; // If no publisher private keys have been given, use the first validator key - if (sequencerConfig.publisherPrivateKeys === undefined || 
!sequencerConfig.publisherPrivateKeys.length) { + if ( + sequencerConfig.sequencerPublisherPrivateKeys === undefined || + !sequencerConfig.sequencerPublisherPrivateKeys.length + ) { if (sequencerConfig.validatorPrivateKeys?.getValue().length) { - sequencerConfig.publisherPrivateKeys = [new SecretValue(sequencerConfig.validatorPrivateKeys.getValue()[0])]; + sequencerConfig.sequencerPublisherPrivateKeys = [ + new SecretValue(sequencerConfig.validatorPrivateKeys.getValue()[0]), + ]; } } - nodeConfig.publisherPrivateKeys = sequencerConfig.publisherPrivateKeys; + nodeConfig.sequencerPublisherPrivateKeys = sequencerConfig.sequencerPublisherPrivateKeys; } if (nodeConfig.p2pEnabled) { @@ -120,13 +153,22 @@ export async function startNode( const telemetry = await initTelemetryClient(telemetryConfig); // Create and start Aztec Node - const node = await createAztecNode(nodeConfig, { telemetry }, { prefilledPublicData }); + const node = await createAztecNode(nodeConfig, { telemetry, proverBroker: broker }, { prefilledPublicData }); // Add node and p2p to services list services.node = [node, AztecNodeApiSchema]; services.p2p = [node.getP2P(), P2PApiSchema]; adminServices.nodeAdmin = [node, AztecNodeAdminApiSchema]; + // Register prover-node services if the prover node subsystem is running + const proverNode = node.getProverNode(); + if (proverNode) { + services.prover = [proverNode, ProverNodeApiSchema]; + if (!nodeConfig.proverBrokerUrl) { + services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema]; + } + } + // Add node stop function to signal handlers signalHandlers.push(node.stop.bind(node)); diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts deleted file mode 100644 index 0778616eee3a..000000000000 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { getInitialTestAccountsData } from '@aztec/accounts/testing'; 
-import { Fr } from '@aztec/aztec.js/fields'; -import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils'; -import { getL1Config } from '@aztec/cli/config'; -import { getPublicClient } from '@aztec/ethereum/client'; -import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; -import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici'; -import type { LogFn } from '@aztec/foundation/log'; -import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; -import { - type ProverNodeConfig, - createProverNode, - getProverNodeConfigFromEnv, - proverNodeConfigMappings, -} from '@aztec/prover-node'; -import { P2PApiSchema, ProverNodeApiSchema, type ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; -import { initTelemetryClient, makeTracedFetch, telemetryClientConfigMappings } from '@aztec/telemetry-client'; -import { getGenesisValues } from '@aztec/world-state/testing'; - -import { extractRelevantOptions, preloadCrsDataForVerifying, setupUpdateMonitor } from '../util.js'; -import { getVersions } from '../versioning.js'; -import { startProverBroker } from './start_prover_broker.js'; - -export async function startProverNode( - options: any, - signalHandlers: (() => Promise)[], - services: NamespacedApiHandlers, - userLog: LogFn, -): Promise<{ config: ProverNodeConfig }> { - if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { - userLog(`Starting a prover-node with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); - process.exit(1); - } - - let proverConfig = { - ...getProverNodeConfigFromEnv(), // get default config from env - ...extractRelevantOptions(options, proverNodeConfigMappings, 'proverNode'), // override with command line options - }; - - if (!proverConfig.l1Contracts.registryAddress || proverConfig.l1Contracts.registryAddress.isZero()) { - throw new Error('L1 registry address is required to start a Prover Node'); - 
} - - const followsCanonicalRollup = typeof proverConfig.rollupVersion !== 'number'; - const { addresses, config } = await getL1Config( - proverConfig.l1Contracts.registryAddress, - proverConfig.l1RpcUrls, - proverConfig.l1ChainId, - proverConfig.rollupVersion, - ); - process.env.ROLLUP_CONTRACT_ADDRESS ??= addresses.rollupAddress.toString(); - proverConfig.l1Contracts = addresses; - proverConfig = { ...proverConfig, ...config }; - - const testAccounts = proverConfig.testAccounts ? (await getInitialTestAccountsData()).map(a => a.address) : []; - const sponsoredFPCAccounts = proverConfig.sponsoredFPC ? [await getSponsoredFPCAddress()] : []; - const initialFundedAccounts = testAccounts.concat(sponsoredFPCAccounts); - - userLog(`Initial funded accounts: ${initialFundedAccounts.map(a => a.toString()).join(', ')}`); - const { genesisArchiveRoot, prefilledPublicData } = await getGenesisValues(initialFundedAccounts); - - userLog(`Genesis archive root: ${genesisArchiveRoot.toString()}`); - - if (!Fr.fromHexString(config.genesisArchiveTreeRoot).equals(genesisArchiveRoot)) { - throw new Error( - `The computed genesis archive tree root ${genesisArchiveRoot} does not match the expected genesis archive tree root ${config.genesisArchiveTreeRoot} for the rollup deployed at ${addresses.rollupAddress}`, - ); - } - - const telemetry = await initTelemetryClient(extractRelevantOptions(options, telemetryClientConfigMappings, 'tel')); - - let broker: ProvingJobBroker; - if (proverConfig.proverBrokerUrl) { - // at 1TPS we'd enqueue ~1k chonk verifier proofs and ~1k AVM proofs immediately - // set a lower connection limit such that we don't overload the server - // Keep retrying up to 30s - const fetch = makeTracedFetch( - [1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3], - false, - makeUndiciFetch(new Agent({ connections: 100 })), - ); - broker = createProvingJobBrokerClient(proverConfig.proverBrokerUrl, getVersions(proverConfig), fetch); - } else if (options.proverBroker) { - ({ broker } = await 
startProverBroker(options, signalHandlers, services, userLog)); - } else { - userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`); - process.exit(1); - } - - if (proverConfig.proverAgentCount === 0) { - userLog( - `Running prover node without local prover agent. Connect one or more prover agents to this node or pass --proverAgent.proverAgentCount`, - ); - } - - await preloadCrsDataForVerifying(proverConfig, userLog); - - const proverNode = await createProverNode(proverConfig, { telemetry, broker }, { prefilledPublicData }); - services.proverNode = [proverNode, ProverNodeApiSchema]; - - if (proverNode.getP2P()) { - services.p2p = [proverNode.getP2P(), P2PApiSchema]; - } - - if (!proverConfig.proverBrokerUrl) { - services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema]; - } - - signalHandlers.push(proverNode.stop.bind(proverNode)); - - await proverNode.start(); - - if (proverConfig.autoUpdate !== 'disabled' && proverConfig.autoUpdateUrl) { - await setupUpdateMonitor( - proverConfig.autoUpdate, - new URL(proverConfig.autoUpdateUrl), - followsCanonicalRollup, - getPublicClient(proverConfig), - proverConfig.l1Contracts.registryAddress, - signalHandlers, - ); - } - return { config: proverConfig }; -} diff --git a/yarn-project/aztec/src/local-network/local-network.ts b/yarn-project/aztec/src/local-network/local-network.ts index b25c9bf8dce1..733ddd550a84 100644 --- a/yarn-project/aztec/src/local-network/local-network.ts +++ b/yarn-project/aztec/src/local-network/local-network.ts @@ -18,6 +18,7 @@ import type { LogFn } from '@aztec/foundation/log'; import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; +import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; 
import { type TelemetryClient, @@ -105,12 +106,14 @@ export async function createLocalNetwork(config: Partial = { }; const hdAccount = mnemonicToAccount(config.l1Mnemonic || DefaultMnemonic); if ( - aztecNodeConfig.publisherPrivateKeys == undefined || - !aztecNodeConfig.publisherPrivateKeys.length || - aztecNodeConfig.publisherPrivateKeys[0].getValue() === NULL_KEY + aztecNodeConfig.sequencerPublisherPrivateKeys == undefined || + !aztecNodeConfig.sequencerPublisherPrivateKeys.length || + aztecNodeConfig.sequencerPublisherPrivateKeys[0].getValue() === NULL_KEY ) { const privKey = hdAccount.getHdKey().privateKey; - aztecNodeConfig.publisherPrivateKeys = [new SecretValue(`0x${Buffer.from(privKey!).toString('hex')}` as const)]; + aztecNodeConfig.sequencerPublisherPrivateKeys = [ + new SecretValue(`0x${Buffer.from(privKey!).toString('hex')}` as const), + ]; } if (!aztecNodeConfig.validatorPrivateKeys?.getValue().length) { const privKey = hdAccount.getHdKey().privateKey; @@ -221,7 +224,12 @@ export async function createLocalNetwork(config: Partial = { */ export async function createAztecNode( config: Partial = {}, - deps: { telemetry?: TelemetryClient; blobClient?: BlobClientInterface; dateProvider?: DateProvider } = {}, + deps: { + telemetry?: TelemetryClient; + blobClient?: BlobClientInterface; + dateProvider?: DateProvider; + proverBroker?: ProvingJobBroker; + } = {}, options: { prefilledPublicData?: PublicDataTreeLeaf[] } = {}, ) { // TODO(#12272): will clean this up. This is criminal. 
@@ -231,6 +239,10 @@ export async function createAztecNode( ...config, l1Contracts: { ...l1Contracts, ...config.l1Contracts }, }; - const node = await AztecNodeService.createAndSync(aztecNodeConfig, deps, options); + const node = await AztecNodeService.createAndSync( + aztecNodeConfig, + { ...deps, proverNodeDeps: { broker: deps.proverBroker } }, + options, + ); return node; } diff --git a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts index be6446fa4b9b..1ce73ebffd1c 100644 --- a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts +++ b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts @@ -135,7 +135,7 @@ describe('HA Full Setup', () => { prefilledPublicData, } = await setup(1, { initialValidators, - publisherPrivateKeys: [new SecretValue(publisherPrivateKeys[0])], + sequencerPublisherPrivateKeys: [new SecretValue(publisherPrivateKeys[0])], aztecTargetCommitteeSize: COMMITTEE_SIZE, minTxsPerBlock: 1, archiverPollingIntervalMS: 200, @@ -151,12 +151,7 @@ describe('HA Full Setup', () => { // Enable slashing for testing governance + slashing vote coordination slasherFlavor: 'tally', slashingRoundSizeInEpochs: 1, // 32 slots (1 epoch) - slashingQuorum: 17, // >50% of 32 slots for tally quorum - // Prover node will use publisherPrivateKeys directly, not Web3Signer - proverNodeConfig: { - web3SignerUrl: undefined, - publisherAddresses: undefined, - }, + slashingQuorum: 17, // >50% of 32 slots for tally quorum, })); logger.info(`Bootstrap node setup complete (validation disabled)`); @@ -203,10 +198,10 @@ describe('HA Full Setup', () => { bootstrapNodes: [bootstrapNodeEnr], web3SignerUrl, validatorAddresses: attesterAddresses.map(addr => EthAddress.fromString(addr)), - publisherAddresses: publisherAddresses.map(addr => EthAddress.fromString(addr)), + sequencerPublisherAddresses: publisherAddresses.map(addr => EthAddress.fromString(addr)), validatorPrivateKeys: new 
SecretValue(attesterPrivateKeys), // Each node has a unique publisher key - publisherPrivateKeys: [new SecretValue(publisherPrivateKeys[i])], + sequencerPublisherPrivateKeys: [new SecretValue(publisherPrivateKeys[i])], }; const nodeService = await withLoggerBindings({ actor: `HA-${i}` }, async () => { diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts index 776a9374a093..186b3b02c12d 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts @@ -17,7 +17,6 @@ import { retryUntil } from '@aztec/foundation/retry'; import { hexToBuffer } from '@aztec/foundation/string'; import { executeTimeout } from '@aztec/foundation/timer'; import type { TestContract } from '@aztec/noir-test-contracts.js/Test'; -import type { ProverNode } from '@aztec/prover-node'; import { jest } from '@jest/globals'; import 'jest-extended'; @@ -35,7 +34,6 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { let logger: Logger; let node: AztecNode; let archiver: Archiver; - let proverNode: ProverNode; let monitor: ChainMonitor; let proverDelayer: Delayer; let sequencerDelayer: Delayer; @@ -79,7 +77,6 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { ({ proverDelayer, sequencerDelayer, context, logger, monitor, L1_BLOCK_TIME_IN_S, L2_SLOT_DURATION_IN_S } = test); node = context.aztecNode; archiver = (node as AztecNodeService).getBlockSource() as Archiver; - proverNode = context.proverNode!; from = context.accounts[0]; contract = await test.registerTestContract(context.wallet); }); @@ -133,9 +130,9 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { epochDurationSeconds * 4 * 1000, ); - // Stop the prover node so it doesn't re-submit the proof after we've removed it + // Stop the prover node (by stopping its hosting aztec node) so it doesn't re-submit the proof after we've removed it 
logger.warn(`Proof for block ${provenBlockEvent.provenCheckpointNumber} mined, stopping prover node`); - await proverNode.stop(); + await test.proverNodes[0].stop(); // And remove the proof from L1 await context.cheatCodes.eth.reorgTo(provenBlockEvent.l1BlockNumber - 1); @@ -194,23 +191,17 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { ); await retryUntil(() => getProvenCheckpointNumber(node).then(cp => cp >= provenCheckpoint), 'node sync', 10, 0.1); - // Stop the prover node - await proverNode.stop(); + // Stop the prover node (by stopping its hosting aztec node) + await test.proverNodes[0].stop(); // Remove the proof from L1 but do not change the block number await context.cheatCodes.eth.reorgWithReplacement(1); await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toEqual(initialProvenCheckpoint); // Create another prover node so it submits a proof and wait until it is submitted - // Use a longer timeout to allow the new prover to sync and generate a proof - const newProverNode = await test.createProverNode(); - const provenCheckpointRetry = await test.waitUntilProvenCheckpointNumber( - targetProvenCheckpoint, - epochDurationSeconds, - ); - await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toBeGreaterThanOrEqual( - targetProvenCheckpoint, - ); + await test.createProverNode(); + const provenCheckpointRetry = await test.waitUntilProvenCheckpointNumber(CheckpointNumber(1)); + await expect(monitor.run(true).then(m => m.provenCheckpointNumber)).resolves.toBeGreaterThanOrEqual(1); // Check that the node has followed along logger.warn(`Testing old node`); @@ -226,7 +217,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { await test.assertMultipleBlocksPerSlot(2); logger.warn(`Test succeeded`); - await newProverNode.stop(); + // New prover's aztec node is stopped in test.teardown() }); it('restores L2 blocks if a proof is added due to an L1 reorg', async () => { @@ -253,10 +244,10 @@ 
describe('e2e_epochs/epochs_l1_reorgs', () => { `End of epoch ${epochToWaitFor} submission window (L1 block ${await monitor.run(true).then(m => m.l1BlockNumber)}).`, ); - // Grab the prover's tx to submit it later as part of a reorg and stop the prover + // Grab the prover's tx to submit it later as part of a reorg and stop the prover (by stopping its hosting aztec node) const [proofTx] = proverDelayer.getCancelledTxs(); expect(proofTx).toBeDefined(); - await proverNode.stop(); + await test.proverNodes[0].stop(); logger.warn(`Prover node stopped.`); // Wait for the node to prune diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts index d640bea37cc6..669132d4efbb 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts @@ -42,7 +42,7 @@ describe('e2e_epochs/epochs_long_proving_time', () => { // Wait until we hit the target proven block number, and keep an eye on how many proving jobs are run in parallel. 
let maxJobCount = 0; while (monitor.provenCheckpointNumber === undefined || monitor.provenCheckpointNumber < targetProvenBlockNumber) { - const jobs = await test.proverNodes[0].getJobs(); + const jobs = await test.proverNodes[0].getProverNode()!.getJobs(); if (jobs.length > maxJobCount) { maxJobCount = jobs.length; logger.info(`Updated max job count to ${maxJobCount}`, jobs); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts index 4c4fb94806b2..b7a8e92fda53 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts @@ -44,8 +44,8 @@ describe('e2e_epochs/epochs_multi_proof', () => { // Add a delay to prover nodes so not all txs land on the same place // We apply patches BEFORE starting the prover nodes to ensure all provers get the delay // This prevents the race condition where multiple provers submit to L1 at the same time - test.proverNodes.forEach((prover, index) => { - const proverManager = prover.getProver(); + test.proverNodes.forEach((proverAztecNode, index) => { + const proverManager = proverAztecNode.getProverNode()!.getProver(); const origCreateEpochProver = proverManager.createEpochProver.bind(proverManager); proverManager.createEpochProver = () => { const epochProver = origCreateEpochProver(); @@ -62,9 +62,9 @@ describe('e2e_epochs/epochs_multi_proof', () => { }); // Now start all prover nodes after patches have been applied - await Promise.all(test.proverNodes.map(prover => prover.start())); + await Promise.all(test.proverNodes.map(node => node.getProverNode()!.start())); - const proverIds = test.proverNodes.map(prover => prover.getProverId()); + const proverIds = test.proverNodes.map(node => node.getProverNode()!.getProverId()); logger.info(`Prover nodes running with ids ${proverIds.map(id => id.toString()).join(', ')}`); // Wait until the start of epoch one and 
collect info on epoch zero diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts index 86b2395f6406..bf87e55066d1 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts @@ -29,7 +29,7 @@ describe('e2e_epochs/epochs_partial_proof', () => { await test.waitUntilCheckpointNumber(CheckpointNumber(4), test.L2_SLOT_DURATION_IN_S * 6); logger.info(`Kicking off partial proof`); - await test.context.proverNode!.startProof(EpochNumber(0)); + await test.context.proverNode!.getProverNode()!.startProof(EpochNumber(0)); await retryUntil(() => monitor.provenCheckpointNumber > CheckpointNumber(0), 'proof', 120, 1); logger.info(`Test succeeded with proven checkpoint number ${monitor.provenCheckpointNumber}`); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts index c228d7694280..ce32f2055b30 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.parallel.test.ts @@ -72,7 +72,7 @@ describe('e2e_epochs/epochs_proof_fails', () => { context.proverNode = proverNode; // Get the prover delayer from the newly created prover node - proverDelayer = proverNode.getDelayer()!; + proverDelayer = proverNode.getProverNode()!.getDelayer()!; // Hold off prover tx until end epoch 1 const [epoch2Start] = getTimestampRangeForEpoch(EpochNumber(2), constants); @@ -113,10 +113,11 @@ describe('e2e_epochs/epochs_proof_fails', () => { const proverNode = await test.createProverNode({ cancelTxOnTimeout: false, maxSpeedUpAttempts: 0 }); // Get the prover delayer from the newly created prover node - proverDelayer = proverNode.getDelayer()!; + const testProverNode = proverNode.getProverNode() as TestProverNode; + 
proverDelayer = testProverNode.getDelayer()!; // Inject a delay in prover node proving equal to the length of an epoch, to make sure deadline will be hit - const epochProverManager = (proverNode as TestProverNode).prover; + const epochProverManager = testProverNode.prover; const originalCreate = epochProverManager.createEpochProver.bind(epochProverManager); const finalizeEpochPromise = promiseWithResolvers(); let hasFinalizeEpochWaited = false; diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts index 3c71f20c852b..846cd5f82b96 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts @@ -32,7 +32,7 @@ describe('e2e_epochs/epochs_proof_public_cross_chain', () => { numberOfAccounts: 1, minTxsPerBlock: 1, disableAnvilTestWatcher: true, - publisherAllowInvalidStates: true, + sequencerPublisherAllowInvalidStates: true, }); ({ context, logger } = test); }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index d7818b85be44..a2e4eeaa2f7f 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -22,7 +22,7 @@ import { sleep } from '@aztec/foundation/sleep'; import { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; import { getMockPubSubP2PServiceFactory } from '@aztec/p2p/test-helpers'; -import { ProverNode, type ProverNodeConfig } from '@aztec/prover-node'; +import type { ProverNodeConfig } from '@aztec/prover-node'; import type { PXEConfig } from '@aztec/pxe/config'; import { type SequencerClient, type SequencerEvents, SequencerState } from '@aztec/sequencer-client'; import { type BlockParameter, EthAddress } from 
'@aztec/stdlib/block'; @@ -76,7 +76,7 @@ export class EpochsTestContext { public proverDelayer!: Delayer; public sequencerDelayer!: Delayer; - public proverNodes: ProverNode[] = []; + public proverNodes: AztecNodeService[] = []; public nodes: AztecNodeService[] = []; public epochDuration!: number; @@ -200,26 +200,29 @@ export class EpochsTestContext { const proverNodePrivateKey = this.getNextPrivateKey(); const proverIndex = this.proverNodes.length + 1; const { mockGossipSubNetwork } = this.context; - const proverNode = await withLoggerBindings({ actor: `prover-${proverIndex}` }, () => + const { proverNode } = await withLoggerBindings({ actor: `prover-${proverIndex}` }, () => createAndSyncProverNode( proverNodePrivateKey, { ...this.context.config, p2pEnabled: this.context.config.p2pEnabled || mockGossipSubNetwork !== undefined, - }, - { - dataDirectory: join(this.context.config.dataDirectory!, randomBytes(8).toString('hex')), proverId: EthAddress.fromNumber(proverIndex), dontStart: opts.dontStart, ...opts, }, - this.context.aztecNode, - this.context.prefilledPublicData ?? [], + { + dataDirectory: join(this.context.config.dataDirectory!, randomBytes(8).toString('hex')), + }, { dateProvider: this.context.dateProvider, - p2pClientDeps: mockGossipSubNetwork - ? { p2pServiceFactory: getMockPubSubP2PServiceFactory(mockGossipSubNetwork) } - : undefined, + p2pClientDeps: { + p2pServiceFactory: mockGossipSubNetwork ? getMockPubSubP2PServiceFactory(mockGossipSubNetwork) : undefined, + rpcTxProviders: [this.context.aztecNode], + }, + }, + { + prefilledPublicData: this.context.prefilledPublicData ?? 
[], + dontStart: opts.dontStart, }, ), ); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_upload_failed_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_upload_failed_proof.test.ts index 096868366a5c..28c24b6a3133 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_upload_failed_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_upload_failed_proof.test.ts @@ -52,7 +52,7 @@ describe('e2e_epochs/epochs_upload_failed_proof', () => { it('uploads failed proving job state and re-runs it on a fresh instance', async () => { // Make initial prover node fail to prove - const proverNode = test.proverNodes[0] as TestProverNode; + const proverNode = test.proverNodes[0].getProverNode() as TestProverNode; const proverManager = proverNode.getProver(); const origCreateEpochProver = proverManager.createEpochProver.bind(proverManager); proverManager.createEpochProver = () => { diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index d635d11c0ffb..9b36f58493ff 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -272,12 +272,7 @@ describe('L1Publisher integration', () => { publisher = new SequencerPublisher( { - l1RpcUrls: config.l1RpcUrls, - l1DebugRpcUrls: [], - l1Contracts: l1ContractAddresses, - publisherPrivateKeys: [new SecretValue(sequencerPK)], l1ChainId: chainId, - viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, }, { diff --git a/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts b/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts index 642be0a448bf..02bad7926b51 100644 --- a/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts +++ b/yarn-project/end-to-end/src/e2e_multi_eoa.test.ts @@ -80,7 +80,7 @@ describe('e2e_multi_eoa', () => { sequencerPollingIntervalMS: 200, 
worldStateBlockCheckIntervalMS: 200, blockCheckIntervalMS: 200, - publisherPrivateKeys: sequencerKeysAndAddresses.map(k => k.key), + sequencerPublisherPrivateKeys: sequencerKeysAndAddresses.map(k => k.key), l1PublisherKey: allKeysAndAddresses[0].key, maxSpeedUpAttempts: 0, // Disable speed ups, so that cancellation txs never make it through })); diff --git a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts index ab66bef2f868..35dccd29e152 100644 --- a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts +++ b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts @@ -79,7 +79,7 @@ describe('e2e_multi_validator_node', () => { } = await setup(1, { initialValidators, aztecTargetCommitteeSize: COMMITTEE_SIZE, - publisherPrivateKeys: publisherPrivateKeys.map(k => new SecretValue(k)), + sequencerPublisherPrivateKeys: publisherPrivateKeys.map(k => new SecretValue(k)), minTxsPerBlock: 1, archiverPollingIntervalMS: 200, sequencerPollingIntervalMS: 200, diff --git a/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts b/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts index 7cf04c384437..a1964ef8f74b 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts @@ -25,7 +25,6 @@ import { import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; import { protocolContractsHash } from '@aztec/protocol-contracts'; -import type { ProverNode } from '@aztec/prover-node'; import { getPXEConfig } from '@aztec/pxe/server'; import { computeL2ToL1MessageHash } from '@aztec/stdlib/hash'; import { tryStop } from '@aztec/stdlib/interfaces/server'; @@ -65,7 +64,7 @@ jest.setTimeout(1000 * 60 * 10); describe('e2e_p2p_add_rollup', () => { let t: P2PNetworkTest; let 
nodes: AztecNodeService[]; - let proverNode: ProverNode; + let proverAztecNode: AztecNodeService; let l1TxUtils: L1TxUtils; beforeAll(async () => { @@ -94,7 +93,7 @@ describe('e2e_p2p_add_rollup', () => { }); afterAll(async () => { - await tryStop(proverNode); + await tryStop(proverAztecNode); await t.stopNodes(nodes); await t.teardown(); for (let i = 0; i < NUM_VALIDATORS; i++) { @@ -246,7 +245,7 @@ describe('e2e_p2p_add_rollup', () => { // create a prover node that uses p2p only (not rpc) to gather txs to test prover tx collection t.logger.warn(`Creating prover node`); - proverNode = await createProverNode( + ({ proverNode: proverAztecNode } = await createProverNode( t.ctx.aztecNodeConfig, BOOT_NODE_UDP_PORT + NUM_VALIDATORS + 1, t.bootstrapNodeEnr, @@ -255,8 +254,7 @@ describe('e2e_p2p_add_rollup', () => { t.prefilledPublicData, `${DATA_DIR}-prover`, shouldCollectMetrics(), - ); - await proverNode.start(); + )); await sleep(4000); @@ -501,8 +499,8 @@ describe('e2e_p2p_add_rollup', () => { `Attesters new before: ${attestersBeforeNew.length}. Attesters new after: ${attestersAfterNew.length}`, ); - // Stop the prover node. - await proverNode.stop(); + // Stop the prover aztec node (which stops the prover subsystem). 
+ await proverAztecNode.stop(); // stop all nodes for (let i = 0; i < NUM_VALIDATORS; i++) { @@ -561,7 +559,7 @@ describe('e2e_p2p_add_rollup', () => { ); t.logger.warn(`Creating new prover node`); - proverNode = await createProverNode( + ({ proverNode: proverAztecNode } = await createProverNode( newConfig, BOOT_NODE_UDP_PORT + NUM_VALIDATORS + 1, t.bootstrapNodeEnr, @@ -570,8 +568,7 @@ describe('e2e_p2p_add_rollup', () => { prefilledPublicData, `${DATA_DIR_NEW}-prover`, shouldCollectMetrics(), - ); - await proverNode.start(); + )); // wait a bit for peers to discover each other await sleep(4000); diff --git a/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts b/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts index 8b385c374b6e..6f43df3cefec 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts @@ -6,7 +6,6 @@ import { CheckpointNumber } from '@aztec/foundation/branded-types'; import { Signature } from '@aztec/foundation/eth-signature'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; -import type { ProverNode } from '@aztec/prover-node'; import type { SequencerClient } from '@aztec/sequencer-client'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { CheckpointAttestation, ConsensusPayload } from '@aztec/stdlib/p2p'; @@ -47,7 +46,7 @@ const qosAlerts: AlertConfig[] = [ describe('e2e_p2p_network', () => { let t: P2PNetworkTest; let nodes: AztecNodeService[]; - let proverNode: ProverNode; + let proverNode: AztecNodeService; beforeEach(async () => { t = await P2PNetworkTest.create({ @@ -125,7 +124,7 @@ describe('e2e_p2p_network', () => { ); t.logger.warn(`Creating prover node`); - proverNode = await createProverNode( + ({ proverNode } = await createProverNode( { ...t.ctx.aztecNodeConfig, minTxsPerBlock: 0 }, BOOT_NODE_UDP_PORT + 
NUM_VALIDATORS + 1, t.bootstrapNodeEnr, @@ -134,8 +133,7 @@ describe('e2e_p2p_network', () => { t.prefilledPublicData, `${DATA_DIR}-prover`, shouldCollectMetrics(), - ); - await proverNode.start(); + )); // wait a bit for peers to discover each other await sleep(8000); diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 1978c217722b..ff561ecfff69 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -4,7 +4,6 @@ import { waitForTx } from '@aztec/aztec.js/node'; import { TxHash } from '@aztec/aztec.js/tx'; import { Signature } from '@aztec/foundation/eth-signature'; import { retryUntil } from '@aztec/foundation/retry'; -import type { ProverNode } from '@aztec/prover-node'; import type { SequencerClient } from '@aztec/sequencer-client'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { CheckpointAttestation, ConsensusPayload } from '@aztec/stdlib/p2p'; @@ -49,7 +48,7 @@ const qosAlerts: AlertConfig[] = [ describe('e2e_p2p_network', () => { let t: P2PNetworkTest; let nodes: AztecNodeService[]; - let proverNode: ProverNode; + let proverAztecNode: AztecNodeService; let monitoringNode: AztecNodeService; beforeEach(async () => { @@ -75,7 +74,7 @@ describe('e2e_p2p_network', () => { }); afterEach(async () => { - await tryStop(proverNode); + await tryStop(proverAztecNode); await tryStop(monitoringNode); await t.stopNodes(nodes); await t.teardown(); @@ -119,7 +118,7 @@ describe('e2e_p2p_network', () => { // create a prover node that uses p2p only (not rpc) to gather txs to test prover tx collection t.logger.warn(`Creating prover node`); - proverNode = await createProverNode( + ({ proverNode: proverAztecNode } = await createProverNode( t.ctx.aztecNodeConfig, BOOT_NODE_UDP_PORT + NUM_VALIDATORS + 1, t.bootstrapNodeEnr, @@ -128,8 +127,7 @@ describe('e2e_p2p_network', () => { 
t.prefilledPublicData, `${DATA_DIR}-prover`, shouldCollectMetrics(), - ); - await proverNode.start(); + )); t.logger.warn(`Creating non validator node`); const monitoringNodeConfig: AztecNodeConfig = { ...t.ctx.aztecNodeConfig, alwaysReexecuteBlockProposals: true }; diff --git a/yarn-project/end-to-end/src/e2e_p2p/valid_epoch_pruned_slash.test.ts b/yarn-project/end-to-end/src/e2e_p2p/valid_epoch_pruned_slash.test.ts index 6349efaecb31..8683138cb02a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/valid_epoch_pruned_slash.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/valid_epoch_pruned_slash.test.ts @@ -54,7 +54,7 @@ describe('e2e_p2p_valid_epoch_pruned_slash', () => { initialConfig: { enforceTimeTable: true, cancelTxOnTimeout: false, - publisherAllowInvalidStates: true, + sequencerPublisherAllowInvalidStates: true, listenAddress: '127.0.0.1', aztecEpochDuration, ethereumSlotDuration, diff --git a/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts b/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts index cc77061328af..728a9d0e398b 100644 --- a/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts +++ b/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts @@ -10,14 +10,12 @@ import { tryRmDir } from '@aztec/foundation/fs'; import { logger } from '@aztec/foundation/log'; import { withLoggerBindings } from '@aztec/foundation/log/server'; import { retryUntil } from '@aztec/foundation/retry'; -import { bufferToHex } from '@aztec/foundation/string'; -import { ProverNode, type ProverNodeConfig } from '@aztec/prover-node'; import { cp, mkdtemp, readFile, readdir, rm, writeFile } from 'fs/promises'; import { tmpdir } from 'os'; import { join } from 'path'; -import { type EndToEndContext, createAndSyncProverNode, getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; +import { type EndToEndContext, setup } from './fixtures/utils.js'; const L1_BLOCK_TIME_IN_S = process.env.L1_BLOCK_TIME ? 
parseInt(process.env.L1_BLOCK_TIME) : 8; const L2_TARGET_BLOCK_NUM = 3; @@ -68,19 +66,7 @@ describe('e2e_snapshot_sync', () => { ); }; - const createTestProverNode = async (config: Partial = {}) => { - log.warn('Creating and syncing a prover node...'); - const dataDirectory = join(context.config.dataDirectory!, randomBytes(8).toString('hex')); - return await createAndSyncProverNode( - bufferToHex(getPrivateKeyFromIndex(5)!), - context.config, - { ...config, realProofs: false, dataDirectory }, - context.aztecNode, - context.prefilledPublicData ?? [], - ); - }; - - const expectNodeSyncedToL2Block = async (node: AztecNode | ProverNode, blockNumber: number) => { + const expectNodeSyncedToL2Block = async (node: AztecNode, blockNumber: number) => { const tips = await node.getL2Tips(); expect(tips.proposed.number).toBeGreaterThanOrEqual(blockNumber); const worldState = await node.getWorldStateSyncStatus(); @@ -123,17 +109,6 @@ describe('e2e_snapshot_sync', () => { await node.stop(); }); - it('downloads snapshot when syncing new prover node', async () => { - log.warn(`Syncing brand new prover node with snapshot sync`); - const node = await createTestProverNode({ snapshotsUrls: [snapshotLocation], syncMode: 'snapshot' }); - - log.warn(`New node prover synced`); - await expectNodeSyncedToL2Block(node, L2_TARGET_BLOCK_NUM); - - log.warn(`Stopping new prover node`); - await node.stop(); - }); - it('downloads snapshot from multiple sources', async () => { log.warn(`Setting up multiple snapshot locations with different L1 block heights`); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 924321385e19..76a54cd05832 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -46,7 +46,6 @@ import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { EmpireSlashingProposerContract, GovernanceProposerContract, RollupContract } from 
'@aztec/ethereum/contracts'; import { createL1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import { CheckpointNumber } from '@aztec/foundation/branded-types'; -import { SecretValue } from '@aztec/foundation/config'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; @@ -67,7 +66,7 @@ import { type MockProxy, mock } from 'jest-mock-extended'; import { getContract } from 'viem'; import { mintTokensToPrivate } from './fixtures/token_utils.js'; -import { type EndToEndContext, getPrivateKeyFromIndex, setup, setupPXEAndGetWallet } from './fixtures/utils.js'; +import { type EndToEndContext, setup, setupPXEAndGetWallet } from './fixtures/utils.js'; import { TestWallet } from './test-wallet/test_wallet.js'; const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; @@ -408,8 +407,6 @@ describe('e2e_synching', () => { const blobClient = await createBlobClientWithFileStores(config, createLogger('test:blob-client:client')); - const sequencerPK: `0x${string}` = `0x${getPrivateKeyFromIndex(0)!.toString('hex')}`; - const l1TxUtils = createL1TxUtils( deployL1ContractsValues.l1Client, { logger, dateProvider, kzg: Blob.getViemKzgInstance() }, @@ -435,12 +432,7 @@ describe('e2e_synching', () => { const sequencerPublisherMetrics: MockProxy = mock(); const publisher = new SequencerPublisher( { - l1RpcUrls: config.l1RpcUrls, - l1DebugRpcUrls: [], - l1Contracts: deployL1ContractsValues.l1ContractAddresses, - publisherPrivateKeys: [new SecretValue(sequencerPK)], l1ChainId: 31337, - viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, }, { diff --git a/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts b/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts index d870f8c73231..6dad123c594a 100644 --- a/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts @@ 
-1,5 +1,5 @@ import type { InitialAccountData } from '@aztec/accounts/testing'; -import { type Archiver, createArchiver } from '@aztec/archiver'; +import { AztecNodeService } from '@aztec/aztec-node'; import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses'; import { type Logger, createLogger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; @@ -11,13 +11,11 @@ import { TestCircuitVerifier, } from '@aztec/bb-prover'; import { BackendType, Barretenberg } from '@aztec/bb.js'; -import { createBlobClientWithFileStores } from '@aztec/blob-client/client'; import type { DeployAztecL1ContractsReturnType } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import { Buffer32 } from '@aztec/foundation/buffer'; import { SecretValue } from '@aztec/foundation/config'; import { FeeAssetHandlerAbi } from '@aztec/l1-artifacts'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; -import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; import { getGenesisValues } from '@aztec/world-state/testing'; @@ -73,8 +71,8 @@ export class FullProverTest { circuitProofVerifier?: ClientProtocolCircuitVerifier; provenAsset!: TokenContract; context!: EndToEndContext; - private proverNode!: ProverNode; - private simulatedProverNode!: ProverNode; + private proverAztecNode!: AztecNodeService; + private simulatedProverAztecNode!: AztecNodeService; public l1Contracts!: DeployAztecL1ContractsReturnType; public proverAddress!: EthAddress; private minNumberOfTxsPerBlock: number; @@ -146,7 +144,7 @@ export class FullProverTest { // We don't wish to mark as proven automatically, so we set the flag to false this.context.watcher.setIsMarkingAsProven(false); - this.simulatedProverNode = this.context.proverNode!; + this.simulatedProverAztecNode = this.context.proverNode!; 
({ aztecNode: this.aztecNode, deployL1ContractsValues: this.l1Contracts, @@ -155,7 +153,6 @@ export class FullProverTest { this.aztecNodeAdmin = this.context.aztecNodeService; const config = this.context.aztecNodeConfig; - const blobClient = await createBlobClientWithFileStores(config, this.logger); // Configure a full prover PXE let acvmConfig: Awaited> | undefined; @@ -217,20 +214,13 @@ export class FullProverTest { this.provenWallet = provenWallet; this.logger.info(`Full prover PXE started`); - // Shutdown the current, simulated prover node + // Shutdown the current, simulated prover node (by stopping its hosting aztec node) this.logger.verbose('Shutting down simulated prover node'); - await this.simulatedProverNode.stop(); - - // Creating temp store and archiver for fully proven prover node - this.logger.verbose('Starting archiver for new prover node'); - const archiver = await createArchiver( - { ...this.context.aztecNodeConfig, dataDirectory: undefined }, - { blobClient, dateProvider: this.context.dateProvider }, - { blockUntilSync: true }, - ); + await this.simulatedProverAztecNode.stop(); // The simulated prover node (now shutdown) used private key index 2 const proverNodePrivateKey = getPrivateKeyFromIndex(2); + const proverNodePrivateKeyHex = `0x${proverNodePrivateKey!.toString('hex')}` as const; const proverNodeSenderAddress = privateKeyToAddress(new Buffer32(proverNodePrivateKey!).toString()); this.proverAddress = EthAddress.fromString(proverNodeSenderAddress); @@ -238,14 +228,21 @@ export class FullProverTest { await this.mintFeeJuice(proverNodeSenderAddress); this.logger.verbose('Starting prover node'); - const proverConfig: ProverNodeConfig = { - ...this.context.aztecNodeConfig, - txCollectionNodeRpcUrls: [], + const sponsoredFPCAddress = await getSponsoredFPCAddress(); + const { prefilledPublicData } = await getGenesisValues( + this.context.initialFundedAccounts.map(a => a.address).concat(sponsoredFPCAddress), + ); + + const proverNodeConfig: 
Parameters[0] = { + ...config, + enableProverNode: true, + disableValidator: true, dataDirectory: undefined, + txCollectionNodeRpcUrls: [], proverId: this.proverAddress, realProofs: this.realProofs, proverAgentCount: 2, - publisherPrivateKeys: [new SecretValue(`0x${proverNodePrivateKey!.toString('hex')}` as const)], + proverPublisherPrivateKeys: [new SecretValue(proverNodePrivateKeyHex)], proverNodeMaxPendingJobs: 100, proverNodeMaxParallelBlocksPerEpoch: 32, proverNodePollingIntervalMs: 100, @@ -255,21 +252,14 @@ export class FullProverTest { txGatheringTimeoutMs: 24_000, proverNodeFailedEpochStore: undefined, proverNodeEpochProvingDelayMs: undefined, + validatorPrivateKeys: new SecretValue([]), }; - const sponsoredFPCAddress = await getSponsoredFPCAddress(); - const { prefilledPublicData } = await getGenesisValues( - this.context.initialFundedAccounts.map(a => a.address).concat(sponsoredFPCAddress), - ); - this.proverNode = await createProverNode( - proverConfig, - { - aztecNodeTxProvider: this.aztecNode, - archiver: archiver as Archiver, - }, + + this.proverAztecNode = await AztecNodeService.createAndSync( + proverNodeConfig, + { dateProvider: this.context.dateProvider, p2pClientDeps: { rpcTxProviders: [this.aztecNode] } }, { prefilledPublicData }, ); - await this.proverNode.start(); - this.logger.warn(`Proofs are now enabled`, { realProofs: this.realProofs }); return this; } @@ -289,8 +279,8 @@ export class FullProverTest { await this.provenComponents[i].teardown(); } - // clean up the full prover node - await this.proverNode.stop(); + // clean up the full prover node (by stopping its hosting aztec node) + await this.proverAztecNode.stop(); await Barretenberg.destroySingleton(); await this.bbConfigCleanup?.(); diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index b329d7790583..6def0cbc67f6 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ 
-1,6 +1,5 @@ import { SchnorrAccountContractArtifact } from '@aztec/accounts/schnorr'; import { type InitialAccountData, generateSchnorrAccounts } from '@aztec/accounts/testing'; -import { type Archiver, createArchiver } from '@aztec/archiver'; import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses'; import { @@ -16,7 +15,6 @@ import { type Logger, createLogger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; import type { Wallet } from '@aztec/aztec.js/wallet'; import { AnvilTestWatcher, CheatCodes } from '@aztec/aztec/testing'; -import { createBlobClientWithFileStores } from '@aztec/blob-client/client'; import { SPONSORED_FPC_SALT } from '@aztec/constants'; import { isAnvilTestChain } from '@aztec/ethereum/chain'; import { createExtendedL1Client } from '@aztec/ethereum/client'; @@ -39,14 +37,13 @@ import { tryRmDir } from '@aztec/foundation/fs'; import { withLoggerBindings } from '@aztec/foundation/log/server'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; -import { TestDateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; +import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import type { P2PClientDeps } from '@aztec/p2p'; import { MockGossipSubNetwork, getMockPubSubP2PServiceFactory } from '@aztec/p2p/test-helpers'; import { protocolContractsHash } from '@aztec/protocol-contracts'; -import { type ProverNode, type ProverNodeConfig, type ProverNodeDeps, createProverNode } from '@aztec/prover-node'; +import type { ProverNodeConfig } from '@aztec/prover-node'; import { type PXEConfig, getPXEConfig } from '@aztec/pxe/server'; import type { 
SequencerClient } from '@aztec/sequencer-client'; import { type ContractInstanceWithAddress, getContractInstanceFromInstantiationParams } from '@aztec/stdlib/contract'; @@ -213,8 +210,8 @@ export type EndToEndContext = { aztecNodeService: AztecNodeService; /** Client to the Aztec Node admin interface. */ aztecNodeAdmin: AztecNodeAdmin; - /** The prover node service (only set if startProverNode is true) */ - proverNode: ProverNode | undefined; + /** The aztec node running the prover node subsystem (only set if startProverNode is true). */ + proverNode: AztecNodeService | undefined; /** A client to the sequencer service. */ sequencer: SequencerClient | undefined; /** Return values from deployAztecL1Contracts function. */ @@ -339,11 +336,11 @@ export async function setup( publisherPrivKeyHex = opts.l1PublisherKey.getValue(); publisherHdAccount = privateKeyToAccount(publisherPrivKeyHex); } else if ( - config.publisherPrivateKeys && - config.publisherPrivateKeys.length > 0 && - config.publisherPrivateKeys[0].getValue() != NULL_KEY + config.sequencerPublisherPrivateKeys && + config.sequencerPublisherPrivateKeys.length > 0 && + config.sequencerPublisherPrivateKeys[0].getValue() != NULL_KEY ) { - publisherPrivKeyHex = config.publisherPrivateKeys[0].getValue(); + publisherPrivKeyHex = config.sequencerPublisherPrivateKeys[0].getValue(); publisherHdAccount = privateKeyToAccount(publisherPrivKeyHex); } else if (!MNEMONIC) { throw new Error(`Mnemonic not provided and no publisher private key`); @@ -352,7 +349,7 @@ export async function setup( const publisherPrivKeyRaw = publisherHdAccount.getHdKey().privateKey; const publisherPrivKey = publisherPrivKeyRaw === null ? 
null : Buffer.from(publisherPrivKeyRaw); publisherPrivKeyHex = `0x${publisherPrivKey!.toString('hex')}` as const; - config.publisherPrivateKeys = [new SecretValue(publisherPrivKeyHex)]; + config.sequencerPublisherPrivateKeys = [new SecretValue(publisherPrivKeyHex)]; } if (config.coinbase === undefined) { @@ -499,33 +496,32 @@ export async function setup( ); const sequencerClient = aztecNodeService.getSequencer(); - let proverNode: ProverNode | undefined = undefined; + let proverNode: AztecNodeService | undefined = undefined; if (opts.startProverNode) { logger.verbose('Creating and syncing a simulated prover node...'); const proverNodePrivateKey = getPrivateKeyFromIndex(2); const proverNodePrivateKeyHex: Hex = `0x${proverNodePrivateKey!.toString('hex')}`; const proverNodeDataDirectory = path.join(directoryToCleanup, randomBytes(8).toString('hex')); - const proverNodeConfig = { - ...config.proverNodeConfig, - dataDirectory: proverNodeDataDirectory, - p2pEnabled: !!mockGossipSubNetwork, + + const p2pClientDeps: Partial> = { + p2pServiceFactory: mockGossipSubNetwork && getMockPubSubP2PServiceFactory(mockGossipSubNetwork!), + rpcTxProviders: [aztecNodeService], }; - proverNode = await createAndSyncProverNode( + + ({ proverNode } = await createAndSyncProverNode( proverNodePrivateKeyHex, config, - proverNodeConfig, - aztecNodeService, - prefilledPublicData, { - p2pClientDeps: mockGossipSubNetwork - ? 
{ p2pServiceFactory: getMockPubSubP2PServiceFactory(mockGossipSubNetwork) } - : undefined, + ...config.proverNodeConfig, + dataDirectory: proverNodeDataDirectory, }, - ); + { dateProvider, p2pClientDeps, telemetry: telemetryClient }, + { prefilledPublicData }, + )); } const sequencerDelayer = sequencerClient?.getDelayer(); - const proverDelayer = proverNode?.getDelayer(); + const proverDelayer = proverNode?.getProverNode()?.getDelayer(); logger.verbose('Creating a pxe...'); const pxeConfig = { ...getPXEConfig(), ...pxeOpts }; @@ -712,59 +708,41 @@ export async function waitForProvenChain(node: AztecNode, targetBlock?: BlockNum ); } +/** + * Creates an AztecNodeService with the prover node enabled as a subsystem. + * Returns both the aztec node service (for lifecycle management) and the prover node (for test internals access). + */ export function createAndSyncProverNode( proverNodePrivateKey: `0x${string}`, - aztecNodeConfig: AztecNodeConfig, - proverNodeConfig: Partial & Pick & { dontStart?: boolean }, - aztecNode: AztecNode | undefined, - prefilledPublicData: PublicDataTreeLeaf[] = [], - proverNodeDeps: ProverNodeDeps = {}, -) { + baseConfig: AztecNodeConfig, + configOverrides: Pick, + deps: { + telemetry?: TelemetryClient; + dateProvider: DateProvider; + p2pClientDeps?: P2PClientDeps; + }, + options: { prefilledPublicData: PublicDataTreeLeaf[]; dontStart?: boolean }, +): Promise<{ proverNode: AztecNodeService }> { return withLoggerBindings({ actor: 'prover-0' }, async () => { - const aztecNodeTxProvider = aztecNode && { - getTxByHash: aztecNode.getTxByHash.bind(aztecNode), - getTxsByHash: aztecNode.getTxsByHash.bind(aztecNode), - stop: () => Promise.resolve(), - }; - - const blobClient = await createBlobClientWithFileStores(aztecNodeConfig, createLogger('blob-client:prover-node')); - - const archiverConfig = { ...aztecNodeConfig, dataDirectory: proverNodeConfig.dataDirectory }; - const archiver = await createArchiver( - archiverConfig, - { blobClient, 
dateProvider: proverNodeDeps.dateProvider }, - { blockUntilSync: true }, + const proverNode = await AztecNodeService.createAndSync( + { + ...baseConfig, + ...configOverrides, + p2pPort: 0, + enableProverNode: true, + disableValidator: true, + proverPublisherPrivateKeys: [new SecretValue(proverNodePrivateKey)], + }, + deps, + { ...options, dontStartProverNode: options.dontStart }, ); - const proverConfig: ProverNodeConfig = { - ...aztecNodeConfig, - txCollectionNodeRpcUrls: [], - realProofs: false, - proverAgentCount: 2, - publisherPrivateKeys: [new SecretValue(proverNodePrivateKey)], - proverNodeMaxPendingJobs: 10, - proverNodeMaxParallelBlocksPerEpoch: 32, - proverNodePollingIntervalMs: 200, - txGatheringIntervalMs: 1000, - txGatheringBatchSize: 10, - txGatheringMaxParallelRequestsPerNode: 10, - txGatheringTimeoutMs: 24_000, - proverNodeFailedEpochStore: undefined, - proverId: EthAddress.fromNumber(1), - proverNodeEpochProvingDelayMs: undefined, - ...proverNodeConfig, - }; + if (!proverNode.getProverNode()) { + throw new Error('Prover node subsystem was not created despite enableProverNode being set'); + } - const proverNode = await createProverNode( - proverConfig, - { ...proverNodeDeps, aztecNodeTxProvider, archiver: archiver as Archiver }, - { prefilledPublicData }, - ); getLogger().info(`Created and synced prover node`); - if (!proverNodeConfig.dontStart) { - await proverNode.start(); - } - return proverNode; + return { proverNode }; }); } diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 925bb76dd86d..5c1ed24a68fd 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -7,7 +7,6 @@ import { SecretValue } from '@aztec/foundation/config'; import { withLoggerBindings } from '@aztec/foundation/log/server'; import { bufferToHex } from '@aztec/foundation/string'; import type { DateProvider } from 
'@aztec/foundation/timer'; -import type { ProverNodeConfig, ProverNodeDeps } from '@aztec/prover-node'; import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; import getPort from 'get-port'; @@ -131,7 +130,7 @@ export async function createNonValidatorNode( ...p2pConfig, disableValidator: true, validatorPrivateKeys: undefined, - publisherPrivateKeys: [], + sequencerPublisherPrivateKeys: [], }; const telemetry = await getEndToEndTestTelemetryClient(metricsPort); return await AztecNodeService.createAndSync(config, { telemetry, dateProvider }, { prefilledPublicData }); @@ -143,31 +142,24 @@ export async function createProverNode( tcpPort: number, bootstrapNode: string | undefined, addressIndex: number, - proverNodeDeps: ProverNodeDeps & Required>, + deps: { dateProvider: DateProvider }, prefilledPublicData?: PublicDataTreeLeaf[], dataDirectory?: string, metricsPort?: number, -) { +): Promise<{ proverNode: AztecNodeService }> { const actorIndex = proverCounter++; return await withLoggerBindings({ actor: `prover-${actorIndex}` }, async () => { const proverNodePrivateKey = getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + addressIndex)!; const telemetry = await getEndToEndTestTelemetryClient(metricsPort); - const proverConfig: Partial = await createP2PConfig( - config, - bootstrapNode, - tcpPort, - dataDirectory, - ); + const p2pConfig = await createP2PConfig(config, bootstrapNode, tcpPort, dataDirectory); - const aztecNodeRpcTxProvider = undefined; return await createAndSyncProverNode( bufferToHex(proverNodePrivateKey), - config, - { ...proverConfig, dataDirectory }, - aztecNodeRpcTxProvider, - prefilledPublicData, - { ...proverNodeDeps, telemetry }, + { ...config, ...p2pConfig }, + { dataDirectory }, + { ...deps, telemetry }, + { prefilledPublicData: prefilledPublicData ?? 
[] }, ); }); } @@ -215,7 +207,7 @@ export async function createValidatorConfig( ...config, ...p2pConfig, validatorPrivateKeys: new SecretValue(attesterPrivateKeys), - publisherPrivateKeys: [new SecretValue(attesterPrivateKeys[0])], + sequencerPublisherPrivateKeys: [new SecretValue(attesterPrivateKeys[0])], }; return nodeConfig; diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 014ff224754e..41a34959a210 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -67,6 +67,7 @@ export type EnvVar = | 'PUBLIC_DATA_TREE_MAP_SIZE_KB' | 'DEBUG' | 'DEBUG_P2P_DISABLE_COLOCATION_PENALTY' + | 'ENABLE_PROVER_NODE' | 'ETHEREUM_HOSTS' | 'ETHEREUM_DEBUG_HOSTS' | 'ETHEREUM_ALLOW_NO_DEBUG_HOSTS' diff --git a/yarn-project/p2p/src/client/factory.ts b/yarn-project/p2p/src/client/factory.ts index 1a4a813c5fa5..1ec728155f98 100644 --- a/yarn-project/p2p/src/client/factory.ts +++ b/yarn-project/p2p/src/client/factory.ts @@ -7,7 +7,7 @@ import { AztecLMDBStoreV2, createStore } from '@aztec/kv-store/lmdb-v2'; import type { BlockHash, L2BlockSource } from '@aztec/stdlib/block'; import type { ChainConfig } from '@aztec/stdlib/config'; import type { ContractDataSource } from '@aztec/stdlib/contract'; -import type { ClientProtocolCircuitVerifier, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { AztecNode, ClientProtocolCircuitVerifier, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { P2PClientType } from '@aztec/stdlib/p2p'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; @@ -26,7 +26,7 @@ import { DummyP2PService } from '../services/dummy_service.js'; import { LibP2PService } from '../services/index.js'; import { createFileStoreTxSources } from '../services/tx_collection/file_store_tx_source.js'; import { TxCollection } from 
'../services/tx_collection/tx_collection.js'; -import { type TxSource, createNodeRpcTxSources } from '../services/tx_collection/tx_source.js'; +import { NodeRpcTxSource, type TxSource, createNodeRpcTxSources } from '../services/tx_collection/tx_source.js'; import { TxFileStore } from '../services/tx_file_store/tx_file_store.js'; import { configureP2PClientAddresses, createLibP2PPeerIdFromPrivateKey, getPeerIdPrivateKey } from '../util.js'; @@ -36,6 +36,7 @@ export type P2PClientDeps = { attestationPool?: AttestationPoolApi; logger?: Logger; txCollectionNodeSources?: TxSource[]; + rpcTxProviders?: AztecNode[]; p2pServiceFactory?: (...args: Parameters<(typeof LibP2PService)['new']>) => Promise>; }; @@ -147,6 +148,7 @@ export async function createP2PClient( const nodeSources = [ ...createNodeRpcTxSources(config.txCollectionNodeRpcUrls, config), + ...(deps.rpcTxProviders ?? []).map((node, i) => new NodeRpcTxSource(node, `node-rpc-provider-${i}`)), ...(deps.txCollectionNodeSources ?? []), ]; if (nodeSources.length > 0) { diff --git a/yarn-project/p2p/src/client/interface.ts b/yarn-project/p2p/src/client/interface.ts index 1aa02f01a1c7..9585dd6b89e8 100644 --- a/yarn-project/p2p/src/client/interface.ts +++ b/yarn-project/p2p/src/client/interface.ts @@ -1,6 +1,6 @@ import type { SlotNumber } from '@aztec/foundation/branded-types'; import type { EthAddress, L2BlockId } from '@aztec/stdlib/block'; -import type { P2PApiFull } from '@aztec/stdlib/interfaces/server'; +import type { ITxProvider, P2PApiFull } from '@aztec/stdlib/interfaces/server'; import type { BlockProposal, CheckpointAttestation, CheckpointProposal, P2PClientType } from '@aztec/stdlib/p2p'; import type { BlockHeader, Tx, TxHash } from '@aztec/stdlib/tx'; @@ -213,6 +213,9 @@ export type P2P = P2PApiFull & /** Identifies a p2p client. */ isP2PClient(): true; + /** Returns the tx provider used for fetching transactions. 
*/ + getTxProvider(): ITxProvider; + updateP2PConfig(config: Partial): Promise; /** Validates a set of txs. */ diff --git a/yarn-project/prover-node/src/bin/run-failed-epoch.ts b/yarn-project/prover-node/src/bin/run-failed-epoch.ts index 2eb584adad9b..9406faef4fc1 100644 --- a/yarn-project/prover-node/src/bin/run-failed-epoch.ts +++ b/yarn-project/prover-node/src/bin/run-failed-epoch.ts @@ -1,4 +1,5 @@ /* eslint-disable no-console */ +import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; import { EthAddress } from '@aztec/foundation/eth-address'; import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; @@ -22,8 +23,10 @@ async function rerunFailedEpoch(provingJobUrl: string, baseLocalDir: string) { const dataDir = join(localDir, 'state'); const env = getProverNodeConfigFromEnv(); + const l1Config = getL1ContractsConfigEnvVars(); const config = { - ...getProverNodeConfigFromEnv(), + ...env, + ...l1Config, dataDirectory: dataDir, dataStoreMapSizeKb: env.dataStoreMapSizeKb ?? 1024 * 1024, proverId: env.proverId ?? 
EthAddress.random(), diff --git a/yarn-project/prover-node/src/config.test.ts b/yarn-project/prover-node/src/config.test.ts index e50f6082715e..da92da3a332b 100644 --- a/yarn-project/prover-node/src/config.test.ts +++ b/yarn-project/prover-node/src/config.test.ts @@ -20,9 +20,9 @@ describe('createKeyStoreForProver', () => { ): ProverNodeConfig => { const mockValue = (val: string) => ({ getValue: () => val }); return { - publisherPrivateKeys: publisherPrivateKeys.map(mockValue), + proverPublisherPrivateKeys: publisherPrivateKeys.map(mockValue), proverId, - publisherAddresses, + proverPublisherAddresses: publisherAddresses, web3SignerUrl, } as ProverNodeConfig; }; @@ -115,4 +115,17 @@ describe('createKeyStoreForProver', () => { validators: undefined, }); }); + + it('should fall through to publisher keys when web3SignerUrl is set but proverId is missing', () => { + const config = createMockConfig([mockKey1], undefined, [], mockSignerUrl); + const result = createKeyStoreForProver(config); + + expect(result).toEqual({ + schemaVersion: 1, + slasher: undefined, + prover: mockKey1, + remoteSigner: undefined, + validators: undefined, + }); + }); }); diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index dac85d6f3f45..9ba657d77f0a 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -1,18 +1,16 @@ -import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config'; import type { ACVMConfig, BBConfig } from '@aztec/bb-prover/config'; -import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum/config'; import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + pickConfigMappings, } from '@aztec/foundation/config'; import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type KeyStoreConfig, keyStoreConfigMappings } from '@aztec/node-keystore/config'; import { 
ethPrivateKeySchema } from '@aztec/node-keystore/schemas'; import type { KeyStore } from '@aztec/node-keystore/types'; import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config'; -import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { type ProverAgentConfig, type ProverBrokerConfig, @@ -21,24 +19,19 @@ import { } from '@aztec/prover-client/broker/config'; import { type ProverClientUserConfig, bbConfigMappings, proverClientConfigMappings } from '@aztec/prover-client/config'; import { - type PublisherConfig, - type TxSenderConfig, - getPublisherConfigMappings, - getTxSenderConfigMappings, + type ProverPublisherConfig, + type ProverTxSenderConfig, + proverPublisherConfigMappings, + proverTxSenderConfigMappings, } from '@aztec/sequencer-client/config'; -import { type WorldStateConfig, worldStateConfigMappings } from '@aztec/world-state/config'; - -export type ProverNodeConfig = ArchiverConfig & - ProverClientUserConfig & - P2PConfig & - WorldStateConfig & - PublisherConfig & - TxSenderConfig & + +export type ProverNodeConfig = ProverClientUserConfig & + ProverPublisherConfig & + ProverTxSenderConfig & DataStoreConfig & KeyStoreConfig & - SharedNodeConfig & SpecificProverNodeConfig & - GenesisStateConfig; + Pick; export type SpecificProverNodeConfig = { proverNodeMaxPendingJobs: number; @@ -53,7 +46,7 @@ export type SpecificProverNodeConfig = { txGatheringMaxParallelRequestsPerNode: number; }; -const specificProverNodeConfigMappings: ConfigMappingsType = { +export const specificProverNodeConfigMappings: ConfigMappingsType = { proverNodeMaxPendingJobs: { env: 'PROVER_NODE_MAX_PENDING_JOBS', description: 'The maximum number of pending jobs for the prover node', @@ -108,15 +101,11 @@ const specificProverNodeConfigMappings: ConfigMappingsType = { ...dataConfigMappings, ...keyStoreConfigMappings, - ...archiverConfigMappings, ...proverClientConfigMappings, - ...p2pConfigMappings, - ...worldStateConfigMappings, - 
...getPublisherConfigMappings('PROVER'), - ...getTxSenderConfigMappings('PROVER'), + ...proverPublisherConfigMappings, + ...proverTxSenderConfigMappings, ...specificProverNodeConfigMappings, - ...genesisStateConfigMappings, - ...sharedNodeConfigMappings, + ...pickConfigMappings(sharedNodeConfigMappings, ['web3SignerUrl']), }; export function getProverNodeConfigFromEnv(): ProverNodeConfig { @@ -143,7 +132,7 @@ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig): KeyStore | unde } // Also, we need at least one publisher address. - const publishers = config.publisherAddresses ?? []; + const publishers = config.proverPublisherAddresses ?? []; if (publishers.length === 0) { return undefined; @@ -164,8 +153,8 @@ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig): KeyStore | unde function createKeyStoreFromPublisherKeys(config: ProverNodeConfig): KeyStore | undefined { // Extract the publisher keys from the provided config. - const publisherKeys = config.publisherPrivateKeys - ? config.publisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue())) + const publisherKeys = config.proverPublisherPrivateKeys + ? config.proverPublisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue())) : []; // There must be at least 1. 
@@ -194,7 +183,10 @@ function createKeyStoreFromPublisherKeys(config: ProverNodeConfig): KeyStore | u export function createKeyStoreForProver(config: ProverNodeConfig): KeyStore | undefined { if (config.web3SignerUrl !== undefined && config.web3SignerUrl.length > 0) { - return createKeyStoreFromWeb3Signer(config); + const keyStore = createKeyStoreFromWeb3Signer(config); + if (keyStore) { + return keyStore; + } } return createKeyStoreFromPublisherKeys(config); diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index db2c855ddf7f..c0d3660b09dd 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,8 +1,7 @@ -import { type Archiver, createArchiver } from '@aztec/archiver'; -import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; -import { createBlobClientWithFileStores } from '@aztec/blob-client/client'; +import type { Archiver } from '@aztec/archiver'; +import type { BlobClientInterface } from '@aztec/blob-client/client'; import { Blob } from '@aztec/blob-lib'; -import { EpochCache } from '@aztec/epoch-cache'; +import type { EpochCacheInterface } from '@aztec/epoch-cache'; import { createEthereumChain } from '@aztec/ethereum/chain'; import { RollupContract } from '@aztec/ethereum/contracts'; import { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; @@ -10,23 +9,28 @@ import { PublisherManager } from '@aztec/ethereum/publisher-manager'; import { pick } from '@aztec/foundation/collection'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; -import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore'; -import { trySnapshotSync } from '@aztec/node-lib/actions'; +import { KeystoreManager } from '@aztec/node-keystore'; import { createForwarderL1TxUtilsFromSigners, 
createL1TxUtilsFromSigners } from '@aztec/node-lib/factories'; -import { NodeRpcTxSource, type P2PClientDeps, createP2PClient } from '@aztec/p2p'; -import { type ProverClientConfig, createProverClient } from '@aztec/prover-client'; +import { type ProverClientConfig, type ProverClientUserConfig, createProverClient } from '@aztec/prover-client'; import { createAndStartProvingBroker } from '@aztec/prover-client/broker'; -import type { AztecNode, ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; -import { P2PClientType } from '@aztec/stdlib/p2p'; -import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; -import { getPackageVersion } from '@aztec/stdlib/update-checker'; +import { + type ProverPublisherConfig, + type ProverTxSenderConfig, + getPublisherConfigFromProverConfig, +} from '@aztec/sequencer-client'; +import type { + AztecNode, + ITxProvider, + ProverConfig, + ProvingJobBroker, + Service, + WorldStateSynchronizer, +} from '@aztec/stdlib/interfaces/server'; import { L1Metrics, type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; -import { createWorldStateSynchronizer } from '@aztec/world-state'; import { createPublicClient, fallback, http } from 'viem'; -import { type ProverNodeConfig, createKeyStoreForProver } from './config.js'; +import type { SpecificProverNodeConfig } from './config.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; import { ProverNode } from './prover-node.js'; import { ProverPublisherFactory } from './prover-publisher-factory.js'; @@ -35,54 +39,42 @@ export type ProverNodeDeps = { telemetry?: TelemetryClient; log?: Logger; aztecNodeTxProvider?: Pick; - archiver?: Archiver; + archiver: Archiver; publisherFactory?: ProverPublisherFactory; broker?: ProvingJobBroker; l1TxUtils?: L1TxUtils; dateProvider?: DateProvider; - p2pClientDeps?: P2PClientDeps; + worldStateSynchronizer: WorldStateSynchronizer; + p2pClient: { getTxProvider(): ITxProvider } & Partial; + epochCache: EpochCacheInterface; + 
blobClient: BlobClientInterface; + keyStoreManager?: KeystoreManager; }; -/** Creates a new prover node given a config. */ +/** Creates a new prover node subsystem given a config and dependencies */ export async function createProverNode( - userConfig: ProverNodeConfig & DataStoreConfig & KeyStoreConfig, - deps: ProverNodeDeps = {}, - options: { - prefilledPublicData?: PublicDataTreeLeaf[]; - } = {}, + userConfig: SpecificProverNodeConfig & + ProverConfig & + ProverClientUserConfig & + ProverPublisherConfig & + ProverTxSenderConfig, + deps: ProverNodeDeps, ) { const config = { ...userConfig }; const telemetry = deps.telemetry ?? getTelemetryClient(); const dateProvider = deps.dateProvider ?? new DateProvider(); - const blobClient = await createBlobClientWithFileStores(config, createLogger('prover-node:blob-client:client')); - const log = deps.log ?? createLogger('prover-node'); - - // Build a key store from file if given or from environment otherwise - let keyStoreManager: KeystoreManager | undefined; - const keyStoreProvided = config.keyStoreDirectory !== undefined && config.keyStoreDirectory.length > 0; - if (keyStoreProvided) { - const keyStores = loadKeystores(config.keyStoreDirectory!); - keyStoreManager = new KeystoreManager(mergeKeystores(keyStores)); - } else { - const keyStore = createKeyStoreForProver(config); - if (keyStore) { - keyStoreManager = new KeystoreManager(keyStore); - } - } + const log = deps.log ?? createLogger('prover'); - await keyStoreManager?.validateSigners(); + const { p2pClient, archiver, keyStoreManager, worldStateSynchronizer } = deps; // Extract the prover signers from the key store and verify that we have one. 
+ await keyStoreManager?.validateSigners(); const proverSigners = keyStoreManager?.createProverSigners(); if (proverSigners === undefined) { throw new Error('Failed to create prover key store configuration'); } else if (proverSigners.signers.length === 0) { throw new Error('No prover signers found in the key store'); - } else if (!keyStoreProvided) { - log.warn( - 'KEY STORE CREATED FROM ENVIRONMENT, IT IS RECOMMENDED TO USE A FILE-BASED KEY STORE IN PRODUCTION ENVIRONMENTS', - ); } log.info(`Creating prover with publishers ${proverSigners.signers.map(signer => signer.address.toString()).join()}`); @@ -94,27 +86,7 @@ export async function createProverNode( const proverId = proverSigners.id ?? proverIdInUserConfig ?? proverSigners.signers[0].address; // Now create the prover client configuration from this. - const proverClientConfig: ProverClientConfig = { - ...config, - proverId, - }; - - await trySnapshotSync(config, log); - - const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config); - - const archiver = - deps.archiver ?? - (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true })); - log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`); - - const worldStateSynchronizer = await createWorldStateSynchronizer( - config, - archiver, - options.prefilledPublicData, - telemetry, - ); - await worldStateSynchronizer.start(); + const proverClientConfig: ProverClientConfig = { ...config, proverId }; const broker = deps.broker ?? (await createAndStartProvingBroker(config, telemetry)); @@ -133,11 +105,11 @@ export async function createProverNode( const l1TxUtils = deps.l1TxUtils ? [deps.l1TxUtils] - : config.publisherForwarderAddress + : config.proverPublisherForwarderAddress ? 
await createForwarderL1TxUtilsFromSigners( publicClient, proverSigners.signers, - config.publisherForwarderAddress, + config.proverPublisherForwarderAddress, { ...config, scope: 'prover' }, { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider, kzg: Blob.getViemKzgInstance() }, ) @@ -152,37 +124,12 @@ export async function createProverNode( deps.publisherFactory ?? new ProverPublisherFactory(config, { rollupContract, - publisherManager: new PublisherManager(l1TxUtils, config, log.getBindings()), + publisherManager: new PublisherManager(l1TxUtils, getPublisherConfigFromProverConfig(config), log.getBindings()), telemetry, }); - const proofVerifier = new QueuedIVCVerifier( - config, - config.realProofs || config.debugForceTxProofVerification - ? await BBCircuitVerifier.new(config) - : new TestCircuitVerifier(config.proverTestVerificationDelayMs), - ); - - const p2pClient = await createP2PClient( - P2PClientType.Prover, - config, - archiver, - proofVerifier, - worldStateSynchronizer, - epochCache, - getPackageVersion() ?? '', - dateProvider, - telemetry, - { - ...deps.p2pClientDeps, - txCollectionNodeSources: [ - ...(deps.p2pClientDeps?.txCollectionNodeSources ?? []), - ...(deps.aztecNodeTxProvider ? 
[new NodeRpcTxSource(deps.aztecNodeTxProvider, 'TestNode')] : []), - ], - }, - ); - - await p2pClient.start(); + // TODO(#20393): Check that the tx collection node sources are properly injected + // See aztecNodeTxProvider const proverNodeConfig = { ...pick( diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 25f7bbb3eb60..8adf039d7651 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -8,7 +8,6 @@ import { memoize } from '@aztec/foundation/decorators'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; import type { DataStoreConfig } from '@aztec/kv-store/config'; -import type { P2PClient } from '@aztec/p2p'; import { PublicProcessorFactory } from '@aztec/simulator/server'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { Checkpoint } from '@aztec/stdlib/checkpoint'; @@ -18,6 +17,7 @@ import { getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers import { type EpochProverManager, EpochProvingJobTerminalState, + type ITxProvider, type ProverNodeApi, type Service, type WorldStateSyncStatus, @@ -25,7 +25,6 @@ import { tryStop, } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; -import type { P2PClientType } from '@aztec/stdlib/p2p'; import type { Tx } from '@aztec/stdlib/tx'; import { Attributes, @@ -73,7 +72,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable protected readonly l1ToL2MessageSource: L1ToL2MessageSource, protected readonly contractDataSource: ContractDataSource, protected readonly worldState: WorldStateSynchronizer, - protected readonly p2pClient: Pick, 'getTxProvider'> & Partial, + protected readonly p2pClient: { getTxProvider(): ITxProvider } & Partial, protected readonly epochsMonitor: EpochMonitor, protected readonly rollupContract: 
RollupContract, protected readonly l1Metrics: L1Metrics, @@ -162,17 +161,15 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable /** * Stops the prover node and all its dependencies. + * Resources not owned by this node (shared with the parent aztec-node) are skipped. */ async stop() { this.log.info('Stopping ProverNode'); await this.epochsMonitor.stop(); await this.prover.stop(); - await tryStop(this.p2pClient); - await tryStop(this.l2BlockSource); await tryStop(this.publisherFactory); this.publisher?.interrupt(); await Promise.all(Array.from(this.jobs.values()).map(job => job.stop())); - await this.worldState.stop(); this.rewardsMetrics.stop(); this.l1Metrics.stop(); await this.telemetryClient.stop(); diff --git a/yarn-project/prover-node/src/prover-publisher-factory.ts b/yarn-project/prover-node/src/prover-publisher-factory.ts index 8bcf72b321ed..8e1b88d1560e 100644 --- a/yarn-project/prover-node/src/prover-publisher-factory.ts +++ b/yarn-project/prover-node/src/prover-publisher-factory.ts @@ -2,14 +2,14 @@ import type { RollupContract } from '@aztec/ethereum/contracts'; import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils'; import type { PublisherManager } from '@aztec/ethereum/publisher-manager'; import type { LoggerBindings } from '@aztec/foundation/log'; -import type { PublisherConfig, TxSenderConfig } from '@aztec/sequencer-client'; +import type { ProverPublisherConfig, ProverTxSenderConfig } from '@aztec/sequencer-client'; import type { TelemetryClient } from '@aztec/telemetry-client'; import { ProverNodePublisher } from './prover-node-publisher.js'; export class ProverPublisherFactory { constructor( - private config: TxSenderConfig & PublisherConfig, + private config: ProverTxSenderConfig & ProverPublisherConfig, private deps: { rollupContract: RollupContract; publisherManager: PublisherManager; diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts 
b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 7b3135501ce3..fa0488a1ac5d 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -18,7 +18,7 @@ import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { L1Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { FullNodeCheckpointsBuilder, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client'; -import type { SequencerClientConfig } from '../config.js'; +import { type SequencerClientConfig, getPublisherConfigFromSequencerConfig } from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { SequencerPublisherFactory } from '../publisher/sequencer-publisher-factory.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; @@ -86,7 +86,11 @@ export class SequencerClient { publicClient, l1TxUtils.map(x => x.getSenderAddress()), ); - const publisherManager = new PublisherManager(l1TxUtils, config, log.getBindings()); + const publisherManager = new PublisherManager( + l1TxUtils, + getPublisherConfigFromSequencerConfig(config), + log.getBindings(), + ); const rollupContract = new RollupContract(publicClient, config.l1Contracts.rollupAddress.toString()); const [l1GenesisTime, slotDuration, rollupVersion, rollupManaLimit] = await Promise.all([ rollupContract.getL1GenesisTime(), diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 982616eba126..469651fba387 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -22,10 +22,10 @@ import { DEFAULT_P2P_PROPAGATION_TIME } from '@aztec/stdlib/timetable'; import { type ValidatorClientConfig, validatorClientConfigMappings } from '@aztec/validator-client/config'; import { - type PublisherConfig, - type TxSenderConfig, - getPublisherConfigMappings, - 
getTxSenderConfigMappings, + type SequencerPublisherConfig, + type SequencerTxSenderConfig, + sequencerPublisherConfigMappings, + sequencerTxSenderConfigMappings, } from './publisher/config.js'; export * from './publisher/config.js'; @@ -61,10 +61,10 @@ export const DefaultSequencerConfig: ResolvedSequencerConfig = { /** * Configuration settings for the SequencerClient. */ -export type SequencerClientConfig = PublisherConfig & +export type SequencerClientConfig = SequencerPublisherConfig & KeyStoreConfig & ValidatorClientConfig & - TxSenderConfig & + SequencerTxSenderConfig & SequencerConfig & L1ReaderConfig & ChainConfig & @@ -222,8 +222,8 @@ export const sequencerClientConfigMappings: ConfigMappingsType[]; - /** - * Publisher addresses to be used with a remote signer - */ + /** Publisher addresses to be used with a remote signer */ publisherAddresses?: EthAddress[]; }; -/** - * Configuration of the L1Publisher. - */ +export type ProverTxSenderConfig = L1ReaderConfig & { + proverPublisherPrivateKeys?: SecretValue<`0x${string}`>[]; + proverPublisherAddresses?: EthAddress[]; +}; + +export type SequencerTxSenderConfig = L1ReaderConfig & { + sequencerPublisherPrivateKeys?: SecretValue<`0x${string}`>[]; + sequencerPublisherAddresses?: EthAddress[]; +}; + +export function getTxSenderConfigFromProverConfig(config: ProverTxSenderConfig): TxSenderConfig { + return { + ...config, + publisherPrivateKeys: config.proverPublisherPrivateKeys, + publisherAddresses: config.proverPublisherAddresses, + }; +} + +export function getTxSenderConfigFromSequencerConfig(config: SequencerTxSenderConfig): TxSenderConfig { + return { + ...config, + publisherPrivateKeys: config.sequencerPublisherPrivateKeys, + publisherAddresses: config.sequencerPublisherAddresses, + }; +} + +/** Configuration of the L1Publisher. 
*/ export type PublisherConfig = L1TxUtilsConfig & BlobClientConfig & { /** True to use publishers in invalid states (timed out, cancelled, etc) if no other is available */ @@ -37,35 +50,76 @@ export type PublisherConfig = L1TxUtilsConfig & publisherForwarderAddress?: EthAddress; }; -export const getTxSenderConfigMappings: ( - scope: 'PROVER' | 'SEQ', -) => ConfigMappingsType> = (scope: 'PROVER' | 'SEQ') => ({ +export type ProverPublisherConfig = L1TxUtilsConfig & + BlobClientConfig & { + fishermanMode?: boolean; + proverPublisherAllowInvalidStates?: boolean; + proverPublisherForwarderAddress?: EthAddress; + }; + +export type SequencerPublisherConfig = L1TxUtilsConfig & + BlobClientConfig & { + fishermanMode?: boolean; + sequencerPublisherAllowInvalidStates?: boolean; + sequencerPublisherForwarderAddress?: EthAddress; + }; + +export function getPublisherConfigFromProverConfig(config: ProverPublisherConfig): PublisherConfig { + return { + ...config, + publisherAllowInvalidStates: config.proverPublisherAllowInvalidStates, + publisherForwarderAddress: config.proverPublisherForwarderAddress, + }; +} + +export function getPublisherConfigFromSequencerConfig(config: SequencerPublisherConfig): PublisherConfig { + return { + ...config, + publisherAllowInvalidStates: config.sequencerPublisherAllowInvalidStates, + publisherForwarderAddress: config.sequencerPublisherForwarderAddress, + }; +} + +export const proverTxSenderConfigMappings: ConfigMappingsType> = { ...l1ReaderConfigMappings, - publisherPrivateKeys: { - env: scope === 'PROVER' ? `PROVER_PUBLISHER_PRIVATE_KEYS` : `SEQ_PUBLISHER_PRIVATE_KEYS`, - description: 'The private keys to be used by the publisher.', + proverPublisherPrivateKeys: { + env: `PROVER_PUBLISHER_PRIVATE_KEYS`, + description: 'The private keys to be used by the prover publisher.', parseEnv: (val: string) => val.split(',').map(key => new SecretValue(`0x${key.replace('0x', '')}`)), defaultValue: [], - fallback: [scope === 'PROVER' ? 
`PROVER_PUBLISHER_PRIVATE_KEY` : `SEQ_PUBLISHER_PRIVATE_KEY`], + fallback: [`PROVER_PUBLISHER_PRIVATE_KEY`], }, - publisherAddresses: { - env: scope === 'PROVER' ? `PROVER_PUBLISHER_ADDRESSES` : `SEQ_PUBLISHER_ADDRESSES`, + proverPublisherAddresses: { + env: `PROVER_PUBLISHER_ADDRESSES`, description: 'The addresses of the publishers to use with remote signers', parseEnv: (val: string) => val.split(',').map(address => EthAddress.fromString(address)), defaultValue: [], }, -}); +}; -export function getTxSenderConfigFromEnv(scope: 'PROVER' | 'SEQ'): Omit { - return getConfigFromMappings(getTxSenderConfigMappings(scope)); -} +export const sequencerTxSenderConfigMappings: ConfigMappingsType> = { + ...l1ReaderConfigMappings, + sequencerPublisherPrivateKeys: { + env: `SEQ_PUBLISHER_PRIVATE_KEYS`, + description: 'The private keys to be used by the sequencer publisher.', + parseEnv: (val: string) => val.split(',').map(key => new SecretValue(`0x${key.replace('0x', '')}`)), + defaultValue: [], + fallback: [`SEQ_PUBLISHER_PRIVATE_KEY`], + }, + sequencerPublisherAddresses: { + env: `SEQ_PUBLISHER_ADDRESSES`, + description: 'The addresses of the publishers to use with remote signers', + parseEnv: (val: string) => val.split(',').map(address => EthAddress.fromString(address)), + defaultValue: [], + }, +}; -export const getPublisherConfigMappings: ( - scope: 'PROVER' | 'SEQ', -) => ConfigMappingsType = scope => ({ - publisherAllowInvalidStates: { +export const sequencerPublisherConfigMappings: ConfigMappingsType = { + ...l1TxUtilsConfigMappings, + ...blobClientConfigMapping, + sequencerPublisherAllowInvalidStates: { + env: `SEQ_PUBLISHER_ALLOW_INVALID_STATES`, description: 'True to use publishers in invalid states (timed out, cancelled, etc) if no other is available', - env: scope === `PROVER` ? 
`PROVER_PUBLISHER_ALLOW_INVALID_STATES` : `SEQ_PUBLISHER_ALLOW_INVALID_STATES`, ...booleanConfigHelper(true), }, fishermanMode: { @@ -74,15 +128,30 @@ export const getPublisherConfigMappings: ( 'Whether to run in fisherman mode: builds blocks on every slot for validation without publishing to L1', ...booleanConfigHelper(false), }, - publisherForwarderAddress: { - env: scope === `PROVER` ? `PROVER_PUBLISHER_FORWARDER_ADDRESS` : `SEQ_PUBLISHER_FORWARDER_ADDRESS`, + sequencerPublisherForwarderAddress: { + env: `SEQ_PUBLISHER_FORWARDER_ADDRESS`, description: 'Address of the forwarder contract to wrap all L1 transactions through (for testing purposes only)', parseEnv: (val: string) => (val ? EthAddress.fromString(val) : undefined), }, +}; + +export const proverPublisherConfigMappings: ConfigMappingsType = { ...l1TxUtilsConfigMappings, ...blobClientConfigMapping, -}); - -export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { - return getConfigFromMappings(getPublisherConfigMappings(scope)); -} + proverPublisherAllowInvalidStates: { + env: `PROVER_PUBLISHER_ALLOW_INVALID_STATES`, + description: 'True to use publishers in invalid states (timed out, cancelled, etc) if no other is available', + ...booleanConfigHelper(true), + }, + fishermanMode: { + env: 'FISHERMAN_MODE', + description: + 'Whether to run in fisherman mode: builds blocks on every slot for validation without publishing to L1', + ...booleanConfigHelper(false), + }, + proverPublisherForwarderAddress: { + env: `PROVER_PUBLISHER_FORWARDER_ADDRESS`, + description: 'Address of the forwarder contract to wrap all L1 transactions through (for testing purposes only)', + parseEnv: (val: string) => (val ? 
EthAddress.fromString(val) : undefined), + }, +}; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index 77751458a016..a88d42f05029 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -47,7 +47,7 @@ import { type TelemetryClient, type Tracer, getTelemetryClient, trackSpan } from import { type StateOverride, type TransactionReceipt, type TypedDataDefinition, encodeFunctionData, toHex } from 'viem'; -import type { PublisherConfig, TxSenderConfig } from './config.js'; +import type { SequencerPublisherConfig } from './config.js'; import { SequencerPublisherMetrics } from './sequencer-publisher-metrics.js'; /** Arguments to the process method of the rollup contract */ @@ -149,7 +149,8 @@ export class SequencerPublisher { protected requests: RequestWithExpiry[] = []; constructor( - private config: TxSenderConfig & PublisherConfig & Pick, + private config: Pick & + Pick & { l1ChainId: number }, deps: { telemetry?: TelemetryClient; blobClient: BlobClientInterface; diff --git a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts index 2dc0d77d9376..db9c94b94942 100644 --- a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts +++ b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts @@ -16,7 +16,7 @@ import type { StatusMessage, } from '@aztec/p2p'; import type { EthAddress, L2BlockStreamEvent, L2Tips } from '@aztec/stdlib/block'; -import type { PeerInfo } from '@aztec/stdlib/interfaces/server'; +import type { ITxProvider, PeerInfo } from '@aztec/stdlib/interfaces/server'; import type { BlockProposal, CheckpointAttestation, CheckpointProposal } from '@aztec/stdlib/p2p'; import type { BlockHeader, Tx, TxHash } from '@aztec/stdlib/tx'; @@ -131,6 +131,10 @@ export class DummyP2P implements P2P { throw new 
Error('DummyP2P does not implement "isP2PClient"'); } + public getTxProvider(): ITxProvider { + throw new Error('DummyP2P does not implement "getTxProvider"'); + } + public getTxsByHash(_txHashes: TxHash[]): Promise { throw new Error('DummyP2P does not implement "getTxsByHash"'); } diff --git a/yarn-project/txe/src/state_machine/index.ts b/yarn-project/txe/src/state_machine/index.ts index 194166720e15..cf7884c24ff4 100644 --- a/yarn-project/txe/src/state_machine/index.ts +++ b/yarn-project/txe/src/state_machine/index.ts @@ -50,6 +50,7 @@ export class TXEStateMachine { undefined, undefined, undefined, + undefined, VERSION, CHAIN_ID, new TXEGlobalVariablesBuilder(), diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index cd76323dbd2c..aeb6b8c124d7 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -816,6 +816,7 @@ __metadata: "@aztec/p2p": "workspace:^" "@aztec/protocol-contracts": "workspace:^" "@aztec/prover-client": "workspace:^" + "@aztec/prover-node": "workspace:^" "@aztec/sequencer-client": "workspace:^" "@aztec/simulator": "workspace:^" "@aztec/slasher": "workspace:^" From 7739e5bc2e10f787444609017e585e939b97bf8c Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Mon, 16 Feb 2026 20:40:55 +0000 Subject: [PATCH 41/62] World state history based on checkpoints --- spartan/environments/devnet.env | 2 +- spartan/scripts/deploy_network.sh | 2 +- spartan/terraform/deploy-aztec-infra/main.tf | 12 +++--- .../terraform/deploy-aztec-infra/variables.tf | 4 +- .../aztec/src/cli/aztec_start_options.ts | 2 +- .../src/e2e_epochs/epochs_multiple.test.ts | 4 +- .../end-to-end/src/e2e_epochs/epochs_test.ts | 4 +- .../e2e_l1_publisher/e2e_l1_publisher.test.ts | 2 +- .../end-to-end/src/e2e_pruned_blocks.test.ts | 6 +-- yarn-project/foundation/src/config/env_var.ts | 1 + .../src/validator.integration.test.ts | 2 +- .../world-state/src/synchronizer/config.ts | 12 +++--- .../server_world_state_synchronizer.test.ts | 2 +- 
.../server_world_state_synchronizer.ts | 37 ++++++++++++++++--- .../world-state/src/test/integration.test.ts | 2 +- 15 files changed, 61 insertions(+), 33 deletions(-) diff --git a/spartan/environments/devnet.env b/spartan/environments/devnet.env index 6fef140f7011..07479bd312b9 100644 --- a/spartan/environments/devnet.env +++ b/spartan/environments/devnet.env @@ -74,4 +74,4 @@ RPC_INGRESS_HOSTS="[\"$NAMESPACE.aztec-labs.com\"]" RPC_INGRESS_STATIC_IP_NAME=$NAMESPACE-rpc-ip RPC_INGRESS_SSL_CERT_NAMES="[\"$NAMESPACE-rpc-cert\"]" -WS_NUM_HISTORIC_BLOCKS=300 +WS_NUM_HISTORIC_CHECKPOINTS=300 diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index 9d8103c0794b..d0e4e189ba28 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -596,7 +596,7 @@ FULL_NODE_INCLUDE_METRICS = "${FULL_NODE_INCLUDE_METRICS-null}" LOG_LEVEL = "${LOG_LEVEL}" FISHERMAN_LOG_LEVEL = "${FISHERMAN_LOG_LEVEL}" -WS_NUM_HISTORIC_BLOCKS = ${WS_NUM_HISTORIC_BLOCKS:-null} +WS_NUM_HISTORIC_CHECKPOINTS = ${WS_NUM_HISTORIC_CHECKPOINTS:-null} P2P_PUBLIC_IP = ${P2P_PUBLIC_IP} P2P_NODEPORT_ENABLED = ${P2P_NODEPORT_ENABLED} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index 347939595c5b..85c891329229 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -218,7 +218,7 @@ locals { "validator.node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "validator.node.env.P2P_DROP_TX" = var.P2P_DROP_TX "validator.node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "validator.node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "validator.node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "validator.node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS "validator.node.env.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT" = var.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT } @@ -358,7 +358,7 @@ locals { 
"node.node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.node.env.P2P_DROP_TX" = var.P2P_DROP_TX "node.node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "node.node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "node.node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS "node.service.p2p.nodePortEnabled" = var.P2P_NODEPORT_ENABLED "node.service.p2p.announcePort" = local.p2p_port_prover @@ -438,7 +438,7 @@ locals { "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX" = var.P2P_DROP_TX "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_FILE_STORE_ENABLED" = var.TX_FILE_STORE_ENABLED "node.env.TX_FILE_STORE_URL" = var.TX_FILE_STORE_URL "node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS @@ -495,7 +495,7 @@ locals { "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX" = var.P2P_DROP_TX "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS } boot_node_host_path = "node.env.BOOT_NODE_HOST" @@ -535,7 +535,7 @@ locals { "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX" = var.P2P_DROP_TX "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS } boot_node_host_path = "node.env.BOOT_NODE_HOST" @@ -572,7 +572,7 @@ locals { "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI 
"node.env.P2P_DROP_TX" = var.P2P_DROP_TX "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE - "node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS + "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS } boot_node_host_path = "node.env.BOOT_NODE_HOST" bootstrap_nodes_path = "node.env.BOOTSTRAP_NODES" diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index 9947d2379f43..358e2b0ec335 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -772,8 +772,8 @@ variable "P2P_DROP_TX_CHANCE" { default = 0 } -variable "WS_NUM_HISTORIC_BLOCKS" { - description = "Number of historic blocks for world state" +variable "WS_NUM_HISTORIC_CHECKPOINTS" { + description = "Number of historic checkpoints for world state" type = string nullable = true default = null diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index d99dfab99690..d6df6e872530 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -170,7 +170,7 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { 'WORLD STATE': [ configToFlag('--world-state-data-directory', worldStateConfigMappings.worldStateDataDirectory), configToFlag('--world-state-db-map-size-kb', worldStateConfigMappings.worldStateDbMapSizeKb), - configToFlag('--world-state-block-history', worldStateConfigMappings.worldStateBlockHistory), + configToFlag('--world-state-checkpoint-history', worldStateConfigMappings.worldStateCheckpointHistory), ], // We can't easily auto-generate node options as they're parts of modules defined below 'AZTEC NODE': [ diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts index d0f94c81e433..c9d60642e641 100644 --- 
a/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts @@ -5,7 +5,7 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { jest } from '@jest/globals'; import type { EndToEndContext } from '../fixtures/utils.js'; -import { EpochsTestContext, WORLD_STATE_BLOCK_HISTORY } from './epochs_test.js'; +import { EpochsTestContext, WORLD_STATE_CHECKPOINT_HISTORY } from './epochs_test.js'; jest.setTimeout(1000 * 60 * 15); @@ -52,7 +52,7 @@ describe('e2e_epochs/epochs_multiple', () => { // Right now finalization means a checkpoint is two L2 epochs deep. If this rule changes then this test needs to be updated. const provenBlockNumber = epochEndBlockNumber; const finalizedBlockNumber = Math.max(provenBlockNumber - context.config.aztecEpochDuration * 2, 0); - const expectedOldestHistoricBlock = Math.max(finalizedBlockNumber - WORLD_STATE_BLOCK_HISTORY + 1, 1); + const expectedOldestHistoricBlock = Math.max(finalizedBlockNumber - WORLD_STATE_CHECKPOINT_HISTORY + 1, 1); const expectedBlockRemoved = expectedOldestHistoricBlock - 1; await test.waitForNodeToSync(BlockNumber(expectedOldestHistoricBlock), 'historic'); await test.verifyHistoricBlock(BlockNumber(expectedOldestHistoricBlock), true); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index a2e4eeaa2f7f..a272fc521d94 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -41,7 +41,7 @@ import { setup, } from '../fixtures/utils.js'; -export const WORLD_STATE_BLOCK_HISTORY = 2; +export const WORLD_STATE_CHECKPOINT_HISTORY = 2; export const WORLD_STATE_BLOCK_CHECK_INTERVAL = 50; export const ARCHIVER_POLL_INTERVAL = 50; export const DEFAULT_L1_BLOCK_TIME = process.env.CI ? 
12 : 8; @@ -142,7 +142,7 @@ export class EpochsTestContext { // using the prover's eth address if the proverId is used for something in the rollup contract // Use numeric EthAddress for deterministic prover id proverId: EthAddress.fromNumber(1), - worldStateBlockHistory: WORLD_STATE_BLOCK_HISTORY, + worldStateCheckpointHistory: WORLD_STATE_CHECKPOINT_HISTORY, exitDelaySeconds: DefaultL1ContractsConfig.exitDelaySeconds, slasherFlavor: 'none', l1PublishingTime, diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 9b36f58493ff..615d869d5e7c 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -249,7 +249,7 @@ describe('L1Publisher integration', () => { const worldStateConfig: WorldStateConfig = { worldStateBlockCheckIntervalMS: 10000, worldStateDbMapSizeKb: 10 * 1024 * 1024, - worldStateBlockHistory: 0, + worldStateCheckpointHistory: 0, }; worldStateSynchronizer = new ServerWorldStateSynchronizer(builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); diff --git a/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts b/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts index 65f1bd64dfad..8a9017b46219 100644 --- a/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts +++ b/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts @@ -31,7 +31,7 @@ describe('e2e_pruned_blocks', () => { const MINT_AMOUNT = 1000n; // Don't make this value too high since we need to mine this number of empty blocks, which is relatively slow. 
- const WORLD_STATE_BLOCK_HISTORY = 2; + const WORLD_STATE_CHECKPOINT_HISTORY = 2; const EPOCH_LENGTH = 2; const WORLD_STATE_CHECK_INTERVAL_MS = 300; const ARCHIVER_POLLING_INTERVAL_MS = 300; @@ -47,7 +47,7 @@ describe('e2e_pruned_blocks', () => { accounts: [admin, sender, recipient], } = await setup(3, { aztecEpochDuration: EPOCH_LENGTH, - worldStateBlockHistory: WORLD_STATE_BLOCK_HISTORY, + worldStateCheckpointHistory: WORLD_STATE_CHECKPOINT_HISTORY, worldStateBlockCheckIntervalMS: WORLD_STATE_CHECK_INTERVAL_MS, archiverPollingIntervalMS: ARCHIVER_POLLING_INTERVAL_MS, aztecProofSubmissionEpochs: 1024, // effectively do not reorg @@ -94,7 +94,7 @@ describe('e2e_pruned_blocks', () => { // blocks (notably the one with the minted note) being pruned. Given world state prunes based on the finalized tip, // and we are defining the finalized tip as two epochs behind the proven one, we need to mine two extra epochs. await aztecNodeAdmin!.setConfig({ minTxsPerBlock: 0 }); - await waitBlocks(WORLD_STATE_BLOCK_HISTORY + EPOCH_LENGTH * 2 + 1); + await waitBlocks(WORLD_STATE_CHECKPOINT_HISTORY + EPOCH_LENGTH * 2 + 1); await cheatCodes.rollup.markAsProven(); // The same historical query we performed before should now fail since this block is not available anymore. 
We poll diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 41a34959a210..33e4f7b66832 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -281,6 +281,7 @@ export type EnvVar = | 'WS_BLOCK_REQUEST_BATCH_SIZE' | 'L1_READER_VIEM_POLLING_INTERVAL_MS' | 'WS_DATA_DIRECTORY' + | 'WS_NUM_HISTORIC_CHECKPOINTS' | 'WS_NUM_HISTORIC_BLOCKS' | 'ETHEREUM_SLOT_DURATION' | 'AZTEC_SLOT_DURATION' diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 551928645950..539c0df9b233 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -114,7 +114,7 @@ describe('ValidatorClient Integration', () => { worldStateBlockCheckIntervalMS: 20, worldStateBlockRequestBatchSize: 10, worldStateDbMapSizeKb: 1024 * 1024, - worldStateBlockHistory: 0, + worldStateCheckpointHistory: 0, }; const worldStateDb = await NativeWorldStateService.tmp(rollupAddress, true, prefilledPublicData); const synchronizer = new ServerWorldStateSynchronizer(worldStateDb, archiver, wsConfig); diff --git a/yarn-project/world-state/src/synchronizer/config.ts b/yarn-project/world-state/src/synchronizer/config.ts index 2ba5dd6b8a9e..b1f4a7321781 100644 --- a/yarn-project/world-state/src/synchronizer/config.ts +++ b/yarn-project/world-state/src/synchronizer/config.ts @@ -29,8 +29,8 @@ export interface WorldStateConfig { /** Optional directory for the world state DB, if unspecified will default to the general data directory */ worldStateDataDirectory?: string; - /** The number of historic blocks to maintain */ - worldStateBlockHistory: number; + /** The number of historic checkpoints worth of blocks to maintain */ + worldStateCheckpointHistory: number; } export const worldStateConfigMappings: ConfigMappingsType = { @@ -84,9 
+84,11 @@ export const worldStateConfigMappings: ConfigMappingsType = { env: 'WS_DATA_DIRECTORY', description: 'Optional directory for the world state database', }, - worldStateBlockHistory: { - env: 'WS_NUM_HISTORIC_BLOCKS', - description: 'The number of historic blocks to maintain. Values less than 1 mean all history is maintained', + worldStateCheckpointHistory: { + env: 'WS_NUM_HISTORIC_CHECKPOINTS', + description: + 'The number of historic checkpoints worth of blocks to maintain. Values less than 1 mean all history is maintained', + fallback: ['WS_NUM_HISTORIC_BLOCKS'], ...numberConfigHelper(64), }, }; diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index bc5576d98d95..c9eebe16914a 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -80,7 +80,7 @@ describe('ServerWorldStateSynchronizer', () => { const config: WorldStateConfig = { worldStateBlockCheckIntervalMS: 100, worldStateDbMapSizeKb: 1024 * 1024, - worldStateBlockHistory: 0, + worldStateCheckpointHistory: 0, }; server = new TestWorldStateSynchronizer(merkleTreeDb, blockAndMessagesSource, config, l2BlockStream); diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index daa67b1882d1..38a4e9ce81e5 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -1,5 +1,5 @@ import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM, INITIAL_L2_CHECKPOINT_NUM } from '@aztec/constants'; -import { BlockNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber } from 
'@aztec/foundation/branded-types'; import type { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; @@ -64,7 +64,7 @@ export class ServerWorldStateSynchronizer private readonly log: Logger = createLogger('world_state'), ) { this.merkleTreeCommitted = this.merkleTreeDb.getCommitted(); - this.historyToKeep = config.worldStateBlockHistory < 1 ? undefined : config.worldStateBlockHistory; + this.historyToKeep = config.worldStateCheckpointHistory < 1 ? undefined : config.worldStateCheckpointHistory; this.log.info( `Created world state synchroniser with block history of ${ this.historyToKeep === undefined ? 'infinity' : this.historyToKeep @@ -364,12 +364,37 @@ export class ServerWorldStateSynchronizer if (this.historyToKeep === undefined) { return; } - const newHistoricBlock = summary.finalizedBlockNumber - this.historyToKeep + 1; - if (newHistoricBlock <= 1) { + // Get the checkpointed block for the finalized block number + const finalisedCheckpoint = await this.l2BlockSource.getCheckpointedBlock(summary.finalizedBlockNumber); + if (finalisedCheckpoint === undefined) { + this.log.warn( + `Failed to retrieve checkpointed block for finalized block number: ${summary.finalizedBlockNumber}`, + ); + return; + } + // Compute the required historic checkpoint number + const newHistoricCheckpointNumber = finalisedCheckpoint.checkpointNumber - this.historyToKeep + 1; + if (newHistoricCheckpointNumber <= 1) { + return; + } + // Retrieve the historic checkpoint + const historicCheckpoints = await this.l2BlockSource.getCheckpoints( + CheckpointNumber(newHistoricCheckpointNumber), + 1, + ); + if (historicCheckpoints.length === 0 || historicCheckpoints[0] === undefined) { + this.log.warn(`Failed to retrieve checkpoint number ${newHistoricCheckpointNumber} from Archiver`); + return; + } + const historicCheckpoint = historicCheckpoints[0]; + if 
(historicCheckpoint.checkpoint.blocks.length === 0 || historicCheckpoint.checkpoint.blocks[0] === undefined) { + this.log.warn(`Retrieved checkpoint number ${newHistoricCheckpointNumber} has no blocks!`); return; } - this.log.verbose(`Pruning historic blocks to ${newHistoricBlock}`); - const status = await this.merkleTreeDb.removeHistoricalBlocks(BlockNumber(newHistoricBlock)); + // Find the block at the start of the checkpoint and remove blocks up to this one + const newHistoricBlock = historicCheckpoint.checkpoint.blocks[0]; + this.log.verbose(`Pruning historic blocks to ${newHistoricBlock.number}`); + const status = await this.merkleTreeDb.removeHistoricalBlocks(BlockNumber(newHistoricBlock.number)); this.log.debug(`World state summary `, status.summary); } diff --git a/yarn-project/world-state/src/test/integration.test.ts b/yarn-project/world-state/src/test/integration.test.ts index 9a784bcfec27..4f75871da279 100644 --- a/yarn-project/world-state/src/test/integration.test.ts +++ b/yarn-project/world-state/src/test/integration.test.ts @@ -52,7 +52,7 @@ describe('world-state integration', () => { worldStateBlockCheckIntervalMS: 20, worldStateBlockRequestBatchSize: 5, worldStateDbMapSizeKb: 1024 * 1024, - worldStateBlockHistory: 0, + worldStateCheckpointHistory: 0, }; archiver = new MockPrefilledArchiver(checkpoints); From ff4cf991874da41a26e755ffa45126adbc3ca2d4 Mon Sep 17 00:00:00 2001 From: danielntmd Date: Thu, 12 Feb 2026 08:59:14 +0000 Subject: [PATCH 42/62] feat: add API key authentication for admin RPC endpoint MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduces auto-generated API key authentication for the admin JSON-RPC endpoint (for when docker maps NAT to the outside and general security). Key design decisions: - Keys are always auto-generated (operators cannot supply their own) to prevent weak key usage. 
- The raw key is displayed once at startup via stdout; only the SHA-256 hash is persisted to `/admin/api_key_hash`. - Supports both `x-api-key` and `Authorization: Bearer ` headers. - Health check endpoint (`GET /status`) is excluded from auth for k8s liveness/readiness probes. Modes of operation: - Persistent mode (with `--data-directory`): key hash is persisted and reused across restarts. - Opt-out: `--no-admin-api-key` / `AZTEC_NO_ADMIN_API_KEY=true` disables auth entirely. - Reset: `--reset-admin-api-key` / `AZTEC_RESET_ADMIN_API_KEY=true` forces regeneration of the key. Deployments: - Admin API key auth is disabled by default in Helm charts for now (`noAdminApiKey: true` in aztec-node base chart). Set to `false` in production values to enable. - E2E tests and Spartan deployments use no admin API key. Files added: - `api_key_auth.ts` — Koa middleware + SHA-256 hashing utility - `admin_api_key_store.ts` — key resolution, generation, and persistence - Unit, integration, and middleware tests Files modified: - `aztec_start_action.ts` — wires up middleware and displays key at startup - `aztec_start_options.ts` — adds `--no-admin-api-key` and `--reset-admin-api-key` flags - `safe_json_rpc_client.ts` — supports passing API key header from clients - `aztec-node-admin.ts` — `createAztecNodeAdminClient` now accepts an optional `apiKey` parameter that is sent as a header - Helm charts and env_var.ts updated accordingly --- .../aztec-node/templates/_pod-template.yaml | 7 + spartan/aztec-node/values.yaml | 9 + spartan/aztec-validator/values.yaml | 2 + .../aztec/src/cli/admin_api_key_store.test.ts | 170 ++++++++++++++++++ .../aztec/src/cli/admin_api_key_store.ts | 128 +++++++++++++ .../aztec/src/cli/aztec_start_action.ts | 44 ++++- .../aztec/src/cli/aztec_start_options.ts | 23 +++ yarn-project/foundation/src/config/env_var.ts | 3 + .../json-rpc/client/safe_json_rpc_client.ts | 2 + .../server/api_key_auth.integration.test.ts | 140 +++++++++++++++ 
.../src/json-rpc/server/api_key_auth.test.ts | 91 ++++++++++ .../src/json-rpc/server/api_key_auth.ts | 63 +++++++ .../foundation/src/json-rpc/server/index.ts | 1 + .../stdlib/src/interfaces/aztec-node-admin.ts | 2 + 14 files changed, 684 insertions(+), 1 deletion(-) create mode 100644 yarn-project/aztec/src/cli/admin_api_key_store.test.ts create mode 100644 yarn-project/aztec/src/cli/admin_api_key_store.ts create mode 100644 yarn-project/foundation/src/json-rpc/server/api_key_auth.integration.test.ts create mode 100644 yarn-project/foundation/src/json-rpc/server/api_key_auth.test.ts create mode 100644 yarn-project/foundation/src/json-rpc/server/api_key_auth.ts diff --git a/spartan/aztec-node/templates/_pod-template.yaml b/spartan/aztec-node/templates/_pod-template.yaml index 2011f6a609a8..37f5efbb4ee1 100644 --- a/spartan/aztec-node/templates/_pod-template.yaml +++ b/spartan/aztec-node/templates/_pod-template.yaml @@ -190,6 +190,13 @@ spec: value: "{{ .Values.service.rpc.port }}" - name: AZTEC_ADMIN_PORT value: "{{ .Values.service.admin.port }}" + {{- if .Values.node.adminApiKeyHash }} + - name: AZTEC_ADMIN_API_KEY_HASH + value: {{ .Values.node.adminApiKeyHash | quote }} + {{- else if .Values.node.noAdminApiKey }} + - name: AZTEC_NO_ADMIN_API_KEY + value: "true" + {{- end }} - name: LOG_LEVEL value: "{{ .Values.node.logLevel }}" - name: LOG_JSON diff --git a/spartan/aztec-node/values.yaml b/spartan/aztec-node/values.yaml index 8f1cb7af0ce2..734b80e51d9d 100644 --- a/spartan/aztec-node/values.yaml +++ b/spartan/aztec-node/values.yaml @@ -95,6 +95,15 @@ node: envEnabled: false filesEnabled: false + # -- SHA-256 hex hash of a pre-generated admin API key. + # When set, the node uses this hash for authentication instead of auto-generating a key. + # Generate with: echo -n "your-api-key" | sha256sum | cut -d' ' -f1 + adminApiKeyHash: "" + + # -- Disable admin API key authentication. + # Set to false in production to enable API key auth. 
+ noAdminApiKey: true + # the address that will receive block or proof rewards coinbase: diff --git a/spartan/aztec-validator/values.yaml b/spartan/aztec-validator/values.yaml index 9868263c5baa..b5112c561d8a 100644 --- a/spartan/aztec-validator/values.yaml +++ b/spartan/aztec-validator/values.yaml @@ -25,6 +25,8 @@ validator: replicaCount: 1 node: + # Set to false in production to enable API key auth. + noAdminApiKey: true configMap: envEnabled: true secret: diff --git a/yarn-project/aztec/src/cli/admin_api_key_store.test.ts b/yarn-project/aztec/src/cli/admin_api_key_store.test.ts new file mode 100644 index 000000000000..c91e05c44865 --- /dev/null +++ b/yarn-project/aztec/src/cli/admin_api_key_store.test.ts @@ -0,0 +1,170 @@ +import { sha256Hash } from '@aztec/foundation/json-rpc/server'; +import { createLogger } from '@aztec/foundation/log'; + +import { promises as fs } from 'fs'; +import { mkdtemp, rm } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { type ResolveAdminApiKeyOptions, resolveAdminApiKey } from './admin_api_key_store.js'; + +describe('resolveAdminApiKey', () => { + const log = createLogger('test:admin-api-key'); + let tempDir: string | undefined; + + beforeEach(() => { + tempDir = undefined; + }); + + afterEach(async () => { + if (tempDir) { + await rm(tempDir, { recursive: true, force: true }); + } + }); + + describe('opt-out (noAdminApiKey = true)', () => { + it('returns undefined when auth is disabled', async () => { + const result = await resolveAdminApiKey({ noAdminApiKey: true }, log); + expect(result).toBeUndefined(); + }); + }); + + describe('ephemeral mode (no dataDirectory)', () => { + it('returns a key resolution with rawKey and apiKeyHash', async () => { + const result = await resolveAdminApiKey({}, log); + expect(result).toBeDefined(); + expect(result!.rawKey).toBeDefined(); + expect(result!.apiKeyHash).toBeDefined(); + }); + + it('returns rawKey that is a 64-char hex string', async () => { + 
const result = await resolveAdminApiKey({}, log); + expect(result!.rawKey).toMatch(/^[0-9a-f]{64}$/); + }); + + it('returns apiKeyHash that is SHA-256 of rawKey', async () => { + const result = await resolveAdminApiKey({}, log); + expect(result!.apiKeyHash).toEqual(sha256Hash(result!.rawKey!)); + }); + + it('generates a different key each call', async () => { + const result1 = await resolveAdminApiKey({}, log); + const result2 = await resolveAdminApiKey({}, log); + expect(result1!.rawKey).not.toBe(result2!.rawKey); + }); + }); + + describe('persistent mode (with dataDirectory)', () => { + let opts: ResolveAdminApiKeyOptions; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'aztec-api-key-test-')); + opts = { dataDirectory: tempDir }; + }); + + it('generates a new key on first run', async () => { + const result = await resolveAdminApiKey(opts, log); + expect(result).toBeDefined(); + expect(result!.rawKey).toBeDefined(); + expect(result!.rawKey).toMatch(/^[0-9a-f]{64}$/); + expect(result!.apiKeyHash).toEqual(sha256Hash(result!.rawKey!)); + }); + + it('persists the hash to disk on first run', async () => { + const result = await resolveAdminApiKey(opts, log); + const hashFilePath = join(tempDir!, 'admin', 'api_key_hash'); + const storedHash = (await fs.readFile(hashFilePath, 'utf-8')).trim(); + expect(storedHash).toBe(result!.apiKeyHash.toString('hex')); + }); + + it('sets restrictive permissions on the hash file', async () => { + await resolveAdminApiKey(opts, log); + const hashFilePath = join(tempDir!, 'admin', 'api_key_hash'); + const stat = await fs.stat(hashFilePath); + expect(stat.mode & 0o777).toBe(0o600); + }); + + it('loads the stored hash on subsequent runs (no rawKey)', async () => { + // First run, generates and persists + const firstResult = await resolveAdminApiKey(opts, log); + const firstHash = firstResult!.apiKeyHash; + + // Second run, loads from disk + const secondResult = await resolveAdminApiKey(opts, log); + + 
expect(secondResult).toBeDefined(); + expect(secondResult!.apiKeyHash).toEqual(firstHash); + expect(secondResult!.rawKey).toBeUndefined(); // Not newly generated + }); + + it('regenerates if stored hash is invalid (wrong length)', async () => { + // Write an invalid hash + const adminDir = join(tempDir!, 'admin'); + await fs.mkdir(adminDir, { recursive: true }); + await fs.writeFile(join(adminDir, 'api_key_hash'), 'tooshort', 'utf-8'); + + const result = await resolveAdminApiKey(opts, log); + expect(result).toBeDefined(); + expect(result!.rawKey).toBeDefined(); // Freshly generated + expect(result!.apiKeyHash).toEqual(sha256Hash(result!.rawKey!)); + }); + + it('creates the admin subdirectory if it does not exist', async () => { + await resolveAdminApiKey(opts, log); + const adminDir = join(tempDir!, 'admin'); + const stat = await fs.stat(adminDir); + expect(stat.isDirectory()).toBe(true); + }); + }); + + describe('reset (resetAdminApiKey = true)', () => { + let opts: ResolveAdminApiKeyOptions; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'aztec-api-key-test-')); + opts = { dataDirectory: tempDir, resetAdminApiKey: true }; + }); + + it('generates a new key even when a valid hash already exists', async () => { + // First run, normal generation + const firstResult = await resolveAdminApiKey({ dataDirectory: tempDir }, log); + const firstHash = firstResult!.apiKeyHash; + + // Second run with reset, should generate a new key + const resetResult = await resolveAdminApiKey(opts, log); + + expect(resetResult).toBeDefined(); + expect(resetResult!.rawKey).toBeDefined(); // New raw key returned + expect(resetResult!.apiKeyHash).not.toEqual(firstHash); // Different hash + expect(resetResult!.apiKeyHash).toEqual(sha256Hash(resetResult!.rawKey!)); + }); + + it('overwrites the persisted hash file', async () => { + // First run — normal generation + await resolveAdminApiKey({ dataDirectory: tempDir }, log); + const hashFilePath = join(tempDir!, 'admin', 
'api_key_hash'); + const oldHash = (await fs.readFile(hashFilePath, 'utf-8')).trim(); + + // Reset run + const resetResult = await resolveAdminApiKey(opts, log); + const newHash = (await fs.readFile(hashFilePath, 'utf-8')).trim(); + + expect(newHash).not.toBe(oldHash); + expect(newHash).toBe(resetResult!.apiKeyHash.toString('hex')); + }); + + it('works even when no hash file exists yet (first run with reset)', async () => { + const result = await resolveAdminApiKey(opts, log); + + expect(result).toBeDefined(); + expect(result!.rawKey).toBeDefined(); + expect(result!.apiKeyHash).toEqual(sha256Hash(result!.rawKey!)); + }); + + it('has no effect in ephemeral mode (always generates anyway)', async () => { + const result = await resolveAdminApiKey({ resetAdminApiKey: true }, log); + expect(result).toBeDefined(); + expect(result!.rawKey).toBeDefined(); + }); + }); +}); diff --git a/yarn-project/aztec/src/cli/admin_api_key_store.ts b/yarn-project/aztec/src/cli/admin_api_key_store.ts new file mode 100644 index 000000000000..6d0ea07c9c81 --- /dev/null +++ b/yarn-project/aztec/src/cli/admin_api_key_store.ts @@ -0,0 +1,128 @@ +import { randomBytes } from '@aztec/foundation/crypto/random'; +import { sha256Hash } from '@aztec/foundation/json-rpc/server'; +import type { Logger } from '@aztec/foundation/log'; + +import { promises as fs } from 'fs'; +import { join } from 'path'; + +/** Subdirectory under dataDirectory for admin API key storage. */ +const ADMIN_STORE_DIR = 'admin'; +const HASH_FILE_NAME = 'api_key_hash'; + +/** + * Result of resolving the admin API key. + * Contains the SHA-256 hex hash of the API key to be used by the auth middleware, + * and optionally the raw key when newly generated (so the caller can display it). + */ +export interface AdminApiKeyResolution { + /** The SHA-256 hash of the API key. */ + apiKeyHash: Buffer; + /** + * The raw API key, only present when a new key was generated during this call. 
+ * The caller MUST display this to the operator — it will not be stored or returned again. + */ + rawKey?: string; +} + +export interface ResolveAdminApiKeyOptions { + /** SHA-256 hex hash of a pre-generated API key. When set, the node uses this hash directly. */ + adminApiKeyHash?: string; + /** If true, disable admin API key auth entirely. */ + noAdminApiKey?: boolean; + /** If true, force-generate a new key even if one is already persisted. */ + resetAdminApiKey?: boolean; + /** Root data directory for persistent storage. */ + dataDirectory?: string; +} + +/** + * Resolves the admin API key for the admin RPC endpoint. + * + * Strategy: + * 1. If opt-out flag is set (`noAdminApiKey`), return undefined (no auth). + * 2. If a pre-generated hash is provided (`adminApiKeyHash`), use it directly. + * 3. If a data directory exists, look for a persisted hash file + * at `/admin/api_key_hash`: + * - If `resetAdminApiKey` is set, skip loading and force-generate a new key. + * - Found: use the stored hash (operator already saved the key from first run). + * - Not found: auto-generate a random key, display it once, persist the hash. + * 4. If no data directory: generate a random key + * each run and display it (cannot persist). + * + * @param options - The options for resolving the admin API key. + * @param log - Logger for outputting the key and status messages. + * @returns The resolved API key hash, or undefined if auth is disabled. + */ +export async function resolveAdminApiKey( + options: ResolveAdminApiKeyOptions, + log: Logger, +): Promise<AdminApiKeyResolution | undefined> { + // Operator explicitly opted out of admin auth + if (options.noAdminApiKey) { + log.warn('Admin API key authentication is DISABLED (--no-admin-api-key / AZTEC_NO_ADMIN_API_KEY)'); + return undefined; + } + + // Operator provided a pre-generated hash (e.g.
via AZTEC_ADMIN_API_KEY_HASH env var) + if (options.adminApiKeyHash) { + const hex = options.adminApiKeyHash.trim(); + if (hex.length !== 64 || !/^[0-9a-f]{64}$/.test(hex)) { + throw new Error(`Invalid admin API key hash: expected 64-char hex string, got "${hex}"`); + } + log.info('Admin API key authentication enabled (using pre-configured key hash)'); + return { apiKeyHash: Buffer.from(hex, 'hex') }; + } + + // Persistent storage available, load or generate key + if (options.dataDirectory) { + const adminDir = join(options.dataDirectory, ADMIN_STORE_DIR); + const hashFilePath = join(adminDir, HASH_FILE_NAME); + + // Unless a reset is forced, try to load the existing hash from disk + if (!options.resetAdminApiKey) { + try { + const storedHash = (await fs.readFile(hashFilePath, 'utf-8')).trim(); + if (storedHash.length === 64) { + log.info('Admin API key authentication enabled (loaded stored key hash from disk)'); + return { apiKeyHash: Buffer.from(storedHash, 'hex') }; + } + log.warn(`Invalid stored admin API key hash at ${hashFilePath}, regenerating...`); + } catch (err: any) { + if (err.code !== 'ENOENT') { + log.warn(`Failed to read admin API key hash from ${hashFilePath}: ${err.message}`); + } + // File doesn't exist — fall through to generate + } + } else { + log.warn('Admin API key reset requested — generating a new key'); + } + + // Generate a new key, persist the hash, and return the raw key for the caller to display + const { rawKey, hash } = generateApiKey(); + await fs.mkdir(adminDir, { recursive: true }); + await fs.writeFile(hashFilePath, hash.toString('hex'), 'utf-8'); + // Set restrictive permissions (owner read/write only) + await fs.chmod(hashFilePath, 0o600); + + log.info('Admin API key authentication enabled (new key generated and hash persisted to disk)'); + return { apiKeyHash: hash, rawKey }; + } + + // No data directory, generate a temporary key per session + const { rawKey, hash } = generateApiKey(); + + log.warn('No data directory 
configured — admin API key cannot be persisted.'); + log.warn('A temporary key has been generated for this session only.'); + + return { apiKeyHash: hash, rawKey }; +} + +/** + * Generates a cryptographically random API key and its SHA-256 hash. + * @returns The raw key (hex string) and its SHA-256 hash as a Buffer. + */ +function generateApiKey(): { rawKey: string; hash: Buffer } { + const rawKey = randomBytes(32).toString('hex'); + const hash = sha256Hash(rawKey); + return { rawKey, hash }; +} diff --git a/yarn-project/aztec/src/cli/aztec_start_action.ts b/yarn-project/aztec/src/cli/aztec_start_action.ts index 8217313dd09c..8d452f21f276 100644 --- a/yarn-project/aztec/src/cli/aztec_start_action.ts +++ b/yarn-project/aztec/src/cli/aztec_start_action.ts @@ -1,6 +1,7 @@ import { type NamespacedApiHandlers, createNamespacedSafeJsonRpcServer, + getApiKeyAuthMiddleware, startHttpRpcServer, } from '@aztec/foundation/json-rpc/server'; import type { LogFn, Logger } from '@aztec/foundation/log'; @@ -11,6 +12,7 @@ import { getOtelJsonRpcPropagationMiddleware } from '@aztec/telemetry-client'; import { createLocalNetwork } from '../local-network/index.js'; import { github, splash } from '../splash.js'; +import { resolveAdminApiKey } from './admin_api_key_store.js'; import { getCliVersion } from './release_version.js'; import { extractNamespacedOptions, installSignalHandlers } from './util.js'; import { getVersions } from './versioning.js'; @@ -99,14 +101,54 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg // If there are any admin services, start a separate JSON-RPC server for them if (Object.entries(adminServices).length > 0) { + const adminMiddlewares = [getOtelJsonRpcPropagationMiddleware(), getVersioningMiddleware(versions)]; + + // Resolve the admin API key (auto-generated and persisted, or opt-out) + const apiKeyResolution = await resolveAdminApiKey( + { + adminApiKeyHash: options.adminApiKeyHash, + noAdminApiKey: 
options.noAdminApiKey, + resetAdminApiKey: options.resetAdminApiKey, + dataDirectory: options.dataDirectory, + }, + debugLogger, + ); + if (apiKeyResolution) { + adminMiddlewares.unshift(getApiKeyAuthMiddleware(apiKeyResolution.apiKeyHash)); + } else { + debugLogger.warn('No admin API key set — admin endpoint is unauthenticated'); + } + const rpcServer = createNamespacedSafeJsonRpcServer(adminServices, { http200OnError: false, log: debugLogger, - middlewares: [getOtelJsonRpcPropagationMiddleware(), getVersioningMiddleware(versions)], + middlewares: adminMiddlewares, maxBatchSize: options.rpcMaxBatchSize, maxBodySizeBytes: options.rpcMaxBodySize, }); const { port } = await startHttpRpcServer(rpcServer, { port: options.adminPort }); debugLogger.info(`Aztec Server admin API listening on port ${port}`, versions); + + // Display the API key after the server has started + // Uses userLog which is never filtered by LOG_LEVEL. + if (apiKeyResolution?.rawKey) { + const separator = '='.repeat(70); + userLog(''); + userLog(separator); + userLog(' ADMIN API KEY (save this — it will NOT be shown again)'); + userLog(''); + userLog(` ${apiKeyResolution.rawKey}`); + userLog(''); + userLog(` Use via header: x-api-key: `); + userLog(` Or via header: Authorization: Bearer `); + if (options.dataDirectory) { + userLog(''); + userLog(' The key hash has been persisted — on next restart, the same key will be used.'); + } + userLog(''); + userLog(' To disable admin auth: --no-admin-api-key or AZTEC_NO_ADMIN_API_KEY=true'); + userLog(separator); + userLog(''); + } } } diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 46ef250c1359..48bec01d75cc 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -142,6 +142,29 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { env: 'AZTEC_ADMIN_PORT', parseVal: val => parseInt(val, 10), }, + { 
+ flag: '--admin-api-key-hash ', + description: + 'SHA-256 hex hash of a pre-generated admin API key. When set, the node uses this hash for authentication instead of auto-generating a key.', + defaultValue: undefined, + env: 'AZTEC_ADMIN_API_KEY_HASH', + }, + { + flag: '--no-admin-api-key', + description: + 'Disable API key authentication on the admin RPC endpoint. By default, a key is auto-generated, displayed once, and its hash is persisted.', + defaultValue: false, + env: 'AZTEC_NO_ADMIN_API_KEY', + parseVal: val => val === 'true' || val === '1', + }, + { + flag: '--reset-admin-api-key', + description: + 'Force-generate a new admin API key, replacing any previously persisted key hash. The new key is displayed once at startup.', + defaultValue: false, + env: 'AZTEC_RESET_ADMIN_API_KEY', + parseVal: val => val === 'true' || val === '1', + }, { flag: '--api-prefix ', description: 'Prefix for API routes on any service that is started', diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index fbc42a161bdb..17c7bbcc21a3 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -12,6 +12,9 @@ export type EnvVar = | 'ARCHIVER_VIEM_POLLING_INTERVAL_MS' | 'ARCHIVER_BATCH_SIZE' | 'AZTEC_ADMIN_PORT' + | 'AZTEC_ADMIN_API_KEY_HASH' + | 'AZTEC_NO_ADMIN_API_KEY' + | 'AZTEC_RESET_ADMIN_API_KEY' | 'AZTEC_NODE_ADMIN_URL' | 'AZTEC_NODE_URL' | 'AZTEC_PORT' diff --git a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts index 926b227b7cdf..b10c6535f8ed 100644 --- a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts +++ b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts @@ -24,6 +24,7 @@ export type SafeJsonRpcClientOptions = { batchWindowMS?: number; maxBatchSize?: number; maxRequestBodySize?: number; + extraHeaders?: Record; onResponse?: (res: { 
response: any; headers: { get: (header: string) => string | null | undefined }; @@ -129,6 +130,7 @@ export function createSafeJsonRpcClient( const { response, headers } = await fetch( host, rpcCalls.map(({ request }) => request), + config.extraHeaders, ); if (config.onResponse) { diff --git a/yarn-project/foundation/src/json-rpc/server/api_key_auth.integration.test.ts b/yarn-project/foundation/src/json-rpc/server/api_key_auth.integration.test.ts new file mode 100644 index 000000000000..fb9e6077480e --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/server/api_key_auth.integration.test.ts @@ -0,0 +1,140 @@ +import type http from 'http'; + +import { makeFetch } from '../client/fetch.js'; +import { createSafeJsonRpcClient } from '../client/safe_json_rpc_client.js'; +import { TestNote, TestState, type TestStateApi, TestStateSchema } from '../fixtures/test_state.js'; +import { getApiKeyAuthMiddleware, sha256Hash } from './api_key_auth.js'; +import { createSafeJsonRpcServer, startHttpRpcServer } from './safe_json_rpc_server.js'; + +describe('API key auth integration', () => { + const RAW_API_KEY = 'integration-test-api-key-0123456789abcdef0123456789abcdef'; + const API_KEY_HASH = sha256Hash(RAW_API_KEY); + + let testState: TestState; + let httpServer: http.Server & { port: number }; + let url: string; + + beforeEach(async () => { + testState = new TestState([new TestNote('a'), new TestNote('b')]); + const rpcServer = createSafeJsonRpcServer(testState, TestStateSchema, { + middlewares: [getApiKeyAuthMiddleware(API_KEY_HASH)], + }); + httpServer = await startHttpRpcServer(rpcServer, { host: '127.0.0.1' }); + url = `http://127.0.0.1:${httpServer.port}`; + }); + + afterEach(() => { + httpServer?.close(); + }); + + const noRetryFetch = makeFetch([], true); + + function createClient(apiKey?: string) { + return createSafeJsonRpcClient(url, TestStateSchema, { + fetch: noRetryFetch, + ...(apiKey ? 
{ extraHeaders: { 'x-api-key': apiKey } } : {}), + }); + } + + function createClientWithBearer(apiKey: string) { + return createSafeJsonRpcClient(url, TestStateSchema, { + fetch: noRetryFetch, + extraHeaders: { Authorization: `Bearer ${apiKey}` }, + }); + } + + describe('with valid API key', () => { + it('allows RPC calls via x-api-key header', async () => { + const client = createClient(RAW_API_KEY); + const count = await client.count(); + expect(count).toBe(2); + }); + + it('allows RPC calls via Authorization: Bearer header', async () => { + const client = createClientWithBearer(RAW_API_KEY); + const note = await client.getNote(0); + expect(note?.toString()).toBe('a'); + }); + + it('allows multiple sequential calls', async () => { + const client = createClient(RAW_API_KEY); + const count1 = await client.count(); + await client.addNotes([new TestNote('c')]); + const count2 = await client.count(); + expect(count1).toBe(2); + expect(count2).toBe(3); + }); + }); + + describe('with invalid API key', () => { + it('rejects RPC calls with wrong key', async () => { + const client = createClient('wrong-api-key'); + await expect(client.count()).rejects.toThrow(); + }); + + it('rejects RPC calls with empty key header', async () => { + const client = createClient(''); + await expect(client.count()).rejects.toThrow(); + }); + }); + + describe('with no API key', () => { + it('rejects RPC calls without any auth header', async () => { + const client = createClient(); // no key + await expect(client.count()).rejects.toThrow(); + }); + }); + + describe('health check bypass', () => { + it('allows GET /status without auth', async () => { + const response = await fetch(`${url}/status`); + expect(response.status).toBe(200); + }); + }); + + describe('full flow: generate key, authenticate, reject bad key', () => { + it('simulates the operator flow end-to-end', async () => { + // 1: "Generate" an API key (simulating what resolveAdminApiKey does) + const { randomBytes } = await 
import('crypto'); + const generatedKey = randomBytes(32).toString('hex'); + const generatedHash = sha256Hash(generatedKey); + + // 2: Start a NEW server with the generated hash + const freshState = new TestState([new TestNote('x'), new TestNote('y')]); + const freshRpcServer = createSafeJsonRpcServer(freshState, TestStateSchema, { + middlewares: [getApiKeyAuthMiddleware(generatedHash)], + }); + const freshHttpServer = await startHttpRpcServer(freshRpcServer, { host: '127.0.0.1' }); + const freshUrl = `http://127.0.0.1:${freshHttpServer.port}`; + + try { + // 3: Make an authenticated request — should succeed + const goodClient = createSafeJsonRpcClient(freshUrl, TestStateSchema, { + fetch: noRetryFetch, + extraHeaders: { 'x-api-key': generatedKey }, + }); + const count = await goodClient.count(); + expect(count).toBe(2); + + // 4: Make a request with a bad key, should fail + const badClient = createSafeJsonRpcClient(freshUrl, TestStateSchema, { + fetch: noRetryFetch, + extraHeaders: { 'x-api-key': 'definitely-not-the-right-key' }, + }); + await expect(badClient.count()).rejects.toThrow(); + + // 5: Make a request with no key, should fail + const noAuthClient = createSafeJsonRpcClient(freshUrl, TestStateSchema, { + fetch: noRetryFetch, + }); + await expect(noAuthClient.count()).rejects.toThrow(); + + // 6: Health check should still work without auth + const statusResp = await fetch(`${freshUrl}/status`); + expect(statusResp.status).toBe(200); + } finally { + freshHttpServer.close(); + } + }); + }); +}); diff --git a/yarn-project/foundation/src/json-rpc/server/api_key_auth.test.ts b/yarn-project/foundation/src/json-rpc/server/api_key_auth.test.ts new file mode 100644 index 000000000000..5d0ad7e826ba --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/server/api_key_auth.test.ts @@ -0,0 +1,91 @@ +import Koa from 'koa'; +import request from 'supertest'; + +import { getApiKeyAuthMiddleware, sha256Hash } from './api_key_auth.js'; + 
+describe('getApiKeyAuthMiddleware', () => { + const RAW_API_KEY = 'test-api-key-for-unit-tests-1234567890abcdef'; + const API_KEY_HASH = sha256Hash(RAW_API_KEY); + + let app: Koa; + + beforeEach(() => { + app = new Koa(); + app.use(getApiKeyAuthMiddleware(API_KEY_HASH)); + // A simple handler that returns 200 if middleware passes + app.use((ctx: Koa.Context) => { + ctx.status = 200; + ctx.body = { jsonrpc: '2.0', result: 'ok' }; + }); + }); + + const sendPost = (headers: Record = {}) => + request(app.callback()) + .post('/') + .send({ jsonrpc: '2.0', method: 'test', params: [], id: 1 }) + .set({ 'content-type': 'application/json', ...headers }); + + describe('x-api-key header', () => { + it('allows request with valid API key', async () => { + const response = await sendPost({ 'x-api-key': RAW_API_KEY }); + expect(response.status).toBe(200); + expect(response.body.result).toBe('ok'); + }); + + it('rejects request with invalid API key', async () => { + const response = await sendPost({ 'x-api-key': 'wrong-key' }); + expect(response.status).toBe(401); + expect(response.body.error.message).toContain('Unauthorized'); + }); + }); + + describe('Authorization: Bearer header', () => { + it('allows request with valid Bearer token', async () => { + const response = await sendPost({ Authorization: `Bearer ${RAW_API_KEY}` }); + expect(response.status).toBe(200); + expect(response.body.result).toBe('ok'); + }); + + it('allows case-insensitive Bearer prefix', async () => { + const response = await sendPost({ Authorization: `bearer ${RAW_API_KEY}` }); + expect(response.status).toBe(200); + }); + + it('rejects request with invalid Bearer token', async () => { + const response = await sendPost({ Authorization: 'Bearer wrong-key' }); + expect(response.status).toBe(401); + expect(response.body.error.message).toContain('Unauthorized'); + }); + }); + + describe('missing credentials', () => { + it('rejects request with no auth headers', async () => { + const response = await sendPost(); 
+ expect(response.status).toBe(401); + expect(response.body.error.message).toContain('Unauthorized'); + }); + + it('returns a JSON-RPC error envelope', async () => { + const response = await sendPost(); + expect(response.body).toMatchObject({ + jsonrpc: '2.0', + id: null, + error: { code: -32000 }, + }); + }); + }); + + describe('health check bypass', () => { + it('allows GET /status without any auth', async () => { + const response = await request(app.callback()).get('/status'); + // The status endpoint itself isn't handled by our simple handler, + // but the important thing is the middleware does NOT return 401. + expect(response.status).not.toBe(401); + }); + + it('still requires auth for POST /status', async () => { + const response = await request(app.callback()).post('/status').send({}); + expect(response.status).toBe(401); + }); + }); +}); diff --git a/yarn-project/foundation/src/json-rpc/server/api_key_auth.ts b/yarn-project/foundation/src/json-rpc/server/api_key_auth.ts new file mode 100644 index 000000000000..a22d45730f32 --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/server/api_key_auth.ts @@ -0,0 +1,63 @@ +import { timingSafeEqual } from 'crypto'; +import type Koa from 'koa'; + +import { sha256 } from '../../crypto/sha256/index.js'; +import { createLogger } from '../../log/index.js'; + +const log = createLogger('json-rpc:api-key-auth'); + +/** + * Computes the SHA-256 hash of a string and returns it as a Buffer. + * @param input - The input string to hash. + * @returns The SHA-256 hash as a Buffer. + */ +export function sha256Hash(input: string): Buffer { + return sha256(Buffer.from(input)); +} + +/** + * Creates a Koa middleware that enforces API key authentication on all requests + * except the health check endpoint (GET /status). + * + * The API key can be provided via the `x-api-key` header or the `Authorization: Bearer ` header. + * Comparison is done by hashing the provided key with SHA-256 and comparing against the stored hash. 
+ * + * @param apiKeyHash - The SHA-256 hash of the expected API key as a Buffer. + * @returns A Koa middleware that rejects requests without a valid API key. + */ +export function getApiKeyAuthMiddleware( + apiKeyHash: Buffer, +): (ctx: Koa.Context, next: () => Promise) => Promise { + return async (ctx: Koa.Context, next: () => Promise) => { + // Allow health check through without auth + if (ctx.path === '/status' && ctx.method === 'GET') { + return next(); + } + + const providedKey = ctx.get('x-api-key') || ctx.get('authorization')?.replace(/^Bearer\s+/i, ''); + if (!providedKey) { + log.warn(`Rejected admin RPC request from ${ctx.ip}: missing API key`); + ctx.status = 401; + ctx.body = { + jsonrpc: '2.0', + id: null, + error: { code: -32000, message: 'Unauthorized: invalid or missing API key' }, + }; + return; + } + + const providedHashBuf = sha256Hash(providedKey); + if (!timingSafeEqual(apiKeyHash, providedHashBuf)) { + log.warn(`Rejected admin RPC request from ${ctx.ip}: invalid API key`); + ctx.status = 401; + ctx.body = { + jsonrpc: '2.0', + id: null, + error: { code: -32000, message: 'Unauthorized: invalid or missing API key' }, + }; + return; + } + + await next(); + }; +} diff --git a/yarn-project/foundation/src/json-rpc/server/index.ts b/yarn-project/foundation/src/json-rpc/server/index.ts index 048e53af1fcc..2e35821006ed 100644 --- a/yarn-project/foundation/src/json-rpc/server/index.ts +++ b/yarn-project/foundation/src/json-rpc/server/index.ts @@ -1 +1,2 @@ +export * from './api_key_auth.js'; export * from './safe_json_rpc_server.js'; diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts index 1003734261f8..6071b1adc310 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts @@ -94,10 +94,12 @@ export function createAztecNodeAdminClient( url: string, versions: Partial = {}, fetch = defaultFetch, + apiKey?: 
string, ): AztecNodeAdmin { return createSafeJsonRpcClient(url, AztecNodeAdminApiSchema, { namespaceMethods: 'nodeAdmin', fetch, onResponse: getVersioningResponseHandler(versions), + ...(apiKey ? { extraHeaders: { 'x-api-key': apiKey } } : {}), }); } From fc143f8d3f1bdb4f555f4194b3dd3b88883aad7c Mon Sep 17 00:00:00 2001 From: Phil Windle Date: Mon, 16 Feb 2026 20:51:15 +0000 Subject: [PATCH 43/62] Comments --- yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts | 1 + yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts index c9d60642e641..6703786b0f88 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_multiple.test.ts @@ -50,6 +50,7 @@ describe('e2e_epochs/epochs_multiple', () => { // Check that finalized blocks are purged from world state // Right now finalization means a checkpoint is two L2 epochs deep. If this rule changes then this test needs to be updated. + // This test is setup as 1 block per checkpoint const provenBlockNumber = epochEndBlockNumber; const finalizedBlockNumber = Math.max(provenBlockNumber - context.config.aztecEpochDuration * 2, 0); const expectedOldestHistoricBlock = Math.max(finalizedBlockNumber - WORLD_STATE_CHECKPOINT_HISTORY + 1, 1); diff --git a/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts b/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts index 8a9017b46219..50a8db649dd6 100644 --- a/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts +++ b/yarn-project/end-to-end/src/e2e_pruned_blocks.test.ts @@ -93,6 +93,7 @@ describe('e2e_pruned_blocks', () => { // We now mine dummy blocks, mark them as proven and wait for the node to process them, which should result in older // blocks (notably the one with the minted note) being pruned. 
Given world state prunes based on the finalized tip, // and we are defining the finalized tip as two epochs behind the proven one, we need to mine two extra epochs. + // This test assumes 1 block per checkpoint await aztecNodeAdmin!.setConfig({ minTxsPerBlock: 0 }); await waitBlocks(WORLD_STATE_CHECKPOINT_HISTORY + EPOCH_LENGTH * 2 + 1); await cheatCodes.rollup.markAsProven(); From e9c7d86950091320f4599d7d2b2d036f8901b6cb Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Mon, 16 Feb 2026 17:09:45 +0000 Subject: [PATCH 44/62] chore: fix deployments --- spartan/scripts/deploy_network.sh | 2 +- spartan/scripts/setup_gcp_secrets.sh | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index fedba6a4fd38..9ad2d08982b7 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -97,7 +97,7 @@ SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-0} SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-8} SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-} SEQ_BUILD_CHECKPOINT_IF_EMPTY=${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-} -SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT=${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT:0} +SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT=${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT:-0} PROVER_REPLICAS=${PROVER_REPLICAS:-4} PROVER_AGENTS_PER_PROVER=${PROVER_AGENTS_PER_PROVER:-1} R2_ACCESS_KEY_ID=${R2_ACCESS_KEY_ID:-} diff --git a/spartan/scripts/setup_gcp_secrets.sh b/spartan/scripts/setup_gcp_secrets.sh index 2bde3c4e4b15..362544669e36 100755 --- a/spartan/scripts/setup_gcp_secrets.sh +++ b/spartan/scripts/setup_gcp_secrets.sh @@ -62,9 +62,11 @@ mask_secret_value() { if [[ "$is_json_secret" == "true" ]]; then jq -r '.[]' "$secret_file" | while IFS= read -r element; do - echo "::add-mask::$element" + if [[ -n "$element" ]]; then + echo "::add-mask::$element" + fi done - else + elif [[ -n "$secret_value" ]]; then echo "::add-mask::$secret_value" fi } From 
1207cfa7726117bc8382adf5beb2cf739c5e6ce0 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Mon, 16 Feb 2026 23:59:25 +0100 Subject: [PATCH 45/62] feat: suspend sentinel during escape hatch (#20471) Suspends the `processSlot` and `computeProvenPerformance` to do nothing when in an active escape hatch. --- .../aztec-node/src/sentinel/sentinel.test.ts | 79 +++++++++++++++++++ .../aztec-node/src/sentinel/sentinel.ts | 13 ++- 2 files changed, 90 insertions(+), 2 deletions(-) diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts index add05d7e7525..b57b619be97d 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts @@ -657,6 +657,81 @@ describe('sentinel', () => { }); }); + describe('escape hatch', () => { + it('processSlot skips tracking when escape hatch is open', async () => { + const validator1 = EthAddress.random(); + const validator2 = EthAddress.random(); + const committee = [validator1, validator2]; + + epochCache.getCommittee.mockResolvedValue({ + committee, + seed: 0n, + epoch, + isEscapeHatchOpen: true, + }); + + const updateSpy = jest.spyOn(store, 'updateValidators'); + + await sentinel.doProcessSlot(slot); + + // Should NOT have called updateValidators since escape hatch is open + expect(updateSpy).not.toHaveBeenCalled(); + // But lastProcessedSlot should still advance + expect(sentinel.getLastProcessedSlot()).toEqual(slot); + }); + + it('processSlot tracks normally when escape hatch is closed', async () => { + const signers = times(4, Secp256k1Signer.random); + const validators = signers.map(s => s.address); + const committee = [...validators]; + + epochCache.getCommittee.mockResolvedValue({ + committee, + seed: 0n, + epoch, + isEscapeHatchOpen: false, + }); + epochCache.computeProposerIndex.mockReturnValue(0n); + 
p2p.getCheckpointAttestationsForSlot.mockResolvedValue([]); + + const updateSpy = jest.spyOn(store, 'updateValidators'); + + await sentinel.doProcessSlot(slot); + + // Should have called updateValidators since escape hatch is closed + expect(updateSpy).toHaveBeenCalled(); + expect(sentinel.getLastProcessedSlot()).toEqual(slot); + }); + + it('handleChainProven skips proven performance when escape hatch is open', async () => { + const blockNumber = BlockNumber(15); + const blockHash = '0xblockhash'; + const mockBlock = await L2Block.random(blockNumber); + const blockSlot = mockBlock.header.getSlot(); + const epochNumber = getEpochAtSlot(blockSlot, l1Constants); + const validator1 = EthAddress.random(); + + archiver.getBlockHeader.calledWith(blockNumber).mockResolvedValue(mockBlock.header); + + epochCache.getCommittee.mockResolvedValue({ + committee: [validator1], + seed: 0n, + epoch: epochNumber, + isEscapeHatchOpen: true, + }); + + const emitSpy = jest.spyOn(sentinel, 'emit'); + const updateProvenSpy = jest.spyOn(store, 'updateProvenPerformance'); + + await sentinel.handleChainProven({ type: 'chain-proven', block: { number: blockNumber, hash: blockHash } }); + + // Should have stored empty performance (no offenses during escape hatch) + expect(updateProvenSpy).toHaveBeenCalledWith(epochNumber, {}); + // Should NOT have emitted any slash events + expect(emitSpy).not.toHaveBeenCalled(); + }); + }); + describe('consecutive epoch inactivity', () => { let validator1: EthAddress; let validator2: EthAddress; @@ -905,4 +980,8 @@ class TestSentinel extends Sentinel { ) { return super.checkPastInactivity(validator, currentEpoch, requiredConsecutiveEpochs); } + + public doProcessSlot(slot: SlotNumber) { + return super.processSlot(slot); + } } diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.ts b/yarn-project/aztec-node/src/sentinel/sentinel.ts index 4be3a6972772..23f4cb21a613 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.ts +++ 
b/yarn-project/aztec-node/src/sentinel/sentinel.ts @@ -158,7 +158,11 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme protected async computeProvenPerformance(epoch: EpochNumber): Promise { const [fromSlot, toSlot] = getSlotRangeForEpoch(epoch, this.epochCache.getL1Constants()); - const { committee } = await this.epochCache.getCommittee(fromSlot); + const { committee, isEscapeHatchOpen } = await this.epochCache.getCommittee(fromSlot); + if (isEscapeHatchOpen) { + this.logger.info(`Skipping proven performance for epoch ${epoch} - escape hatch is open`); + return {}; + } if (!committee) { this.logger.trace(`No committee found for slot ${fromSlot}`); return {}; @@ -327,7 +331,12 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme * and updates overall stats. */ protected async processSlot(slot: SlotNumber) { - const { epoch, seed, committee } = await this.epochCache.getCommittee(slot); + const { epoch, seed, committee, isEscapeHatchOpen } = await this.epochCache.getCommittee(slot); + if (isEscapeHatchOpen) { + this.logger.info(`Skipping slot ${slot} at epoch ${epoch} - escape hatch is open`); + this.lastProcessedSlot = slot; + return; + } if (!committee || committee.length === 0) { this.logger.trace(`No committee found for slot ${slot} at epoch ${epoch}`); this.lastProcessedSlot = slot; From ab30ee487503bc3aed072229a9cd9dfa275c8585 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Mon, 16 Feb 2026 23:25:32 +0000 Subject: [PATCH 46/62] fix: async blob (#20559) This replaces all the sync methods used to work with blobs to their async counterparts (already supported by the kzg lib we're using). The reason for this change: at high TPS I noticed that the prover node was stuck for approx two to three minutes blocking the entire event loop soon after it started proving an epoch. The only thing it does before it sends any jobs to the orchestrator is preparing the blob committments. 
image --- .../archiver/src/archiver-sync.test.ts | 2 +- .../archiver/src/test/fake_l1_state.ts | 18 +++++++------- .../src/blobstore/blob_store_test_suite.ts | 18 +++++++------- .../blob-client/src/client/http.test.ts | 20 ++++++++-------- yarn-project/blob-client/src/client/http.ts | 22 ++++++++++------- yarn-project/blob-client/src/client/tests.ts | 4 ++-- .../filestore/filestore_blob_client.test.ts | 24 +++++++++---------- yarn-project/blob-lib/src/blob.test.ts | 18 +++++++------- yarn-project/blob-lib/src/blob.ts | 16 ++++++------- .../blob-lib/src/blob_batching.test.ts | 12 +++++----- yarn-project/blob-lib/src/blob_batching.ts | 8 +++---- yarn-project/blob-lib/src/blob_utils.test.ts | 12 +++++----- yarn-project/blob-lib/src/blob_utils.ts | 8 ++++--- yarn-project/blob-lib/src/hash.ts | 4 ++-- yarn-project/blob-lib/src/testing.ts | 2 +- .../epochs_l1_reorgs.parallel.test.ts | 6 ++--- .../e2e_l1_publisher/e2e_l1_publisher.test.ts | 2 +- .../src/utils/server/foreign_call_handler.ts | 2 +- .../light/lightweight_checkpoint_builder.ts | 2 +- .../orchestrator/block-building-helpers.ts | 4 ++-- .../block_building_helpers.test.ts | 4 ++-- .../orchestrator/checkpoint-proving-state.ts | 8 +++---- .../src/orchestrator/epoch-proving-state.ts | 2 +- .../src/orchestrator/orchestrator.ts | 23 ++++++++++-------- .../prover-node/src/job/epoch-proving-job.ts | 2 ++ .../src/publisher/sequencer-publisher.test.ts | 2 +- .../src/publisher/sequencer-publisher.ts | 4 ++-- .../validator-client/src/validator.ts | 2 +- 28 files changed, 131 insertions(+), 120 deletions(-) diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index 76a12af1f559..bbe5f3aa236e 100644 --- a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -380,7 +380,7 @@ describe('Archiver Sync', () => { }); // Create a random blob that doesn't match the checkpoint - const randomBlob = makeRandomBlob(3); + const 
randomBlob = await makeRandomBlob(3); // Override blob client to return the random blob instead of the correct one blobClient.getBlobSidecar.mockResolvedValue([randomBlob]); diff --git a/yarn-project/archiver/src/test/fake_l1_state.ts b/yarn-project/archiver/src/test/fake_l1_state.ts index 4afb84902926..e55a234b544b 100644 --- a/yarn-project/archiver/src/test/fake_l1_state.ts +++ b/yarn-project/archiver/src/test/fake_l1_state.ts @@ -195,9 +195,9 @@ export class FakeL1State { this.addMessages(checkpointNumber, messagesL1BlockNumber, messages); // Create the transaction and blobs - const tx = this.makeRollupTx(checkpoint, signers); - const blobHashes = this.makeVersionedBlobHashes(checkpoint); - const blobs = this.makeBlobsFromCheckpoint(checkpoint); + const tx = await this.makeRollupTx(checkpoint, signers); + const blobHashes = await this.makeVersionedBlobHashes(checkpoint); + const blobs = await this.makeBlobsFromCheckpoint(checkpoint); // Store the checkpoint data this.checkpoints.push({ @@ -539,14 +539,14 @@ export class FakeL1State { })); } - private makeRollupTx(checkpoint: Checkpoint, signers: Secp256k1Signer[]): Transaction { + private async makeRollupTx(checkpoint: Checkpoint, signers: Secp256k1Signer[]): Promise { const attestations = signers .map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)) .map(attestation => CommitteeAttestation.fromSignature(attestation.signature)) .map(committeeAttestation => committeeAttestation.toViem()); const header = checkpoint.header.toViem(); - const blobInput = getPrefixedEthBlobCommitments(getBlobsPerL1Block(checkpoint.toBlobFields())); + const blobInput = getPrefixedEthBlobCommitments(await getBlobsPerL1Block(checkpoint.toBlobFields())); const archive = toHex(checkpoint.archive.root.toBuffer()); const attestationsAndSigners = new CommitteeAttestationsAndSigners( attestations.map(attestation => CommitteeAttestation.fromViem(attestation)), @@ -595,13 +595,13 @@ export class FakeL1State { } as 
Transaction; } - private makeVersionedBlobHashes(checkpoint: Checkpoint): `0x${string}`[] { - return getBlobsPerL1Block(checkpoint.toBlobFields()).map( + private async makeVersionedBlobHashes(checkpoint: Checkpoint): Promise<`0x${string}`[]> { + return (await getBlobsPerL1Block(checkpoint.toBlobFields())).map( b => `0x${b.getEthVersionedBlobHash().toString('hex')}` as `0x${string}`, ); } - private makeBlobsFromCheckpoint(checkpoint: Checkpoint): Blob[] { - return getBlobsPerL1Block(checkpoint.toBlobFields()); + private async makeBlobsFromCheckpoint(checkpoint: Checkpoint): Promise { + return await getBlobsPerL1Block(checkpoint.toBlobFields()); } } diff --git a/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts index e7f9df2e627e..3786f1e3bfff 100644 --- a/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts +++ b/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts @@ -13,7 +13,7 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should store and retrieve a blob by hash', async () => { // Create a test blob with random fields const testFields = [Fr.random(), Fr.random(), Fr.random()]; - const blob = Blob.fromFields(testFields); + const blob = await Blob.fromFields(testFields); const blobHash = blob.getEthVersionedBlobHash(); // Store the blob @@ -29,8 +29,8 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should handle multiple blobs stored and retrieved by their hashes', async () => { // Create two different blobs - const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); - const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); const blobHash1 = blob1.getEthVersionedBlobHash(); const blobHash2 = blob2.getEthVersionedBlobHash(); @@ -57,9 +57,9 @@ export 
function describeBlobStore(getBlobStore: () => Promise) { it('should handle retrieving subset of stored blobs', async () => { // Store multiple blobs - const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = Blob.fromFields([Fr.random()]); - const blob3 = Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); + const blob3 = await Blob.fromFields([Fr.random()]); await blobStore.addBlobs([blob1, blob2, blob3]); @@ -75,7 +75,7 @@ export function describeBlobStore(getBlobStore: () => Promise) { }); it('should handle duplicate blob hashes in request', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const blobHash = blob.getEthVersionedBlobHash(); await blobStore.addBlobs([blob]); @@ -91,8 +91,8 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should overwrite blob when storing with same hash', async () => { // Create two blobs that will have the same hash (same content) const fields = [Fr.random(), Fr.random()]; - const blob1 = Blob.fromFields(fields); - const blob2 = Blob.fromFields(fields); + const blob1 = await Blob.fromFields(fields); + const blob2 = await Blob.fromFields(fields); const blobHash = blob1.getEthVersionedBlobHash(); diff --git a/yarn-project/blob-client/src/client/http.test.ts b/yarn-project/blob-client/src/client/http.test.ts index 5d3c6b16422e..c5c7d8eb832f 100644 --- a/yarn-project/blob-client/src/client/http.test.ts +++ b/yarn-project/blob-client/src/client/http.test.ts @@ -15,7 +15,7 @@ import { HttpBlobClient } from './http.js'; describe('HttpBlobClient', () => { it('should handle no sources configured', async () => { const client = new HttpBlobClient({}); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const blobHash = blob.getEthVersionedBlobHash(); const success = await client.sendBlobsToFilestore([blob]); @@ 
-40,11 +40,11 @@ describe('HttpBlobClient', () => { let latestSlotNumber: number; let missedSlots: number[]; - beforeEach(() => { + beforeEach(async () => { latestSlotNumber = 1; missedSlots = []; - testBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + testBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); testBlobsHashes = testBlobs.map(b => b.getEthVersionedBlobHash()); blobData = testBlobs.map(b => b.toJSON()); @@ -292,7 +292,7 @@ describe('HttpBlobClient', () => { }); // Create a blob that has mismatch data and commitment. - const randomBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + const randomBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); const incorrectBlob = new Blob(randomBlobs[0].data, randomBlobs[1].commitment); const incorrectBlobHash = incorrectBlob.getEthVersionedBlobHash(); // Update blobData to include the incorrect blob @@ -312,7 +312,7 @@ describe('HttpBlobClient', () => { it('should accumulate blobs across all three sources (filestore, consensus, archive)', async () => { // Create three blobs for testing - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(3)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(3))); const blobHashes = blobs.map(b => b.getEthVersionedBlobHash()); // Blob 0 only in filestore @@ -368,7 +368,7 @@ describe('HttpBlobClient', () => { it('should preserve blob order when requesting multiple blobs', async () => { // Create three distinct blobs - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(3)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(3))); const blobHashes = blobs.map(b => b.getEthVersionedBlobHash()); // Add all blobs to filestore @@ -477,7 +477,7 @@ describe('HttpBlobClient', () => { it('should return only one blob when multiple blobs with the same blobHash exist on a block', async () => { // Create a blob data array with two blobs that 
have the same commitment (thus same blobHash) - const blob = makeRandomBlob(3); + const blob = await makeRandomBlob(3); const blobHash = blob.getEthVersionedBlobHash(); const duplicateBlobData = [blob.toJSON(), blob.toJSON()]; @@ -503,7 +503,7 @@ describe('HttpBlobClient', () => { l1ConsensusHostUrls: [`http://localhost:${consensusHostPort}`], }); - const blob = makeRandomBlob(3); + const blob = await makeRandomBlob(3); const blobHash = blob.getEthVersionedBlobHash(); const blobJson = blob.toJSON(); @@ -616,8 +616,8 @@ describe('HttpBlobClient FileStore Integration', () => { let testBlobs: Blob[]; let testBlobsHashes: Buffer[]; - beforeEach(() => { - testBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + beforeEach(async () => { + testBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); testBlobsHashes = testBlobs.map(b => b.getEthVersionedBlobHash()); }); diff --git a/yarn-project/blob-client/src/client/http.ts b/yarn-project/blob-client/src/client/http.ts index a27fd2ea1d98..5d626933261a 100644 --- a/yarn-project/blob-client/src/client/http.ts +++ b/yarn-project/blob-client/src/client/http.ts @@ -215,8 +215,8 @@ export class HttpBlobClient implements BlobClientInterface { const getFilledBlobs = (): Blob[] => resultBlobs.filter((b): b is Blob => b !== undefined); // Helper to fill in results from fetched blobs - const fillResults = (fetchedBlobs: BlobJson[]): Blob[] => { - const blobs = processFetchedBlobs(fetchedBlobs, blobHashes, this.log); + const fillResults = async (fetchedBlobs: BlobJson[]): Promise => { + const blobs = await processFetchedBlobs(fetchedBlobs, blobHashes, this.log); // Fill in any missing positions with matching blobs for (let i = 0; i < blobHashes.length; i++) { if (resultBlobs[i] === undefined) { @@ -269,7 +269,7 @@ export class HttpBlobClient implements BlobClientInterface { ...ctx, }); const blobs = await this.getBlobsFromHost(l1ConsensusHostUrl, slotNumber, l1ConsensusHostIndex); - const result = 
fillResults(blobs); + const result = await fillResults(blobs); this.log.debug( `Got ${blobs.length} blobs from consensus host (total: ${result.length}/${blobHashes.length})`, { slotNumber, l1ConsensusHostUrl, ...ctx }, @@ -312,7 +312,7 @@ export class HttpBlobClient implements BlobClientInterface { this.log.debug('No blobs found from archive client', archiveCtx); } else { this.log.trace(`Got ${allBlobs.length} blobs from archive client before filtering`, archiveCtx); - const result = fillResults(allBlobs); + const result = await fillResults(allBlobs); this.log.debug( `Got ${allBlobs.length} blobs from archive client (total: ${result.length}/${blobHashes.length})`, archiveCtx, @@ -345,7 +345,7 @@ export class HttpBlobClient implements BlobClientInterface { */ private async tryFileStores( getMissingBlobHashes: () => Buffer[], - fillResults: (blobs: BlobJson[]) => Blob[], + fillResults: (blobs: BlobJson[]) => Promise, ctx: { blockHash: string; blobHashes: string[] }, ): Promise { // Shuffle clients for load distribution @@ -366,7 +366,7 @@ export class HttpBlobClient implements BlobClientInterface { }); const blobs = await client.getBlobsByHashes(blobHashStrings); if (blobs.length > 0) { - const result = fillResults(blobs); + const result = await fillResults(blobs); this.log.debug( `Got ${blobs.length} blobs from filestore (total: ${result.length}/${ctx.blobHashes.length})`, { @@ -388,7 +388,7 @@ export class HttpBlobClient implements BlobClientInterface { l1ConsensusHostIndex?: number, ): Promise { const blobs = await this.getBlobsFromHost(hostUrl, blockHashOrSlot, l1ConsensusHostIndex); - return processFetchedBlobs(blobs, blobHashes, this.log).filter((b): b is Blob => b !== undefined); + return (await processFetchedBlobs(blobs, blobHashes, this.log)).filter((b): b is Blob => b !== undefined); } public async getBlobsFromHost( @@ -616,7 +616,11 @@ function parseBlobJson(data: any): BlobJson { // Returns an array that maps each blob hash to the corresponding blob, or 
undefined if the blob is not found // or the data does not match the commitment. -function processFetchedBlobs(blobs: BlobJson[], blobHashes: Buffer[], logger: Logger): (Blob | undefined)[] { +async function processFetchedBlobs( + blobs: BlobJson[], + blobHashes: Buffer[], + logger: Logger, +): Promise<(Blob | undefined)[]> { const requestedBlobHashes = new Set(blobHashes.map(bufferToHex)); const hashToBlob = new Map(); for (const blobJson of blobs) { @@ -626,7 +630,7 @@ function processFetchedBlobs(blobs: BlobJson[], blobHashes: Buffer[], logger: Lo } try { - const blob = Blob.fromJson(blobJson); + const blob = await Blob.fromJson(blobJson); hashToBlob.set(hashHex, blob); } catch (err) { // If the above throws, it's likely that the blob commitment does not match the hash of the blob data. diff --git a/yarn-project/blob-client/src/client/tests.ts b/yarn-project/blob-client/src/client/tests.ts index 826aa286781f..d85430f4fb51 100644 --- a/yarn-project/blob-client/src/client/tests.ts +++ b/yarn-project/blob-client/src/client/tests.ts @@ -28,7 +28,7 @@ export function runBlobClientTests( }); it('should send and retrieve blobs by hash', async () => { - const blob = makeRandomBlob(5); + const blob = await makeRandomBlob(5); const blobHash = blob.getEthVersionedBlobHash(); await client.sendBlobsToFilestore([blob]); @@ -39,7 +39,7 @@ export function runBlobClientTests( }); it('should handle multiple blobs', async () => { - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(7)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(7))); const blobHashes = blobs.map(blob => blob.getEthVersionedBlobHash()); await client.sendBlobsToFilestore(blobs); diff --git a/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts b/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts index fe30df599709..872f37cb20b4 100644 --- a/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts +++ 
b/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts @@ -74,7 +74,7 @@ describe('FileStoreBlobClient', () => { describe('saveBlob', () => { it('should save a blob to the filestore', async () => { - const blob = Blob.fromFields([Fr.random(), Fr.random()]); + const blob = await Blob.fromFields([Fr.random(), Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -88,7 +88,7 @@ describe('FileStoreBlobClient', () => { }); it('should skip saving if blob already exists and skipIfExists=true', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save first time @@ -107,7 +107,7 @@ describe('FileStoreBlobClient', () => { }); it('should overwrite if skipIfExists=false', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save first time @@ -130,8 +130,8 @@ describe('FileStoreBlobClient', () => { describe('saveBlobs', () => { it('should save multiple blobs', async () => { - const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); await client.saveBlobs([blob1, blob2]); @@ -145,7 +145,7 @@ describe('FileStoreBlobClient', () => { describe('getBlobsByHashes', () => { it('should retrieve blobs by their versioned hashes', async () => { - const blob = Blob.fromFields([Fr.random(), Fr.random()]); + const blob = await Blob.fromFields([Fr.random(), Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -163,8 +163,8 @@ describe('FileStoreBlobClient', () => { }); it('should retrieve multiple blobs', async () => { 
- const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); await client.saveBlobs([blob1, blob2]); @@ -177,7 +177,7 @@ describe('FileStoreBlobClient', () => { }); it('should skip blobs that fail to parse', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const hash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save invalid JSON @@ -191,7 +191,7 @@ describe('FileStoreBlobClient', () => { describe('exists', () => { it('should return true if blob exists', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -240,14 +240,14 @@ describe('FileStoreBlobClient', () => { const readOnlyStore = new MockReadOnlyFileStore(); const readOnlyClient = new FileStoreBlobClient(readOnlyStore, basePath); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); await expect(readOnlyClient.saveBlob(blob)).rejects.toThrow('FileStore is read-only'); }); it('should be able to read from read-only store', async () => { const files = new Map(); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; const path = `${basePath}/blobs/${versionedHash}.data`; diff --git a/yarn-project/blob-lib/src/blob.test.ts b/yarn-project/blob-lib/src/blob.test.ts index 4939baa09f84..3053eb1d077d 100644 --- a/yarn-project/blob-lib/src/blob.test.ts +++ b/yarn-project/blob-lib/src/blob.test.ts @@ -58,10 +58,10 @@ describe('blob', () => { // This test ensures that the noir blob lib correctly matches the kzg lib const blobFields = Array(400).fill(new Fr(3)); const 
blobFieldsHash = await poseidon2Hash(blobFields); - const blob = Blob.fromFields(blobFields); + const blob = await Blob.fromFields(blobFields); const challengeZ = await blob.computeChallengeZ(blobFieldsHash); - const { y } = blob.evaluate(challengeZ, true /* verifyProof */); + const { y } = await blob.evaluate(challengeZ, true /* verifyProof */); expect(blob.commitment.toString('hex')).toMatchInlineSnapshot( `"b2803d5fe972914ba3616033e2748bbaa6dbcddefc3721a54895a7a45e77504dd1a971c7e8d8292be943d05bccebcfea"`, @@ -88,10 +88,10 @@ describe('blob', () => { // This test ensures that the noir blob lib correctly matches the kzg lib const blobFields = Array.from({ length: FIELDS_PER_BLOB }).map((_, i) => new Fr(i + 2)); const blobFieldsHash = await poseidon2Hash(blobFields); - const blob = Blob.fromFields(blobFields); + const blob = await Blob.fromFields(blobFields); const challengeZ = await blob.computeChallengeZ(blobFieldsHash); - const { y } = blob.evaluate(challengeZ, true /* verifyProof */); + const { y } = await blob.evaluate(challengeZ, true /* verifyProof */); expect(blob.commitment.toString('hex')).toMatchInlineSnapshot( `"ac771dea41e29fc2b7016c32731602c0812548ba0f491864a4e03fdb94b8d3d195faad1967cdf005acf73088b0e8474a"`, @@ -114,15 +114,15 @@ describe('blob', () => { ); }); - it('should serialize and deserialize a blob', () => { - const blob = makeRandomBlob(5); + it('should serialize and deserialize a blob', async () => { + const blob = await makeRandomBlob(5); const blobBuffer = blob.toBuffer(); expect(Blob.fromBuffer(blobBuffer)).toEqual(blob); }); - it('should create a blob from a JSON object', () => { - const blob = makeRandomBlob(7); + it('should create a blob from a JSON object', async () => { + const blob = await makeRandomBlob(7); const blobJson = blob.toJSON(); - expect(Blob.fromJson(blobJson)).toEqual(blob); + expect(await Blob.fromJson(blobJson)).toEqual(blob); }); }); diff --git a/yarn-project/blob-lib/src/blob.ts b/yarn-project/blob-lib/src/blob.ts 
index 25f0b087e773..230b2ff957a9 100644 --- a/yarn-project/blob-lib/src/blob.ts +++ b/yarn-project/blob-lib/src/blob.ts @@ -42,8 +42,8 @@ export class Blob { * * @throws If data does not match the expected length (BYTES_PER_BLOB). */ - static fromBlobBuffer(data: Uint8Array): Blob { - const commitment = computeBlobCommitment(data); + static async fromBlobBuffer(data: Uint8Array): Promise { + const commitment = await computeBlobCommitment(data); return new Blob(data, commitment); } @@ -55,13 +55,13 @@ export class Blob { * @param fields - The array of fields to create the Blob from. * @returns A Blob created from the array of fields. */ - static fromFields(fields: Fr[]): Blob { + static async fromFields(fields: Fr[]): Promise { if (fields.length > FIELDS_PER_BLOB) { throw new Error(`Attempted to overfill blob with ${fields.length} fields. The maximum is ${FIELDS_PER_BLOB}.`); } const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); - const commitment = computeBlobCommitment(data); + const commitment = await computeBlobCommitment(data); return new Blob(data, commitment); } @@ -88,9 +88,9 @@ export class Blob { * @param json - The JSON object to create the Blob from. * @returns A Blob created from the JSON object. */ - static fromJson(json: BlobJson): Blob { + static async fromJson(json: BlobJson): Promise { const blobBuffer = Buffer.from(json.blob.slice(2), 'hex'); - const blob = Blob.fromBlobBuffer(blobBuffer); + const blob = await Blob.fromBlobBuffer(blobBuffer); if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) { throw new Error('KZG commitment does not match'); @@ -134,9 +134,9 @@ export class Blob { * y: BLS12Fr - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. * proof: Buffer - KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). 
*/ - evaluate(challengeZ: Fr, verifyProof = false) { + async evaluate(challengeZ: Fr, verifyProof = false) { const kzg = getKzg(); - const res = kzg.computeKzgProof(this.data, challengeZ.toBuffer()); + const res = await kzg.asyncComputeKzgProof(this.data, challengeZ.toBuffer()); if (verifyProof && !kzg.verifyKzgProof(this.commitment, challengeZ.toBuffer(), res[1], res[0])) { throw new Error(`KZG proof did not verify.`); } diff --git a/yarn-project/blob-lib/src/blob_batching.test.ts b/yarn-project/blob-lib/src/blob_batching.test.ts index 1568be1ef2ea..11e2da774411 100644 --- a/yarn-project/blob-lib/src/blob_batching.test.ts +++ b/yarn-project/blob-lib/src/blob_batching.test.ts @@ -24,9 +24,9 @@ const trustedSetup = JSON.parse( ); describe('Blob Batching', () => { - it.each([10, 100, 400])('our BLS library should correctly commit to a blob of %p items', size => { + it.each([10, 100, 400])('our BLS library should correctly commit to a blob of %p items', async size => { const blobFields = [new Fr(size)].concat(Array.from({ length: size - 1 }).map((_, i) => new Fr(size + i))); - const ourBlob = Blob.fromFields(blobFields); + const ourBlob = await Blob.fromFields(blobFields); const point = BLS12Point.decompress(ourBlob.commitment); @@ -49,7 +49,7 @@ describe('Blob Batching', () => { it('should construct and verify 1 blob', async () => { // Initialize 400 fields. This test shows that a single blob works with batching methods. 
const blobFields = Array.from({ length: 400 }, (_, i) => new Fr(i + 123)); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); const onlyBlob = blobs[0]; @@ -66,7 +66,7 @@ describe('Blob Batching', () => { const commitment = BLS12Point.decompress(onlyBlob.commitment); // 'Batched' evaluation - const { y, proof } = onlyBlob.evaluate(finalZ); + const { y, proof } = await onlyBlob.evaluate(finalZ); const q = BLS12Point.decompress(proof); const finalBlobCommitmentsHash = sha256ToField([onlyBlob.commitment]); @@ -134,7 +134,7 @@ describe('Blob Batching', () => { blobFields[numBlobFields - 1] = encodeCheckpointEndMarker({ numBlobFields }); } - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(numBlobs); const finalChallenges = await BatchedBlobAccumulator.precomputeBatchedBlobChallenges([blobFields]); @@ -153,7 +153,7 @@ describe('Blob Batching', () => { // Batched evaluation // NB: we share the same finalZ between blobs - const proofObjects = blobs.map(b => b.evaluate(finalZ)); + const proofObjects = await Promise.all(blobs.map(b => b.evaluate(finalZ))); const evalYs = proofObjects.map(({ y }) => y); const qs = proofObjects.map(({ proof }) => BLS12Point.decompress(proof)); diff --git a/yarn-project/blob-lib/src/blob_batching.ts b/yarn-project/blob-lib/src/blob_batching.ts index 25b58416426a..d4f1b058622b 100644 --- a/yarn-project/blob-lib/src/blob_batching.ts +++ b/yarn-project/blob-lib/src/blob_batching.ts @@ -109,7 +109,7 @@ export class BatchedBlobAccumulator { for (const blobFields of blobFieldsPerCheckpoint) { // Compute the hash of all the fields in the block. const blobFieldsHash = await computeBlobFieldsHash(blobFields); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); for (const blob of blobs) { // Compute the challenge z for each blob and accumulate it. 
const challengeZ = await blob.computeChallengeZ(blobFieldsHash); @@ -126,7 +126,7 @@ export class BatchedBlobAccumulator { } // Now we have a shared challenge for all blobs, evaluate them... - const proofObjects = allBlobs.map(b => b.evaluate(z)); + const proofObjects = await Promise.all(allBlobs.map(b => b.evaluate(z))); const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y))); // ...and find the challenge for the linear combination of blobs. let gamma = evaluations[0]; @@ -145,7 +145,7 @@ export class BatchedBlobAccumulator { * @returns An updated blob accumulator. */ async accumulateBlob(blob: Blob, blobFieldsHash: Fr) { - const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z); + const { proof, y: thisY } = await blob.evaluate(this.finalBlobChallenges.z); const thisC = BLS12Point.decompress(blob.commitment); const thisQ = BLS12Point.decompress(proof); const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash); @@ -192,7 +192,7 @@ export class BatchedBlobAccumulator { * @returns An updated blob accumulator. 
*/ async accumulateFields(blobFields: Fr[]) { - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); if (blobs.length > BLOBS_PER_CHECKPOINT) { throw new Error( diff --git a/yarn-project/blob-lib/src/blob_utils.test.ts b/yarn-project/blob-lib/src/blob_utils.test.ts index ba7b9120d5b7..c1e5cbf0820d 100644 --- a/yarn-project/blob-lib/src/blob_utils.test.ts +++ b/yarn-project/blob-lib/src/blob_utils.test.ts @@ -7,33 +7,33 @@ import { makeCheckpointBlobData } from './encoding/fixtures.js'; import { BlobDeserializationError } from './errors.js'; describe('blob fields encoding', () => { - it('can process correct encoding for a single blob', () => { + it('can process correct encoding for a single blob', async () => { const checkpointBlobData = makeCheckpointBlobData(); const blobFields = encodeCheckpointBlobData(checkpointBlobData); expect(blobFields.length).toBeLessThan(FIELDS_PER_BLOB); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); const decoded = decodeCheckpointBlobDataFromBlobs(blobs); expect(decoded).toEqual(checkpointBlobData); }); - it('can process correct encoding for multiple blobs', () => { + it('can process correct encoding for multiple blobs', async () => { const checkpointBlobData = makeCheckpointBlobData({ numBlocks: 2, numTxsPerBlock: 1, isFullTx: true }); const blobFields = encodeCheckpointBlobData(checkpointBlobData); expect(blobFields.length).toBeGreaterThan(FIELDS_PER_BLOB); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBeGreaterThan(1); const decoded = decodeCheckpointBlobDataFromBlobs(blobs); expect(decoded).toEqual(checkpointBlobData); }); - it('throws processing random blob data', () => { + it('throws processing random blob data', async () => { const blobFields = Array.from({ length: 10 }, () => Fr.random()); - const blobs = 
getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); expect(() => decodeCheckpointBlobDataFromBlobs(blobs)).toThrow(BlobDeserializationError); diff --git a/yarn-project/blob-lib/src/blob_utils.ts b/yarn-project/blob-lib/src/blob_utils.ts index 14a380b4446b..d0d23804fe43 100644 --- a/yarn-project/blob-lib/src/blob_utils.ts +++ b/yarn-project/blob-lib/src/blob_utils.ts @@ -30,14 +30,16 @@ export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` { * * @throws If the number of fields does not match what's indicated by the checkpoint prefix. */ -export function getBlobsPerL1Block(fields: Fr[]): Blob[] { +export async function getBlobsPerL1Block(fields: Fr[]): Promise { if (!fields.length) { throw new Error('Cannot create blobs from empty fields.'); } const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB); - return Array.from({ length: numBlobs }, (_, i) => - Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)), + return await Promise.all( + Array.from({ length: numBlobs }, (_, i) => + Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)), + ), ); } diff --git a/yarn-project/blob-lib/src/hash.ts b/yarn-project/blob-lib/src/hash.ts index 31f83e6b8524..a1ae9040fc88 100644 --- a/yarn-project/blob-lib/src/hash.ts +++ b/yarn-project/blob-lib/src/hash.ts @@ -44,12 +44,12 @@ export async function computeBlobFieldsHash(fields: Fr[]): Promise { return sponge.squeeze(); } -export function computeBlobCommitment(data: Uint8Array): Buffer { +export async function computeBlobCommitment(data: Uint8Array): Promise { if (data.length !== BYTES_PER_BLOB) { throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. 
Got ${data.length}.`); } - return Buffer.from(getKzg().blobToKzgCommitment(data)); + return Buffer.from(await getKzg().asyncBlobToKzgCommitment(data)); } /** diff --git a/yarn-project/blob-lib/src/testing.ts b/yarn-project/blob-lib/src/testing.ts index 8b7bf19740cd..759f7da0790d 100644 --- a/yarn-project/blob-lib/src/testing.ts +++ b/yarn-project/blob-lib/src/testing.ts @@ -89,6 +89,6 @@ export function makeFinalBlobBatchingChallenges(seed = 1) { * @param length * @returns */ -export function makeRandomBlob(length: number): Blob { +export function makeRandomBlob(length: number): Promise { return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]); } diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts index 186b3b02c12d..f2f78d843213 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts @@ -86,12 +86,12 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { }); describe('blocks', () => { - const getBlobs = (serializedTx: `0x${string}`) => { + const getBlobs = async (serializedTx: `0x${string}`) => { const parsedTx = parseTransaction(serializedTx); if (parsedTx.sidecars === false) { throw new Error('No sidecars found in tx'); } - return parsedTx.sidecars!.map(sidecar => Blob.fromBlobBuffer(hexToBuffer(sidecar.blob))); + return await Promise.all(parsedTx.sidecars!.map(sidecar => Blob.fromBlobBuffer(hexToBuffer(sidecar.blob)))); }; /** Returns the last synced checkpoint number for a node */ @@ -383,7 +383,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // We also need to send the blob to the sink, so the node can get it logger.warn(`Sending blobs to blob client`); - const blobs = getBlobs(l2BlockTx); + const blobs = await getBlobs(l2BlockTx); const blobClient = createBlobClient(context.config); await 
blobClient.sendBlobsToFilestore(blobs); diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 9b36f58493ff..c28b6532bace 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -457,7 +457,7 @@ describe('L1Publisher integration', () => { blockSource.getL1ToL2Messages.mockResolvedValueOnce(currentL1ToL2Messages); const checkpointBlobFields = checkpoint.toBlobFields(); - const blockBlobs = getBlobsPerL1Block(checkpointBlobFields); + const blockBlobs = await getBlobsPerL1Block(checkpointBlobFields); let prevBlobAccumulatorHash = (await rollup.getCurrentBlobCommitmentsHash()).toBuffer(); diff --git a/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts b/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts index ccc3d73ee1c5..33e40d554ee6 100644 --- a/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts +++ b/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts @@ -94,7 +94,7 @@ export async function foreignCallHandler(name: string, args: ForeignCallInput[]) ); } - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); blobs.forEach((blob, i) => { const injected = kzgCommitments[i]; const calculated = BLS12Point.decompress(blob.commitment); diff --git a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts index 883e75dc2102..cb789075c3f8 100644 --- a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts +++ b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts @@ -242,7 +242,7 @@ export class LightweightCheckpointBuilder { const newArchive = 
this.lastArchives[this.lastArchives.length - 1]; - const blobs = getBlobsPerL1Block(this.blobFields); + const blobs = await getBlobsPerL1Block(this.blobFields); const blobsHash = computeBlobsHashFromBlobs(blobs); const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages); diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 13d29d31bc98..01c0726d70bb 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -253,8 +253,8 @@ export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, // Build "hints" as the private inputs for the checkpoint root rollup circuit. // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`. // The `blobsHash` will be validated on L1 against the submitted blob data. -export const buildBlobHints = (blobFields: Fr[]) => { - const blobs = getBlobsPerL1Block(blobFields); +export const buildBlobHints = async (blobFields: Fr[]) => { + const blobs = await getBlobsPerL1Block(blobFields); const blobCommitments = getBlobCommitmentsFromBlobs(blobs); const blobsHash = computeBlobsHashFromBlobs(blobs); return { blobCommitments, blobs, blobsHash }; diff --git a/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts b/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts index e38d60e8ccb6..5f1755f192fe 100644 --- a/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts +++ b/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts @@ -16,7 +16,7 @@ describe('buildBlobHints', () => { encodeCheckpointEndMarker({ numBlobFields: blobFieldsWithoutEndMarker.length + 1 }), ]); - const { blobCommitments, blobsHash, blobs } = buildBlobHints(blobFields); + const { blobCommitments, blobsHash, blobs 
} = await buildBlobHints(blobFields); expect(blobs.length).toBe(1); const onlyBlob = blobs[0]; @@ -37,7 +37,7 @@ describe('buildBlobHints', () => { const zStr = challengeZ.toString(); expect(zStr).toMatchInlineSnapshot(`"0x11d6daed56531bd5c5acf341663d21089bb96913f4e716dca3cdb01b8d5735a3"`); - const proof = onlyBlob.evaluate(challengeZ, true /* verifyProof */); + const proof = await onlyBlob.evaluate(challengeZ, true /* verifyProof */); const yStr = proof.y.toString(); expect(yStr).toMatchInlineSnapshot(`"0x6033e46c697b3de1a5ddedb940ae6ccdb6efc0adeb255336b0220d3fd4b76720"`); diff --git a/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts b/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts index 7b92edcd2d6e..32a813c6bc25 100644 --- a/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts @@ -85,7 +85,7 @@ export class CheckpointProvingState { typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH >, public parentEpoch: EpochProvingState, - private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void, + private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise, ) { this.blockProofs = new UnbalancedTreeStore(totalNumBlocks); this.firstBlockNumber = BlockNumber(headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1); @@ -245,7 +245,7 @@ export class CheckpointProvingState { this.endBlobAccumulator = await accumulateBlobs(this.blobFields!, startBlobAccumulator); this.startBlobAccumulator = startBlobAccumulator; - this.onBlobAccumulatorSet(this); + await this.onBlobAccumulatorSet(this); return this.endBlobAccumulator; } @@ -271,7 +271,7 @@ export class CheckpointProvingState { return this.totalNumBlocks === 1 ? 
'rollup-checkpoint-root-single-block' : 'rollup-checkpoint-root'; } - public getCheckpointRootRollupInputs() { + public async getCheckpointRootRollupInputs() { const proofs = this.#getChildProofsForRoot(); const nonEmptyProofs = proofs.filter(p => !!p); if (proofs.length !== nonEmptyProofs.length) { @@ -287,7 +287,7 @@ export class CheckpointProvingState { // `blobFields` must've been set if `startBlobAccumulator` is set (in `accumulateBlobs`). const blobFields = this.blobFields!; - const { blobCommitments, blobsHash } = buildBlobHints(blobFields); + const { blobCommitments, blobsHash } = await buildBlobHints(blobFields); const hints = CheckpointRootRollupHints.from({ previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint, diff --git a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts index 97cbc1013827..a551082873c6 100644 --- a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts @@ -76,7 +76,7 @@ export class EpochProvingState { public readonly epochNumber: EpochNumber, public readonly totalNumCheckpoints: number, private readonly finalBlobBatchingChallenges: FinalBlobBatchingChallenges, - private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void, + private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise, private completionCallback: (result: ProvingResult) => void, private rejectionCallback: (reason: string) => void, ) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 4ac203a6c271..23a9f040a550 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -898,9 +898,9 @@ export class ProvingOrchestrator implements EpochProver { await 
this.verifyBuiltBlockAgainstSyncedState(provingState); if (checkpointProvingState.totalNumBlocks === 1) { - this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState); + await this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState); } else { - this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation); + await this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation); } // We are finished with the block at this point, ensure the fork is cleaned up @@ -1009,14 +1009,14 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber), ), - result => { + async result => { provingState.setBlockMergeRollupProof(location, result); - this.checkAndEnqueueNextBlockMergeRollup(provingState, location); + await this.checkAndEnqueueNextBlockMergeRollup(provingState, location); }, ); } - private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) { + private async enqueueCheckpointRootRollup(provingState: CheckpointProvingState) { if (!provingState.verifyState()) { this.logger.debug('Not running checkpoint root rollup. 
State no longer valid.'); return; @@ -1031,7 +1031,7 @@ export class ProvingOrchestrator implements EpochProver { this.logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`); - const inputs = provingState.getCheckpointRootRollupInputs(); + const inputs = await provingState.getCheckpointRootRollupInputs(); this.deferredProving( provingState, @@ -1191,25 +1191,28 @@ export class ProvingOrchestrator implements EpochProver { this.enqueueBlockRootRollup(provingState); } - private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) { + private async checkAndEnqueueNextBlockMergeRollup( + provingState: CheckpointProvingState, + currentLocation: TreeNodeLocation, + ) { if (!provingState.isReadyForBlockMerge(currentLocation)) { return; } const parentLocation = provingState.getParentLocation(currentLocation); if (parentLocation.level === 0) { - this.checkAndEnqueueCheckpointRootRollup(provingState); + await this.checkAndEnqueueCheckpointRootRollup(provingState); } else { this.enqueueBlockMergeRollup(provingState, parentLocation); } } - private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) { + private async checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) { if (!provingState.isReadyForCheckpointRoot()) { return; } - this.enqueueCheckpointRootRollup(provingState); + await this.enqueueCheckpointRootRollup(provingState); } private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) { diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 8983a5047bdb..499049701e89 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -149,7 +149,9 @@ export class EpochProvingJob implements Traceable { try { const blobFieldsPerCheckpoint = 
this.checkpoints.map(checkpoint => checkpoint.toBlobFields()); + this.log.info(`Blob fields per checkpoint: ${timer.ms()}ms`); const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint); + this.log.info(`Final blob batching challeneger: ${timer.ms()}ms`); this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges); await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values())); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 789dae9a37eb..d944412263d5 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -207,7 +207,7 @@ describe('SequencerPublisher', () => { it('bundles propose and vote tx to l1', async () => { const checkpoint = new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber); - const expectedBlobs = getBlobsPerL1Block(checkpoint.toBlobFields()); + const expectedBlobs = await getBlobsPerL1Block(checkpoint.toBlobFields()); await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); const { govPayload, voteSig } = mockGovernancePayload(); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index a88d42f05029..bbc0335d29c8 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -643,7 +643,7 @@ export class SequencerPublisher { ): Promise { const ts = BigInt((await this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); const blobInput 
= getPrefixedEthBlobCommitments(blobs); const args = [ @@ -953,7 +953,7 @@ export class SequencerPublisher { const checkpointHeader = checkpoint.header; const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); const proposeTxArgs: L1ProcessArgs = { header: checkpointHeader, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 29c5d497576e..8bb3c3f773f2 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -765,7 +765,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } const blobFields = blocks.flatMap(b => b.toBlobFields()); - const blobs: Blob[] = getBlobsPerL1Block(blobFields); + const blobs: Blob[] = await getBlobsPerL1Block(blobFields); await this.blobClient.sendBlobsToFilestore(blobs); this.log.debug(`Uploaded ${blobs.length} blobs to filestore for checkpoint at slot ${proposal.slotNumber}`, { ...proposalInfo, From 50338067402a330ffd064d656e75b650ac63948a Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 00:27:36 +0000 Subject: [PATCH 47/62] chore: vendor web3signer (#20570) Should fix the random failures to deploy --- spartan/.gitignore | 1 + spartan/terraform/modules/web3signer/main.tf | 6 +++--- .../modules/web3signer/web3signer-1.0.6.tgz | Bin 0 -> 70775 bytes 3 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 spartan/terraform/modules/web3signer/web3signer-1.0.6.tgz diff --git a/spartan/.gitignore b/spartan/.gitignore index b47543f24982..792fa0ebb8b7 100644 --- a/spartan/.gitignore +++ b/spartan/.gitignore @@ -1,4 +1,5 @@ *.tgz +!terraform/modules/web3signer/web3signer-1.0.6.tgz scripts/logs scripts/LICENSE tfplan diff --git a/spartan/terraform/modules/web3signer/main.tf b/spartan/terraform/modules/web3signer/main.tf index d882c73d0d83..b51b0f9f773a 100644 --- 
a/spartan/terraform/modules/web3signer/main.tf +++ b/spartan/terraform/modules/web3signer/main.tf @@ -62,13 +62,13 @@ resource "helm_release" "keystore_setup" { resource "helm_release" "web3signer" { name = "${var.RELEASE_NAME}-signer" - repository = "https://ethpandaops.github.io/ethereum-helm-charts" - chart = "web3signer" - version = "1.0.6" + chart = "${path.module}/web3signer-1.0.6.tgz" namespace = var.NAMESPACE create_namespace = true upgrade_install = true + depends_on = [helm_release.keystore_setup] + values = [ file("${path.module}/values/web3signer.yaml"), yamlencode({ diff --git a/spartan/terraform/modules/web3signer/web3signer-1.0.6.tgz b/spartan/terraform/modules/web3signer/web3signer-1.0.6.tgz new file mode 100644 index 0000000000000000000000000000000000000000..6e66659c12153ee29db3dcfdefa675ec6fdf27c0 GIT binary patch literal 70775 zcmV)IK)k;niwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PMZ%dK)*g064$-6d1}gl5*5dQWrbnIi4LwQXHMgwpLP-b2iQ= zgWVvBsM+W=K*=0i`vm()`%e1``{CLE`og9pE0+-`W0BoJRiOYB3WY+|4I1rGJV{aB zJD9?pg$p=K{_?P&e!t&;zPl^__WOPR@1Vca|I1+a#qQ4T?qIOJ_m}=)XM4B*7tnuT zIx0VN%3%JN{=ISejr)UqV1|D{ImIL$g6r*0j53tQD2*_p!%hHTf*~COnm{_m=_Jbu zLlMIyjYk~-B9g|KiMmxKLX^T$g5nMUDV(7p$OvVV9MNBs4gfjI2*r%#iy@dYmeFCa z7x5uT!x3gFoZ&Dcv!43i0pMEGIoJ+AgdaLFqEU`BiT_75+Bp~F7+?w_m~qU2i~%3q zIGq5Jg4sM_I7<-763E6RpHVQU{9TsgYsgRsfGf11fHIP!7~mAZ6rc|bV3G|CqlLoo*mL5%5D*qIXELfw!FX4CmdG~j@w6s2@Qd)8d*%pgt~#3{-JpV18B zWC%uc3d0f1Q#gY3pA-I&!!QiguXwqlRu0DyCI6&Xiv)!!Vy3uw!XL<=1^y0OWM~Lr zmL)iXoP(W$yJ0_k-ubP^<XS}zK5Ze3l`SU@%^Ww!`XM$3cLx$oZ=x+D7cZ2>Q*xtDqY!7$88V+{C zFL%D$9qj%`e=zL#yHB#L;bYDJYnaRt{R|3V^Zef)?C!kq=l|Yd=V|^w#%B!#0eE|j z@*KyA0yafJ%n{}`-ppqslyem=M?wvu z9sOkp2AvL`!3i3601%~%amuw$sW|~r#F9^*u!<@)uNLqaoOA&HcL);55M_;ZvU!q# zj3hYX==rZx@nZ;%$8QKb%@IW@Q*A!_z;Xy+KA~Vta>2VQpdG}^?dKCZ1V8=E(XWA{ z4{*kHS6Si<|WrOxeh2PI3TkbOk=@w7@-+TSu-iooH8;~ z)i^m|4ZKa01@H8fb8Ctzz+RMI>r+)#tG`Ifq&{k zK>*GX0`DzJ^u7~`vP1Bx13(NJ3^K^3L(uKZHfg&xgZ_| 
z^E?@X?l18u8Wzj!u-D^6pP>Xnio!Q=hQRG@P=%>ywXTwqEXOmLFTzv;=`N#(^8F28 z_1gl*DQ58~<^Xf6C=Zl zoOAh{6fZ#lD2j5#-iUGRzzkSIWg!dynnV#)qURz?a+deCFK}_nP(Fqc>S)n21Yh<0 zeOc|b?BgXhtsx^S4>}z+Bu$r}%in-+)Ol;ge*GYsQ-<>6Qzb_C$74QO7C_3#c`o`> z;LS)(J5aqB9W1LHizQAhT9RdQTpW_Z!%dQxby& zj!;578HrECH}AF83%tyJWvB3hGnm4O)B=mdoAVa7E-`BKuirXCMFe4<9#dG$~uSx4DPB0UDekl#(oMifY z0QOH##CJaMxw6Me{{|M47{)$QZDu)p|b!E;@UkjZJlAzp@H9Hamu3D@ZOUU&`6;D_&1Xg`xQexJpB*m3F^ znk-67GDWO27iG>>5i|Gvs*a{{MiS0VSw=7Yu{`X7iCE%nk;H!=n4f}&TpN-@dbSAo z`=HoUav>wP_h2-qixK%SR6A`U#m!!Gole)czW)j3IPql_mp=67jCJV{T^`=9#;fw0HO36T*Dmm zZdav`((4vw0ZxBG5f?kx7y_tAoznU`@{a)ZQS3M{JD8H2^bj&=?K|vl=8{Maa}bd{ zM-k&1%?wj&gl-gZt%~o=L5y>uxnM-??THfE#=4_W8J4)_9M_w-&4ei14pxrEe>%mhaDF>#W}3s4Z5N{>BN-rF?#yLfvjs`vCeXJxq}4anNsoe z8Z7&N#`Re(iq!mxcxijRlVjyIiKP-EwelZx%+T93LIwK;XZXOy|B^uK0P+G-+q2sx z6c&SeiqSO!BSgi3ACr6rHHn;PFe5nv z8x?22IZUG|%6kQlAj`=$<`V|xo1AEg$5)waw(qsPtk!4F?JY$u+w2{N9BnhmqN&S>`)xnE7`=!gnUoNA)+}2B;(ox zkyhIZ{zfE6g!YOWfmNg0B614ZR7kQYhho8#=ER=blN-(_;@F?zYpD{_3@W>e+XSV! 
ziPv7=6@uAowc5M^^~LV7`bzeeuA#%7ID3k3n!Wf<*GG_u8e{dmxb1jwD)Xb4!8crut`kG&INvtv~+T&20`k*=D zlqMjlpp6iSQI?PeFF^qW=5%zyhs+3Z^L+}pN&~3`Q*!ALux)Loqm5i@p8lEy(qV?GHvk~DR2U0jS-oiiMW{xn z$|`Rakn69f8+!1BX2|Qz@I(%w5_qjCLOLCq0~zbpY5fW=KNTBy{c-oNT2MG|We@eu zW38TAlXk~40=_p7BdNYXAp3z%+hVisloJVvWie4YTI@`ZEe*5P9<%aWuf4V;i zC%(%|W%mk8)r(TwmzO!3k!$pQihrFWYv0vWYv=M3#_cHXK0)j%+gGl*>HOl$uo;s8r=a`nV=+d5`sdHL-TwqJDWS$= z@Dune2*$3Q>OC`+yWr>l6#Mv6UH-Y@TlwxYJwfeyrSfC!Jc>zLNS5mg0SlH0S)s|OHgDizBL|0qBOF&x-se{dO(LOLBf(rfLny%A1(j=)17#EHl0tJD4S z^B>=y9lkUOtWt+xUmpF>@%hF1%V!%I{WSr>_r*FKOu=CLg*Yu6fPjG@0^ReYlcR$R zFaY12y?qU`$)#Rl!H?e_ogMK7mvijpdLjMRyY^TSJs$iEbe|dAbzQuKpa^si&W`pk zj=QrE7kmztCc(km*QX~(7e|Me zyxhy~vyc4mEniBq1Thr5Q&-v^hOKs45LfX5GW^u@}hjIg3>i=2t|Jr@A`{e)mD4&lX zdtY?cz@g`+^F{CWw)6300Agtb?-t`EREJN>C8!I6+uKe6=(H1n07jhv{G$Uvk7Uf& zNx7_ZHY-6OH;%y=C+MYc=20gK;uuT0dt~Y@@g1`fLx(W>jdF=V?942^- zQ4C;~3DU#Pk4VB6jOE{(2IKIw;W=C3*f^Se6RY5hq?z+r(RaNL=vOa$)lwYc<QFvU)P9|Jrsz~PLS!jnkZkT)QLyM$zcn_a<#%;M4}8*YyP*3YUC_Pku0D~-Y(`RT zfo>vF7Lim=Q8EkZRNnd!&W|{al6hR&EVUX}7SU%7Ckl9IjT+U~{VdbXy}j*}4;{?q z629=4s(b>!&Iv%gUUJ1x1O0e97x zZYu!))=Fcg{wMYD8O$E$GSKq<-@w!Vo)31P?*BZ>$GQJK=v?769;)jZuVL2F7TKY3 zP^*zv*0h!Tvcw!8EBE_~N{LwpK7kacF-jTO<<&JxYeT55XB-yG5;L=9kur$inz4V7 zPv!jAe*X`26>!P?-|g@E^MCL8-qZf?Q9jlnR$ax2$A}T*34`r^@xK7|i!y zsbrNK%%+W4$?G(*v_7{E&=%>CwyM6SSEereW~w=zzj;L?>l zU4xhC(1+L(H`?Ua9!4%aj^re&q86 zEp3BCoWAtNG4XF@61jLR8Y!>9S*KnW9`Vj@8wW3a**JLq&3)^R9{&Dfku{B$XDbXB z>jY2%F31pQDoAlULViXtx7f;6G+9yc3EGq zfTim8*0qwVH!WyTMNjpW)FtSAetQp}HpBmhpGy5di$@O`24so;KiKa3@!xh|><*sv z|Ht?!(Moc&a#dVQ3hfI?%>|u%N+s1z-n-}-Lqo+?so%=vpj9m64llrXp72eO%eL(T z!}Cg=Xep;ll8$7g9tj}2wni2sF* zMDGkix55p+`?*eYzVyV*c^L^O!HRX_Q)dLvZ>~dBum45#z=yB_HLw4J?fwh@{_pd> zy{G%XkMgmXb(m$;TE(rX&Xr5J9xsnz391ZA>>fh;@0E|1$tYo!)(@+!AP;-s>-+Mo zc6L$@+*v6=-C|{-sryMa(CSM5Nu0LcOCS~ioE?}6Qtg+j*kzRV~iDo$lp!uqK?J z^B*VW^U1@;^uDIt+i@$ESU~>nE;JLzG8SPDKKGRBSC3#`OHp^v9 zNEUTt{@5R#e8LIM#XqI=Fpd4^ly2y>a*~%uux{~2xZc9i0CM??fzs<`BkCxJMcxF>IqXJsO`KA`lA|J%UO}9pmedX9Ou8Tx>vAvGB}mA=(D!t 
zEu>~8h~@hIZ*H`H`aIaD(*Do*m>2&B5lK@SJN)h^0n7Y7~V_s{@iv3AySHfyfD9E(>^;2fdKqOc1jz9d0@yC+!q3 zy8g3XYh)Vc^;WGPR_}y+^Gukj1<)MvA!m*;-k@ug|7L>Jn*Sl^DxRbyM-Pi*ssG>3 zo}d5yY5qUbXAPV}#!xQvcgfzDp|VGFoWvr*35>4b1ktdw1}>(U0y@uxwFuBDN)j+h z$Vi@>;dHVk&iqB%9O3m)JcnuASpzAW$RxEJS&qi|1B#`2@~@krOe;=OQInIzHC>S4 z6osAe@ciI_dF$^iO)7_6k5F z7+uXXk&K*nz6j|}*7+hF!K=;}A)E24g3IeOy(3EA0E+8nC0Xb6tPZ- zV+4D$cus!lgx548G3xyhv?FW&pB?QVzCH?P@k8Tip8wlB{pY^_-}dwVQ~b9_`E=Io z973JH{$#X)LD&zU|7^0F;n^glQxPXO#H2TZ@dWi$;XsrPa4J%x$^2FS2W5){y&NZy z1@zZB%+V(J>rXN=2+YI*JTJHe0WTj|#h2pfbbdsmopY6Oo620biHLZ~+sNe^Nx^KM zFq|a_aD|rxq$)jQmg8&45V%4MD)wwB1~`@3@kBzf1YE;;!nOd(0ZOBMkuel=kPyU} zUWJ{`7hil!W+>wt#us0J|EkDZCX@D1H)+p5I-RvOk?s!X!i|A;I-kHAJ8g)3X*erSZA8ikr>U)YA8wYU{O4 zy;^|u&Qoe3JDFhp>Md%@Iu((*&2W3)?ed!X6)oaQuT#E3$>t(WqRt+zXSmp9d;&NX z*uU!c`y7gRT{`J>u-WO-1m?=Vwo;~AFmqcpSumpt=~fS0zydZbgMh(_3t1+ATLw#1 z(?INlZohwyMRCzMs*3O-e@LP$4NUe}RY14p0!D?Pb0BC-@+UANB;j?1&xD{x71vfs zGyT53xHtv-r^gN;5mxM{{?9s$n9YAaTmtj~&awoBBJ*gs7!H1Y8Ai-!jZ>B-6}(f_ zUYwk(vQ=&|MKf7GSFL=QjKUR2R~+e;8G&oT)(v)LLhvB^!@0&`$AP3$7Q2AVSg!Rf z@ke1(y5a&wF?nD&^nJnG;1nd)gTVM^$sJoMBG}6#J2lTn~hQ!F2 z#$D~9{;0q(sOw}X?n-{O&3If!vXV4u2eW?SElr12hUJ_P**%fsM1gFbHo63z)_2nK zti@$$^327aPOorR&WCpSqst)ubC@~~)Fha`XL9NLC6<=0ah+7lj6Nl?t2E0>emJL! 
znujtm03YlXA-n`fz_C0~Rq~y0kgAG`>?pk42$?j{ijEU~?Wu}xeJ4s5EjfQXW8rNc zqa4MYql>NvFf&zb*|`MT^CfH&FhvtLK=nRnJ#TfN8^}oM5B<4Z$v;zzJ)Cc zh|>3122B3{D%n$~gyWPKL@~I=5J1yk5|#%0AX^|KmYt9fPOl#5pg>(IG-N6ZS+1y9 zb?>)X+VV*mtNcIZ6d4tu)g|e&%srQa0Z}d%*%akmSa|U0l+2UZAN^J7D#~l&MXPik zp*<7%C{XlbYO=D+R7Dg_P>OOvgxoQ4pk}k-NXo{$fequSKB;*X*d}%P-Z|x0%O_t& zp1H2*t~@h)K-Q#UuL76U$-8Bla z<<0l%)TPUJi!IBN@0G4hj_;LOniHRw-+Y&r(%-);<-dO?65vbdMHcUOb!qS4nS9=| z3ed^!-&JP2*L08ZyEG;#dY9gLE=rqiNni{uTkoC0p0>3G-UUNhh>9D3oWEuo-&>|u z-ceKc?W(lm2g9NC3bhOE-xc=tXIpuf<9eu7j{m%mp5(z&1cYL^T=oZ-{MoeYf?oTp zNSIBh?^5(3BRK=7uTJ;R&wqS-cKA}`eV4vV55K-V`k&+Ti}RPyHkRbxXCR2cyYBhX z$B-T> z(cvY3{gO*y{^gdd0$G9>ilvaWh3_-B(u&NB?L)6%44w(P{{DBs$UK@>f#+cFB%m%vM8|2Iv`8oC9Dldmn|~X9d^%1 z!guN4i=thzEs@_jA7wq?=YlGWp$opk&ga6e0jQWuPoE zOw~2V_wV0#I>%$MAaigdY=a5D5=R9wroxp;q$M9C$mTiP66GRMPQS;D*cegU)J1UYdhQ&gpu1iv#fM4IIBs38BzuY^OvGAT17dYhof}Son91MaO zjWA4u!63+Ayl|lDQ5Q(_49b^*Emauy!@+LYw{dJ&qWE_^9ZQEA+PcJsK2P2RSQt}ChEkwhh~$1hkoWq8 z7lJVv^2L~=8Oy_ZEOYQKgqw+Q*~KYClN_@}2=)(-4+CKx!!TuATpuus&6wl}Pf~I1 zW()j6a7wp0^%&jIEx>X-nV?+yFsVy9SxzDp&vQh>4%hjH#+}2u0F`34asBCf7XyHesluEpiThfJLR~=`PT__ zKC{jY;^MBd9t}UdzJXkWI!>do1He2_hE=XAdaSBZP#WSUmPN3(BFPvHm9{y1EyRW%}RFu7CgY`HSuCr}h6aKA&ervX-3zUm0UOL6i+u zblu?yq37F!ov&W>`{Vx3_TJ7{&$kD0yz}K44&oQvU+#^!U%VK7HQ0OcBHG(SaQk_G zcW--dH{RL#a(fTO9pk7t1l{fa_D<0MD%jq>7`zzv2gAL7xV!V^-izm74fg)8?vt=; z{4~!02igWU&j0Pfi~hDB|NX^w|0(~=qkPuD0m&9Qo=lmmd8~o+)5HG>PNbq69LFeS zc#Kg#1pBA^2j3nEmAA9D2L4HvdFfR`Mlb;x%;5|%lvDopRAfEa0x4n2R3)rA!d^x6 zJs)u(W~yzeKr1+%09!t78AnMJE9YiWb0YDI(b`#C>&V|+Bj(fzxT=iw3prCFOcQM1 z_ybzLiQ0PeqiYO7w_6L=zH8`0)-biFyzsdd?u{vcc@8BqfWk>Azg30eU0V}-nb|@U z^Br2q*KgnqIpo?k!B0O|QEXpt(bS&X&$vo|Y9h@6={~qYH#H&~fMScaZ4x`I64IhT zjUTG{qE8EpQ)d9<8BT#Xm);=e7TEhrK<_u(5x}8|gKBZrL%NE~>(&wxBidWOgi=>+ ziz=&aV@rhDifuESv~t_bs|IwsOj~sN8kih+5-AxMx5L!XSJPIEu44Zg5nH{% zd@Ma$dp%^oL36F$^`o`A8s~#GUi{(ZbO1+R#3`A}KUx%7kFs(pJ)x~fE(z9)qMb$j zNPk#c#U%-K@2WXviZpe!zWpU*IutML4%4@ugjbi{V|A5{M3Z4fWvPPAP8f7+N#pc7 
z>W&H@<)}Ndglx{$e1vqY7IUNdyZH#^DPoA~O%ktV6%}|2AkKB@bLkbs2uI3?XtF4D zD7j~o7k|R?t*s5AacPT_Sa@=okxJm$-~ zx&=a`I1*>CAjWW#63TESFSV<&VxNEoNsL@o+kIy|_iEs8aB{^JSXx{Tl1Ij| zF!;*g5HiS(E}#Zmj8dbIn&WDng`}1s4NOPQVWb$moER(QK}F*gIwwa&0BC|x)`V0< z)DaPP?u1SRp;WkVDq|tJ(#T%I>IBo*D&&(@R&II9R0qsZTb>dwv%xA?F;O!rD(QEnY79=w<@v)6Je$p#^i6;h3SiFRaOUtGWxQE} zB&}3sVCuQ*og28&b}R_PFbvfeSMP8!EjYgg@R+aaTNV3V1}7y7tmq{rDy%RimG$)2 zQ{FmW*tJx6n{tEEXg(<_jDdV0%|6!EWHssKd6}>Z>g}*G0eZH=iem(EPNh%6N(XA% zLa6uGnkeY!agnc8tT8d0*7uVexS;F20Y&d)3fAQ%lJzYa)|^V6Z{5EBu`a3!_eMZmZN|(7pIdzKGSnGvvSVZxFo8#nLI>G7AkPW!dL84YZ!y{iuR0ICCEq|7=hH|Es>22RA9V7 zcYRO7SDsC30m$xRvB;{773Z?ASUyME@yGQ#q6n!K+=>VYND2}%F+CxMvtAL_*1-Op zO}TC_l~K=7<*=&isLaUe#^bUz<>3^tDGG8zSfF+BfTY0~=adEcyew+(fl-VBjuHl1 z$$wN_Vs>~4J9_U@$egqS6{Sfhy-HnG8 zsH)rkP*%9G&ak@WTO&naXZ5gx%=cziV%2V5Ji}sw&NDF1$xKdCNm{65!f-NY;#dME zlxQtkhW)xi3-CUPVP<6Od(O_2!~IjvJ+&1GWtnh<)A%xLLyu34TvbIBS*ljlPL0*X z>xa*2d@Ng-me`Z(X?tip;RBNPnQVG@ps-W6hm~z%#vM%$v`(d2%z0a97_*++;L2-( zuJsL05C!7ut7E?u<_Zzl9N?5P1Y^!R8QTY@0HxPBC#i^mrGq?K3x@;U(xNmGc{Ifg zMQolUo7v9ujrWwnG=>RD(R)C0@ZM_NdtpJb%MU?OT!up~q^A=kg}-zJsRcyzo{g7d zvCz6&iS;6+i!_?%BqeitKxQ+{gdS==6P^|V#L-3xYTs}&rPs zHcQrCeSZ*~|L>cFZ_nPodHel&aPao^>*EVE)BG+Nf-V`4xrnW1hs-Qn%|I(~xR4v# zfM`7zJ0=q?Sm+<|HB2KEJ4?4x>dnr=gFFPY#ijMkG1*z!E9-37R;*o;Gij!?s}0@L zRj{|avn@9?F=hpEYikdo;zJG;r?z!MJPH#s`Ib;|Jsg~n35XKRMaNV8!8*h=ss~5rjIIJOn+WdlhHyx3Orfu@Rz81N}| z`_~6{02$=91%uRg6blx@<(@Cb zBB81^QVN5tI~ttlZqF943btw&1E0@@cqp-P2vDArTw*BLq99pK)8`LDe=IFCiXlRk z^laIpf^uT!MGuco&yEiEFOCjn@p+zrVV?yCljdU=D9r8z(`P*293H+|k@vh(oI30;!NqC3I$WH_ zE#R6h${oOGH?^ZiwhD-dF^Xwuq7NDn*PG0Q7VO5QK`-g&m4)&s2MT}4|MKxuRUx}6 z?}h8?`rMFYT|&n46?q|R?lYF-(VU@1o+~V4oN$%kE_l8XRU783LkuXsF(UIK0XX4% zTILOQr+2EMO4MqvEIUy?Q5zsJU&PHw_5MEu)1hMkErFIX5|!;$Y@X?R4Uu=&c@-a- z+-RtX4VVIbJs8FR-~apnlvu|S69&JQb56?KBn>kX@p0i|p+ z9%{MXuxIvSAmxzyA!m&KAqKzoBfHUc-0pRr%I?vR1Y6Y)rt{!6oLB0VKnmyY&ral& z70GMV*x;g2_RvrC=V7nMb+Dg?ym>$OB>nG2+*fT#+A^oYL6FS3LDexNuUU!Zoa;^TX^s*C 
z;}bl}p-3PgAM;_rpM%7_*bqm%0Yw}TL$QdPvguM%4RZ*(*+l3-qP^z~=FVwiM&b>+ zDO5NzXEGYIQaSk+V>$yfG$Z*!EZ$$6yV~lIO~&rkI(!T^gyD)%ibu&pb!)MYP`fX> zDbaxUy%=5h=yWC~^!w+#uirNv3rLiK?Ja4L9P-%Zvb$6cc?Q(9? z=}bD6y8ChS*jKhp2uu~fov$joQNF=Pa=PRiP_pwE$SD37I>CC&FN}VLxPdZwyyT=( zoH$lC%f@m`sCL%k1uv>JGEo3TGGUAs_J$?vRCTz7Y@C1Xquu&hykgA*k5goC1v*y5(vsj6MD z7SeZUQ3>n{Eh+(8=z*S>q&GMCu{18mB`Xl#5nDppP#4>qhPeqQSH@cU+YjN zV{b7`DhXXW8cKu~O*W?ttT6^Kpr`$Udy&h`wkVOT{VM?J~1rsu{(ffdx zlcIvTfm^$J z0V{&wDcwik-B_naPc2R*Mo9a`r}4tuywd8)>nt0;&_R$M%?Q6w_IJkm?9v)6rKsv%~s)5rb`D$8{p#9 z>Ozdce0AXBsYJ`~`7VaixPh>Iv0p?WQD-!)F7tbp18aumKh02nWSXl%t zGmwqN@~W15vTxqL?U=Pf_@N9zcLejUrGpi<{kz~*4LNvS_8Kf!3TuCBTFF~SeG6Q< zNggo=uUql8&XG|)e>i$-j4s7aLS7a!_unj~T$x;Tk&SsN^|VrXmlwR6wfuzz@Q8i zxZukL5EE^M`<{r@}+SuG<27 z=7gS?f#~lAP$e|e5Oqql`+yunoXm4{G0hR3k|YM-$OnL#CwBo$=fdN^0DCSUECnk< z_gN3UH9?ts<=)Bbxgpr;%ca5Q`w$G|Q{Q>J` z9KF+s^)Re9WZlYb;4dAttAlJYY*z=`a^S8GxaH8T04(kS*iTm)ynBy6c9npx9J%(p zyNz21`f8)ruDTrd(h<8FxE7;zHGnNg>S{n+j?xlPscoBQwJP`h~+P4E~+izq?g5^_bWjDIBoAT~|%Y#Dj>@(~)79Pynh9P=%!qPd`e4p7(=JVq=U7{MhwCB-t*U3J&UqAf6$-lZjbrKQKlPL z`R*QKpQ2Of1uBJkb#G@vs5!az{8iCQh29p-n7+!%JOlFI<3q#dj93>;{c3gUgM!8- zZ+~ERi)t$VcI-3dmoTvq8?D$`3%1CEDE_gyYO34;U`qF-}z4e{l(`;`u#MMKF`rU5scL=Ohz|%1^7& zNQZ(-$aNFTj3_g;)H5~T`o$B~g;wP{hTB?|%OourIj@G`OgcuWz!~C|-Bewex3~y@ z6@dMdlS-~m31#~Uh7>u+huRz~vd1Y$15roBr`(!#kE?phR>T zR>L&_RVk2(Vs7{W(;KRgx%XVY+Dgfo$aF3FpR8B zpQ9c8-x9(6p+{}a!F9Q$R1hk9xvE8Cg?zwnHs{`Q<*hw$uORyhEo!M2GEDg9n)^eo zNY>boWO!-u{YnJjbo&i$qX7OZNZ|}Q9wt|n!8#inq2h-%w?h@uO=-#gs<_W`Df&zo7*iAXK)wZBHLAp6csn)+zgkl-FLFYmT6Ana# z7+!H|m4{Mq%_1%!V|SP|(Ouixs_f+zdX&9ua?+N!0h-$h(3e41g;iR0&suKX9=R(o zBa$+XgAWV~Z*NT^3{>lI;bh~sW@sr!PG)y$`>>zXN@(G+BJb_JmRIX4f@(^)7LoIw z@5tg9tK@apcEvXKX!|CikGa{UN|w9}d5x5Lw;L@?wd7D7qe!@^#j%Y|J-34ptDR~y zUhoZZ3ikiT7>e=2aQreSRjxL8VDj7eZTz)LI~&FNx9J`~Rj z*Ru7PtZ#vJQ+NGmjnhNeIFe6Ee4e4GekAJxs|{OKtMOn?$M-K^rs#Ol`zwaMN&z6` zvK2}GT)|W#*xRtFFj2>+OQ5#RO5k2!u<{F(LON+bbgm)G%$UN*IwKUhKI+kw5UO$> zj1ap)C6P7tNKgJ{*|4O4X}D%wO^ua=UuqLEWobMD%d 
zOo2`K_zL|sOkf&`W}grkgOU0Tj!$JQ@?zkNFnK3do%iMz43Z&9?3Zp;Mnr1YoC`pb z3f=4hhlmC(E}#Xv2{vQgrncw{^g)8WkQd%nkac}zC~tEr2Y};KX&1_`wW!feYoS~V zuam$E1y?B9BGz7@331yd;)x4dRpe+%QXokRaAr5YvMokztuRQ=1@tqRPLNpEazrUo z(F9?RLfda#KdAuo_ zfkDOWSz7~jtu<3U0E2!g|Lga*ck9NOQ&+B(OGX=C+_(Tg63e3}%=LAJc2b~#)&ey} zlwy(^o7K7e?HaMHrBKz|5;vYk2z0r`>vEadJrHB?_|(j;WbB;al=l`p)wSW7KGM zZ`P)D}8 zx(i&v4R!wjdP|;JdY6oPLs89HUzKX96BX9!iB^F+UC}DyyOdO56oHCtwDRi3=?@3a z%q{rNTu}Hm(`pz@9=j6>eG?RBZqs~X#$J#39;8`m}NsSTX5~xU7MZ5 zrezJ|6JKhY+83L5zYqs6ihiK6NkJea%@o81i4Y74`vBW7QNt1?A%a#WUsHN(mq6M@dt0rE6L)v2o4R8?u4k8B>Ec~O# z4sbYo=Kum#{Ai2jq2dTMptwgcfd;7eiYU+k_ugRz8esqFV+(jX#hn8TtTv)QSDb++ zB=-q6&;<8>5eJ$e-#_d?6ZAiS>;b3scML$V+PMB4aR}-W+&vgUJ?Mr zkpJ|t30xiW@dFf8;D78W1r;PbUZ?_3Ian62ph8}J)`$i6zWecF7I^E*9RnAvsx#a- ze!;!*wGLxYi|Nl3$iU&%J);>cgY~BlY2XVd=Bn$QQu0`U6}eo$)9!$d%+;zrvT2qlt!i$Dmj=C@)r zgvQp_#6xhcw(b0d-GOF|v<*=M{Kd9)Y=p)p)kjD0@!cUr!ZOTFK@v*nYw!L$to-+l zl~C@lrho|^)1E>mR2&)oj)Nw+2iVIaCp0(gv12DJquvXk;1U1(3Zby9E#){0mCgD6 zMpAHBw`JiJJO%10ph87Jh0>5|6IJ1!jJ;=E1&8F8!4*EwP`Gn=1+NKDF&6F~W5HE5 z?i6UDSv6c5YQa5N_`}9qaL44TK^I&d?hg}o!D;l8*b7Ay+eKe!f?XSep&H4bIS519 z_v*7nVyG~wuM~@+j?@PW$l%PPwjmi@`E#GR43+5rEg~~GJiKpshH5tc{sT0WhxdJ= zG&GLorZ^3j2)sk226r|*Xt;(-(0`zq4VCo&@dGzhHtTl~y`hq>l})lM2XUxG`xMDx z34{J^!#OM+E>AHX9w??m-MCvdutTG4XbkO8Nx@Tmhx^BOXv5flw;&IdJbA!K57pBE zw2$>*DX8~|_Tbu|T7`Tl5Z^!OLp}P>6#1bR;qN~DLoMaM;}{V2G_Mp0!W(fRBrmk@#LJosDU#$=Mwi2hPIM=SFrMiYv04v{B z=z<;@j3uh<{+OZs8=PV~MR5ZeH8tefMfJPoWaLP+VCk52%E^OW`^gZzP1TjmW_-&p z`kaNEG(fpBCNaTqe0IUIgQ}X|muTzQGBi z3(C+898>pF48V+{WGwU``}AZ*Y_&Ixjw!$lWQ0;IR#pO#d}$agPisM@E7LoYPg!DYg{I<4Q)8dVNJKRZ*RL7jd{CTKJ`BdB(Hr5kN2lg@5Sg#tY;wt!L6{^U2%-eR z6a)bt2VyTyK_E}it}zTUzCeq^q@2(CAoz7o7z$vL1UTk9g)v4suaS}h<#3$J1Ab-@iV+JUl+T?G-qKXB)y7YyCg|_P@d3X2IX$;BVgse|x>Y8D#cm;Y^3lY*>)8lob|4SQN;t?)*o2)u3ASux74<4U87g9;~H@88#t5+u6zbj z()&a?&o+K44-y?S&im7=3c0k9nW@1?XICL~4gHw!Xs)9UFD2Vgd*42*d1AYlTU z!E`RXYSblu&saVp^ECF4HL4BtEn2NFjTefZXeyRw+$)5#uDCgMO)%g3&B(PnC7>Xl z=lnYaQ42_m#ICerx9R;REB2Cp*X?!8 
z*=-tXiCvD`7qyK?k*zQkM{44q;sB~Rl!6TS$ecql0k|huq}I$-=V9k9&cYxope{S| zDi1O5+)F_D2B!$Hxd^Jr!CV*e_d*%VOZ=@WQ)nwo8(dYrF!=AQYf-<>5$D4-%8d&z zBhm^Nqg>EB!fDLu-Vo0H)?sEsqQdic)28h6LO(~SY0b^i809F9P;Y`#F3nMF6$@a7 zd)I?-yWd-r3%_DXa6JfSh(X!SeBp`_gE*lUYV*}Oj&Lle>hiW)j~oN4D7YWT`o6fZ zi(@2SrFJ#feCWLLtqS_;xq+p~ESU@I;LQ-raYUEFm0Xc!!A6z~KU_4|e!z{OQ#eC3 zgHfS&{X|B;poskpUXv6vBGzG3$}m?cW+AwkB5Kq$tw)H6#f$)@>@MWBXhYa~0WfSq zqcD*)P?w1aVt1zN)X*jFYK6L+6#CjITb|QqiIdjRnIK~bW`<@*!z@*_^kf@@#fP@A z?JDNz>^Pm|e2F}pC&i$|@}YR<+pXPhu}g;Po}gTG`QGrGoXq?It7?FJo~R&KGkDdI zUt8$BaDi|9tp-++>`B)iL%c0$wFI6~7 z23gxCV$abgIpEG#)i7VJs22qlh)UwXMIn35$50jss-6yN2tENTd?av|18tDD363uJcBMP;K z7C*dXdw^!HEZiCzcqa+xx4{;2^|+oSorn40k0R-6wUDB1}2*f(L)VcZt%xCa+6u0rVk4nKXbLzBvaQ;yTyf&SsN}ZGAjqCQC+EBRFOJ7b9T3IQvpZiW|S zTmU+S+k4N45QbqGg3|;+in#dT;}%<^d`sSDWLr`@&;gtU90n(?h|SD>^wtFPDoQf8 zMj>;)(A98IS;<%mX6EQi53?bNNpyws5EJ!W504?x*CYrH=oBULk_3a3As7sT7>zJY zgTWxkU%Ze4S9F81&}P;>{p0Z)!bA|F;*hW6qBu+e6sd~HxVY=D8g4W)%?N!!W)8;m zNtqf4j-l!STxfWERKS8P$JbDVtCmdE$Bz2qeqr}1=yz6%+2Tx9{SGZ;fj7`~nYXJN zMZxR*$Be;U9-E~=+{=fiA=sv|1p?E)6;S9pD+a>0Y;88G^_o!4@=B6CtRy$7?QROj zB#BWjyoE`Ml#PCgyHv*Xa&THw78QTg8Qs_Nh^bJz4ut$w zYmQ8Vq3(m4)2O2l8Oretr3@yU9Y1iGDy~X%8D-tI6&JXBoD>iLu&kJM#4;gq!${sc z@cdBZZN^q^>TPpq6ccEOqOdJ9Ji+2VY>!$2ix5NrcFF{nPryeem zJMuy;lIBW=i$-%u`x$U~L=h9RNzsa_a3I?PPy)gfn8QJJ-Y7j5XETyY16MenCy6NR zM36ICD7~Ot9^E3Z1^?#KdO@yGD*fB3$bF$soJ+iu1xc!|P&@5a>VrgoATO*d2MWPY zt}2R%fVyFr`XFK=xRfGkVVY9}zWb7b{nKL;T(O}6#v8A``77OoF866&yk_2D(RvDU zl1Opo)wHhNj9{b(tD5p>yb>Q5{L9yRvhyVs=Q>9A`6*|~4au)$lG724hK)m3;K^;9 z!r9mR2Xa@=L7B+5MKa1ABewZJN40>z2n*C-$PB_^5x{UE{A zusAoHazAB;a*AK&evI5Ih-it%U0Xe~Iwqr;?w22@NXT%ma>ysp_LNs99Ps1**_-1x zuZHD!@`_WIqe!dNR#=DYLAX5t!a6JVQ1cXCL!78Bw|Kp0Uh~e*XjblPXUTc3&cMAz z$2!O@+UcLIi+nl&TBp<^AV$uL6}DmHuAzC`btjv(fqSIjHtp4YfM$o@5@hA}+y%-v z+O43iyd!t-N}XsCq+b0{%br{#l0^2&`eLHq!lYup9R6 z&Gh!uUixwN(%yKGi>Q`Ehr*@Oj^(EoSyjAZcWvd2zRAfP(G(@7>o0ZJuW^v+BuLRs zpkN8Vb8+Klj3>gAyknV}UppcYfNXN9?cCOSBwWIVpnHCFa&&M3;xL9H+#%RM7oE2S 
z4))KFz>nV^z2Oid$Y4Sy;S^E>?b2N1*f7<%!1}@4H*by(F4i}}g;&|4H2SDM57#MD%-Z5YhBaab#W98t;O_e|NB=* zOE8_|f0ZzCP6)^*dA2(~{8*`Gm9!IqAFZ1G`?O|F1r(u`hrWe3FnH?%hgN15*YPe- zUtJ#k&*|HezZ&dslgj_`vYqQz=XX!ye}}=Ff2K_SHSq6~ z#J>jQ{qpzM0KI?e{u;o4{;d7F;de~fe}^&rIkNUw!oGXL{z`E7$<<#8@V;sKD}nwd z8Ttp0n4#Y_SUqZTei!v)=jC^?KVC}y(#AmMK+xCrmIwn~tZ-UUDGXdi-^yX2%~#;7 z!Iv*u873an52#ou?wE$ZQDQDZW!mSa#V^p_3Ev%@gU;`eZp4TyCml%hFfxZ4t@4BT zBc}=beY0LXOmKA_Y9J8le#lK4;4FYLbAaRN+>kuOZ0Uy6FuH zgufLLL<+wIb&0rf0;)1g=AkO(yD@oq&a{XX$^&mRPA7(QDi(X0T$`1RL0D^uDz0xA zcfdT~juM3%YMC?nrp}N&k2N5ipwtG%@~Ne%5}8%5VFHfz7syoH63vk!Q@R`=arMce zb#f4q91+?phE6~f6MLhCjCwODa(f(-Xf7PcWQNrYhqHLCYUFAn+c!&6s`Xm^SB9gJ ztrJp0loIio(->7JoA`{)a)@l-Hg%oe@*2{q*Ttm>>Fy-1#!=#rtRmlr6dEW7qlGov zZSraZQPuHW-}JL44^AnAX*p!UGFI#Jb`ds+_E?TkZw7H%#QQII!kYEWmnEP@nCzNn z@PJQ@X4i#!akVD5N?KDEL}MMoNaeq^8&|jNx~2SO7y2yn z6qq|NtRC8?h1H|rb1tkNRVztpkIV;^(jHyX>C!{!jsOAWHM~NA3KNbDNF-A+%P{cT zaZ|Vii`dl6%GReRTcIbE?pMMDaYgdm`2?v;&>Z;*oD_us-{a*&kWDW0qR3?eCm$93 zms}6ca-6a;=>CmrC!8+Wz-g4sV+6XD{>ohtZgS1^7WBZ*^L`)v0}O1uW4=5LX7;0` z>P{y`*C-e2eUxK{BbeClOX{|%=Xi)%Z4;yR<43TRdT@K|c=Az@z{!N`t)m48!1?k_ zijdpeE-yVL^W44*uNp9)&0xM5x_8zKdR7Ol4kxw4>|dTf{~n(kG}@tfB92t0%2tSj z-nHB*(rAiiF#Ls*^iDYX{eJ)X?ymUT@AtjGgZ_)1oxcorU+nJe?hXdqdw=O~_jh;q z{sQ_B-r9=KoHCgIrGIZ+e&hZipN|~?y3b?}cZZ!5 zks)4I+B)R3BHdhf2tEpF(zPS5nx|HM%5t1eM1A=jvqZeGq?!Z6lpK?M)|Ka1=9wj& zIgEW3OXONvjNV%zU%4Xo-jbK^rD|}yrRbisX%)RzdtG|>=WL2nrlqg5hiN9df^Fit zRHqalgjb34-Xdy5NP=L2#-&Uhp1tepS^cgHU_8Sq*I;tDlFvX95?I9AQ6ZL<-cSQ9 zly}x{FhyG;9xX#ULW~KJn~>;AT}BZ;>184;CYi^#MG1rYR1sCj7H@2M<4dC*?wIz%{kd zcDD8Esa=zHQC9(ndBK6WlpxOM@W0IUxq^b#n$3Gb=uQvQ?4_Mr(PUod?#)$^Qv?tZ zP-qG%uQn=Y|8>VfUP|wG&OxvH1fgifUE4Jd@_bFx^ z?kn6~!as99ypnEqrf0+H6! 
zfcU*HF{Q96_gQG}(sOk@@q1JR_Bk*_Ik%;IBQL6qN7iy`l}sKft%+s=mDX#dk^U9|V+?Zwe~$Ud;U*qD~s|6XkG?Uw9+ zd;KT--(!3ZzTH2&0B`nR9}U6BkLq|b6#JUn+m3wu!_nFK@!L0cfgkwXBPlPle|lO~ zVn55QLY*(Z&=Cnn2xO262e6wdR_S78mLW{R2yv}GMp;4@C;P?OYf@e3LywC@lct6ciP~OhjGZPZD(_2~6QNQZ;1KpIEqW>c~0# zyMalrQGSzS#!xEapAI`6D;Jue);kAcS)xqoO1}UbF^-sC66%_!F?ZFC(=K?n2|j@o zr!nW=E^jjcFrUx|Mauy;;_SRqS<4bGeu)anGg<)j0Y!6$(}`fo)qI2^mVgOjvP2LR zC%$~3sH5AkIQ_l71wjH}mW8hGp7ut0=~olv>=$vUGZKRZnS&cBmw_0K<`aiBn=<~e zB~=WA|KhU*y!{4Te0u~=-yZ%01Oe!gj2Yh+xokNi6s0U*h*c@1Q<)!c1nIQXIZgq| zV`*2>N*eN#OX_QL3a)B|iszw6oXlIdA{|Rg=kqk=MDkIE5-Qoue>{^GA#s6t__7#r zMcwdv5E`qUIPsJfeZ#F&T@6?j>LW}DxteEYxXQ=vPKUc!XiD@^*gknp)l=g z3cf=Nu4@RZ!w+&sY>4Q@`N9QIBHewfpMnkatHU6}mG0)IJD~#26%OG@NwDEdA#*lT z!oUW}4VFnlMlcC277M&Ov78L5;nL*3+uRiJ%3a{rDIGb0je;JTC(dde2b!YV_HOEq zR|XoVrF=A5agU6>$cR{TVKWvIR=5B%vuf;I1b{+1{Q828TQfsSq zZ)$b^e3w|=afD%**S7%4(m-~mW zkKbIL?w_Ck`1b7Z<+BZYQB+YHTP;9MBTNdfBmK)GGXua{P1Pj-^gfKRTn)%!p;YP`l!cX*cX({9HG}}3d zmY!gNx&{*j`3wXBV+#;q{!6Cmf4Tk!U88)xrGhF$c{OySc&~dfKI= zUKf+*guwV~n7}kbd8>(|1R4V~xB!gHD+tEu2F!3eXVztH8!$LNRSr)krf_J^8h(UK zU&0eJMw&pux}$eqw>7KXtUSKg9dU4eboRsXL4`zUO~UHyTcD3cgx=2opNj*k4V=d4 z1JIHru{f%;)q(z|7zk=SsJ`ag>+ZwO+k!88fUjiA_@htm`!@e~ECCMmE&P`2rk)hM zdBK?EH!zRe%?x@S^;Vu8tMGZaQm6d=At%US`$gCf`{CesKGg5MCGn?*)$l)9gGyU@ zTYERwNkNKkr5`-Xgo#{3&A0$KxApC?m+KUwI=@>@8#e6to+aTY3cuQIYkK{W+BUx zm2outf9&kO7(Dm@J^Y{G z1V<>PC^*)sn)4ypKixn0_9)m6`<*X(@<^ul1-Qnup~%)V#tGs&$jcdLZZt*1|DU~g z?QYyg7DeZ?e+7;@S!riS@}pmKeNWCgitKcIkL}jB+?hQmGs}iZNWz#R7y@!fiF1GZ zU3icLNq__;N^LvFnl*8YcvPW)LRFzq1+r>8=_B*x%;gfq68y4&fy@LAqe#B(F9n~K zY3HRo7Tx(f0xVvH;@%uG@b;B({-9utJh-3;P%=k)$`sirMu9IAaEJg?NIh%96ucZS z1)EgYcAT?v^?bJ{k>s3AMhU1Fg}T|jfZP3AS3BvnhtXd&CWk&eVw>+dlZjY^cDk=>0|J_6UXQc) z<9{7rl+u-{om31gRy)he^Rh5$&Krc@dA(#z~W6d9J{wq{FTVQbBp zHS(xNW^=uLrvX0$T3R1Mtr47CFT4h{&h(RD@vCg=@-)}_{r>o z<_IJnx9*7KG8nCmP_DPN5zDXuFU?6Wtr?4V>+rTn=WEY^P3m8FE{;WFC~zt|OL7#c zluI&XjMI|U@;23>8tGMag!w;(!(@ILo7X_iyzH(%G3mJr&96J_<|Fg%>z17#EUs!v 
zo1CQ4q>WKj$$4Cqt6RmB%E{0lIJ-?qX8mxVkaQHY+}8mwSrj;TqB#^M_QKrUG#ky{ zw{=F}7oA$9%Si1>r+b#TPhP?Cah)kFbX7NztBmY2j@1BLX=$S~VrV&Mhq( z5-0AAHa$&yF0s2SUJGr znjN=DgPoCxwednA3={8PX+hJ1K_^MpYxaw*Hd1eI(iba@9{_b9=L?{h9)SU(9;#c9 zHLrtgUv0frzA@U}6j<_wkiqS`@==rJRa5S9#$$iTBqpHVg1;-rNGb+CjMAs6`W%Q4 zV;uU|B}%8CqR)J4bw(gIwKEIQEL*vh6}8%!e);aCkL7a*WFH(A{mSN*I)BmS#ooh) zCM##8eMA$NyJc8o_7XYFaI26Df2cA`o+>PV9P9Wd#0yj`X#M)7ZYD?tE zvAYVIv@j3W+qUg(aPyos+v%lsQ`+(66{ojI2vxbGOM|Sxvs;+Y-1=K{QgaK>WoC^B zw0UaRohqQ+f$Qlv68Q&u9$Cov84HSbfScUdm#wUPE?-T}iH1vh9pA)hqR?rmLV_50 zT;}4d{^&S$vdVhP7Mh4jrGgmbLcBIvN}`k83K*Q*tXHd^zzwA;M|~vW@r3}nNjQ^+ zyqx#qk9(?hVGfG$7b>SCH9AXeoj9JPym?nO-n4*tbvBaeu%yb#IosKBf2aphsF_}& zezJfjYHL~2yTze@O4GoKdBv3lbT_ABm%i5V_~I~vLzc%V{*tQMYb?+#TfK+}mH2SG zF!?N5>OeMH*H+dA_sOi=mW<|bzbk#@MI@tWdW|WEY3cT=;U4_5APlM34IO{X!T8?Q z&%bC5)=BZlAAr)pLouu9jss$L1_4N(82}uLkkS-~(*S`vq7+V%d~V_K ziW7)~+S8}_G9umNsz1uNxbW{Weu0t52A2J1+uQkZ1eeR5)aQuJh@U$oS{W#!k-gI- z2vhcBr#u6^;=c(FLFb=gN2}V^g7&0!y9HOqe}+Hx&pw@uz)$_N)8qct>Bozse}?>I zPMh2dLOF2tpA=>)_lFjcU)Z+5=>&+wV-W{H#^x7q>_uoM;ERVgA(FoV!e%1#uXvRs zpUd*L%~p$@iWb!>Z$2&>uXD8)aj;u)@N(G}#fd#R1M(=Hg71JNEAPeA$ELg&w ze@xg6+$7a+1fVxsn;QUGmb;hXxq2?$!TM`sI2!7r)a4KTfhN)?KQlra zE&{Q`Y`Qd!5{)RQILBk?sSjleW{tiBT17FF7N~Ivy)@i}P=-h1<@aY2-&PQ$D${gC z0_-jKBI0+c3OmtNzrEmuA~F6Ahg10*M%3L`FM?blSE=hzET5UwvhDOdHRuZbfQAc3 z-lGu3!XRK02-aCT$HEnWzY?|p?v}`onMfZxfC;V3LKWFqXPxjvw{Hsm)-_UZqb3W)dT;TnU zE^!amtZ?@!DNgO6pU&*+vvqmqK2w)x)@kzMpajf{m>GI%U`XQY(TkCIZwCX~jvvQW zK#Q&%g(IlERB|U#LNc|dRB!8CQ?I-1N7dfLZcG1b+7*ktk*jUNvY0WQenKpzG2w73 z$*E18g#l4J1IkZ%m1(m1`%lOBzhmg#E}}gzCgH!yxVMD!2G4+N;=dmrzAL`}J$$?W z{Qh?g$sd6+WZrD=?qKH@hyD>5#3cMrGTxaZ z27Soj(T>dceAGCqPdA$Si+KctU?-O}+8i7uv-#{uZt|5%5pD2j%Yv}aQ7j%o0WAnS z<^RsT3Q{wd>@?MJX1L*JmI(Fl1Y^c86> zA1k4$>hE_wj?=%%_)49fGIVT0P=gnBVGzaw)o*{oQ2ZkfF`Z>WjsUP!r~0 zuep%;XoLdfF%sw7>0j1MGEz0<>JtXCN6l29xNaMAKvu+LSM{ca)GasUR5Gw(d2m< zarEYDH7&PFrbC5*`|h{qB;l*wE zF^+XE<+3&FluEf6QwPZa;<*h%^Y+Q&b;>;4qKon=?(v^5l4IQJtmp(^Svs-tHJja<_>t 
zOiNz4>E{2`@IN%Mq33IWM{y01f3UEU5x-^?#fXwc>>*ux(oK9j0JH{*cBTAq?>ANy zf*agjl!PfaA<9H4AOfEE=xi_fsvO8%RD?o@1#LYCqv5(mCVV<4zLZUC6~Sawtz9yd zQU*&bA{{<~R4vCC{jxwb(*;%6PHGYUt;CDUfi+ZRFMu=)Oqhyuo?YirEuA6`K8s|J ztvJem=vNlQE@w~e%YXZa@7|Q;zjuev^4~T}oAO_tSBfQbEb{MjzR>M7{M_PFvF!@vzF>^Not=L zdL^^gsmGBJpt*UN3hAv@?`%uKGisAH+Fn7fwxM+$_`?{f8Yg}1w12s^4AEjX8c>XJM*81-U3i6YG_Z=*+u2iMLUE#I6&m%I(O_nODBh z(OYL|mDoM9Gddyb@8&Qr`8YP}^h{~WzaD}6?&>Da>havsup(dxwIi{ZUeCrb!Js_^ z4CA?D@hDHQ=dw;|c>WiH;<*U|HRu1q>$iu6^Z(7k;lcCyzm4)m&VNHByjJ9ZRzg=k z3j_~BARu=rjlnvT{ z%~2H1fnhC?7TY)!)`-IRc#eWc$Kls9-C0(a=J}#SaQLJd_?o35|5sTm8;Sq5{Qs~d z|G$3s_RTZ@-$vOW|4%ZSCO3b@-zC>OSGjlBXY{JyXCD7hc>E4fSydC#2xcLsn2~?L zc~G+?v`K$v-y13`@_*XoAwb>z&)b6{|9|!N;F*NtF2%ptgx7l2pbg#DzjX8+wv5mb7z7K-P<%Sv0e~Wrg}pzSaMdI4 zkzPc+P}g{d7u$HrSh9Bd&&q)cjdHhAn%hV6uy4j#)Xf%@@a7N>9u-5#1Yz?`RFmx( zjHieVMe9Phzsrn|{#k1;V$vI~v{U!jFtjp+iMzLthEY4V*)J~FG(7)vexR-rK+XC8 z?(pDUQU5!9_k8|ur92@2Pn*7&uZ2U+Q%fz&zIcE0F$|VJ#$=v!@DQ9o6|{d=x#e+Q zK_%zEJi{ak!-sN|ZPJ;wjBJf3Yl8)#c0$gx)x}y=%_Hi$Ny!hjroJ}6yP5m!Bo~LN z(q+{?uoh{-2WU0SEe_QNA-a%k zryxa}y`k;rsQdX>u{j(%aosl#U9G*PRs#0{?6#HQ-pEF5Mx`&iM;pSrsqI^tBWu$; z*D>dO#b){{M$m6nYRP3LT}5n3*N<;gPG6>E|h#83;4L~*apVzNT_dl;+A3neT-Aeg__y1}wovXEExhhgi4Tsa7 zM`A?8L9Gj6lEFw@kWd)U3NV>qmliFC!x;2$yiL9 z>?ZkF)g6qvWln&gIiX4{GkWK>CjeR?qD2MKMG|aN)XR`u*Ibn?Jrw#8!C`iJ7MX6j z3__b_l5ogSe3!k*1|a#xh3MthC56Tf4C=)&LRWZ>$bw~!b3eNPWa(!`sA43jEi$a8 zO|>7_+|~}GTBOytYojKAzBD0N`m`A|%Mfe~*%=8*thQp>RQu6oWf}X(!DF%I2u>l+ z=0qp!)?b|b=~a)W@;`$NO%?$~4FZ8x%Kr!ZZx3I;F8Y7HJAC&n|8JwTDgUpX-mUI} zR$qOV>)Da2GD2(<%D|SRz$Q>VUSYm;1MK9vmf*b=k2TX^kIYj|S%v8lI1qY(I8j~W zuhToPH$%`5grh>QGMvF8@*zVb7DI-n%BL;=o5a81-)(7LA5q}}x{$Sh>Ar#6_I&=f zV(zw;nIUK?>03R@f7R{sj!1)CSm#wj(X8`)-LS|EfuaqOs5P)x44;$q;?0nEOBeIK z-1~RwlZ3t7toxV3MG1V>OR^O9et;B-8F zmQ&pgV?>ci7p2ax4(O}-bHhFt4%lhB96N6w)0%5|{o9ZJA^ydvn_ zibRF=wQtcflO!GL%c?dEUAKxk7i)WYSdHVfx6>I3P+UB^v>)36ULU1R*dr2<>2egs z2y%ggvKZo!(GHK3`w@~INv7iF%GBTWd+h!-WD&yJ{1>*yStzR7mReu!+A6EIRrP@2i?WZk@)X>a1Xo%16?0< 
zKnHXuhc`wMRl}wxnYuYQxg6Tivjawd{BdLgsVZUex;U6pc+YQr9D{Dur0+exbL;H0 zqCqZp=HOfzWgCsLlCM8P_$N5@n{)@dbIuhaHUkHsJBPm}e=BUb+_wrw zUEZ)OGUjs~a?;U2&VB6B5Jq%HSSR;$Kq&i<30Z_fUijyit2F)@7{quAKmTI*?h&}Dzde&Dc^mOGsj_BHOHdr zq}k?DTLE%AgI-ziw)VY6g4Zik?xTTDI-I*=Ray~7h*kh*Rn5+>4y`UXCJI79D*$sGQws4}nuXQ_A7)bSL zy;ijyG)5xGuG)vWI6@O(TbOq)$hSJ_LNjP##zj9ZgPQAmory-_o!JKZ!|ByU|NQj& z_~eIA?+XlD03(C?qBw-{5_DM1!Rt(5Kv5@WZj>y;N8`n`hMv1-Zj!+?{5ZOLe|a+c zue0mn$IGkhi%;i2oLpLgtACU4KHg6s&pw@>Tpyp-4_7VAScY_vXsq&33hw*k{#Cyo zz7+ps$lx)Kt6(z76E8r2!Dc9ADky~s4-$)j7W(sm^z7;Ef|qC>v8Dcbw`M}%g3Z)A z8LEDiGF01YLD3LW`kBQ3k2okAb<5Pg9`;A0zkj?uzW(v_?8G{qNys;w$U>#h*WT9^ zq2QoD$6*4TRGEwuVbUY5V4EQRF&8W(DH6oLq2(o-l)u(H4a@c=k2t;DqGeSNh6!jO zS659-|LpAJ->*;3hgbiooS7rVyV@8tCB>&v1zXZLg&Q|`=*xIPQT%6K#bx|gQx3Pv zd&~=FO}5w6_+TT54;dUoicEwkO*oe=e&~1Eq!|+lv*0&ck3G5!y;)2`vY-PppJN6( zWHKpTvy^6a^pA_dhs%!_A3u$*2OrPRPp_(XmD|{^Tj=7`c@6?a<58pG!^M1*L6F4B zYslhCpbgGGjjm2EulvK{^+o@@*^Iy_{X6exTM5Gpa4OQOxoD&+0bALqVL;{d;`HkH zhwJ|3`}&DB_1|nJs{zjT#NT1Sm0m~S|K2}qqV(HFYZH})K_)bUKN?(~4zHv-;-BtYenp zh9tJKQ|k<#9ruTImb89VazL6Z~uA zY5nx0^9Xb?Sz!dO8(tq@G+g>zF+9E~|K^9z~Wx6gk zyiSgt-QDVn(dS*~^(Jt8xH<>Nshi-v&iWsn^e+b=uKQP4c8+DHBSl>2zlMy(rVo9_ zVmw|n*e4Q_A5YJ&D!6AQEE60sWQ68N`DHDW&I?dvLiRJ(%C4%9SlvU!?SyskDbb+2 zIvWX&cXFvtsq4Y%@WaXF%D{4#h~kkr#RfQzFVEICy1oGYg^yV+ zU-|f62>wnQ800|ewB^RGSJ}T#l^|s>h$A9y~>@AH6US#Jo&b|*>K0Fqm@QH z#?-4Hv}A`=t1@k|NVH;V9lfh zv;c236^r|fOkKP&-uFKppI&jW9ii2_U(a5aAN_jJzv{07#Q-uG*nxQgJ~Do+)PXWM zJ3YC$x;{U>xIRA_jr#9TTsMUmU=*@*9G)Xe;S@F5iK7qw%adciz|TJRkFU>8f4J;l zo;nPjvBl90#>gMWC?L>3!{Zpn*balmDk9?YoP>8j!MJ1`Y{T0r*T19JQ?-Mc;aQ|U z$l$#4vkdD~a;Pc`TI4=|0OA_H5<@nGRF`>HKW(K@EbUBz(U05APbH7%ym3}N_ zWdYhIVP4cV`o;p?p?26fi%V|K^!NELo{Bd2Sm72jVlZZlXc&`m@u7hBr6o_z@`x?? zJL+&+Tzl8X^r;etOEU-MK#M-iQzFWus+C@U%FoD$2IQaWuF{45KXT! zjS=*hp!-SBRgt;ZSsaYSOD2EJ_r>>i(O3gRK8~ymO;M?E07?V?W5sAW?TOdFMa%l? 
zsScEJgAVA$sZ>D|;g}SZh8x0XIIU9sR9nn{(>K$}3sp8NNdy zqSU|}1N~E*%rQS^3*0dWshK^tfQ*?x26<$j8RRpAd}feOh(TT==wl0qjC0@GWRZnY zTi}s-Sj|kb4SdX8GA`irz}RFLbdi~N+T6c%bOsWNd?ea~w~i~%bvb^Sa9tK)0z5(9 z(hJZBLT+jCs8!$E*${=H)_Is!`z+gFx=;=<)QfS%=peBk)YP*XnB?y6iXcy_YGK{w z9wZa*7R6l@vUn-dXm!;n{@xnqhdiV<1B4act~5=o2DYJXHSlI*k`~HbMQSnB!j$1~ zS_*t_LsffMT?krkN40638}WauQLmB1u!f8AD%t6Mc;Xto;?+~l-Q@X|AnoR5PIyUw*0}RDAY&M4ntZ;N<$#<*D`RHhCnK+8vI*_V>P~|C`caPJHyek10o60AmjysB)SJ4_yw`t~Q}9 z%62+Z!~SUWD4rr0fi}VWP~Up3{1Vx?46EK2I-B&W7E^jmZpd-K*#CNt0~MFehD^~( zv0pWp4nb)zoSS%TJNM0bX1FYwB_9x~wGf zc5cq9#%zJkU*hJ>IQB}U=CpioAhguIH%tC)-3wHX+QJI>GWP=J(I4s}SUtYyqq5pL zXe1@eu=!}#;VjuLnw#e=aISPCQf}Nx{jE1sq>fYAM;_cjB+Rtq)nZ`EwOdY=vvr%9 zi@hxM*39oR*UYLY2T#tSiI*(Ptt}bg zDB5}D^t5a7pqsA&+4MAFZQH!!XkT+C!HXV$ni9w9b{Z(Z#2OSNg@n@{uyXEc zN1IWNcl3eqGYAdJ!tMIlBVT#3l7L}u!@!)nn^qR96;)~}M8jWMVgF&%Vn*3maA3M+ z+dM^E2gR@wbmBEi$}I6+e=@$8J1e=BRaE>N)lg3%+oae@kZzjasFpAR-ZCsVlyZk@ z2kWxbVBi<7bzHsW%=n>0m56e2>wJ8ZPpg$k7*NLwfqo&$3C_LQt8K_sAJ7@tbSbVj z@`QFn%nW;Qp-cpkj^)(&hIcd+XHkTE1 zb^AR_>x*VEL8~+`eUKk%fqMAOXjHR_Pi49N{8d6h3)?Rf&VMyuVutGgd0~LfJ9B1p zRnD=7{XeTJP3vFjNbX?;#3Z!#=X%3`wzjtX-(G(|UwwZ1-SpJy_xL@e2d`Y|_jrB0 zZf{>t89X8zbb0$eW7qe1-UDybAFA9%a5CpX?dZ7#G*+_9<1|Y z55+mFsdJo^Es8;rvMBvg^6@Uc9JLsVF$(YOF%2Wlp}ivVeK#oeOBOMbnqaqz!P z6K?$y;`WaBzC%)M5;qu_O^(y2LIIUKnHN?GCr=do_0=M)e-y7%gDqdQiG_HN^aB(y z##}bEm_WioUcY1cvw{6V06lJiL=bGkfzFs_gy|5SHF)$D5Kp1SABSh|16=pvK<=~Q zkRvmS9!Xxku_1vm+K3v)IB*@1Nr&+L%uo~-X4YrkBwHaaGbBjXp#Uj1jqA-_x5h|- zeb#B8k5jG2H9_THUCTE9rW5mWeq}bH1VmS97j(IrGT7?oUr(HmD4{|q!lkb<5(mQ& z32Tp54KWTB4AxeP%Xg+@n<-t>@xvq#NG`{XoY!w5EdT$Dx# zyi|l4wiGs=JG#xriPXtGa1StMEW9Qb^LNq&8-{^dVd+LPbfC)_E8tZ5Fqs;$iQ$vS zpVNy!Yfbx|v`O^il020Ku4qt!f;+*Qua%WFuFoSXQ{O&r0h`cR^om)#!t(apbA~uyrm19!+c#4W66n5)Pknc9*g6 zp|sI|RCa{yV9Zfe)LwW~CN+!iZjAUVqxos^DTGsSo_n5haxEeOHOdB!i%+XI+yFYgslN3 z`04asiVnZXrM=*L)O;h%5kSNy5}%RlQ>Uv{CjX1^US|>NZ5%?R73RKjaT0+;Mx*r` z3<^G;a86p;vN!qJXrtfr{ey_bNBkseb|GXeHmFqv*z&P|_**#T$JIu*#T!svTIY2O 
z&QI-cR{OrSv@`Nt>KHdH3;7os;g27TfKhKqugVzErRK;g+r<=4u2Er5rndbWy$Y2$ zTayT9hu1VL%@7PN_ty$O>Q=){Ap-NykHha8T=WbpcxTqBkDyk}EXNrO8T3U#&xhUj zAsYQ;f@j9YS{OxVR_S9$XO99Fj{LwMi|d!!DeZT_?Wwc=mmc8j#@f-HKYr=d9k4n= z{DU_F+|j`8p5H@i?j30eoHgDsM)1g==pJdy<;j)WVrb`og;7Klxo|F!l|V>?1wd)I zKO*G{kN41tBjmYxN+wD%Y3l6`Z8B&`%Y`Bk$xJq7$uN$DyvX%|GqVI3cIOgQ*(VN! zMF6bQ@Ijmf*(-Ct*(Vw7Cr94riyLg-Tkl@ysKt-<48`&R&BOs6g6C|J_G~cE@#OZGu%PG5pp zmGpuLWjapfci*^2c2Kz_^OE$MZr+dBH|aVbvqdB_3l84}JU%YvcHaFzPN%Z}T`{wdt-MIWS1EZLSLyW4EGnj~ z7>%)$YKc3R+O87X8s&%m-?UOba9KN!OT}D&tl6#OcF1`@(ZP~=F-9~Y3Z7IK7mAelU zwS}iIQ&=x6_i-}Mk0bV!Zw?{wh)m~Nc>_3!(xLh!Oy{!JJ)k82*Beb=!dRw-dY7FW zDDnd;eT4>*A|UGy_>etN^!ao;wbS@vPiGc<)T7N9dd&1wIiL-j;I@>e6Yn;=gQ?%p z2O;<%o+WcFClUeMt1Wxv-B3%$b%y#N0lrJQmc5ANlTC&^e)cmX=Mec?G%7+s8mW}f z6E{8YyAD@r)42nqsK$iAQ}@O+MV8%FdX{8|w!+D$^r6etC;MrM-Kie)m>CSfCZrjJ zDfDsNmr>NAT8Hgt_n|GQeYDbDyViEm@5r?Lp?Q68lDl&7&ecgwR&1AvT_4-&`BQ47 zYSOZxP^KoX+O>Q=!U!m2Dx%VjhRF9i4Bmspi=QBiP)9*Cp8kb8P(`4`A4{ApsuRNBmJ7cuB6oa=zRZA_}B3VR8PIy>FoQ z$mu+>UMT1MD^}C`e%e}Lr9n_My2$M#$Xa7!EJ0Uef~Ka2o@_pjQ4P1I+)j0bROE(m zQNNn1r&Y0gy`2K8Jy56%NBkIW235<_DC58Uie%hCrMcd+pUnw_+)xG9&h^D=#aKR* z98B7GTc{~k2z(JYabdK4%>G9!f%Y1D6Jl-;dbkEd?A?^CjU69Z(^iz9%1B&236gQOEXrj_%wVc(dwhdHBIB3X2h zz$9q|;5d5XcBwn0HRfcM)YQZsP90ALJ-f+AuBaG?$ zI}>JCR)p0!cuV5CIEziOLd{9iCAn3|r!*4azon;imrc;~b}#(Hm05oYcrgQCufYr{AQK$mh_^h zhiCGHc16V===eUKl2)6kZp}D*$+r_NX;?o`$9{xr>QXwUkD$oGwS}rl=yrdDU~aje zi=IC%Q}tT%HZMJYNh885oz>vB{#UV}34JM+-zZ`v51Gl8Hq4MjC8MZyO^yr_nt^nG zc4Z6N@NA-9KHF?Dj2E@BkI{lT z((K7e+KP^~aNZ4Wd3#&R`~?j#_eTe_Ai`|ZooNe`ihf=^ygW3U&PF25Cj23Z!&@~o zq<3CtMReN?jydI3Ejv5tE*j=0lw?ObcFlJ8W7l{45z=*H30KYnSy{pEIYPz#;@dbMu<0pR4 zZLk|vioECA^8asLTx`qvW_gTvu(_)Uur!_NtES7C8jsa``=$pp($;Hz?Gx`GXfxj| z3zCtJerBdFhoKf>256~qG@YsBj8?{BrvXC@wN+|65%Gg)r9tI5X|V);78*Q4^45o!S+F9XAKFtSuU79^ui)gLE9 z5+Oxb`?9%7h0m6q8Ow@pE0fl&8DYc{8dppT;y_n7{&x>gSKphH_wZyHI)}`&@{7Q( zHxB_{zBd;?Ctn|Jft+9%Q`{9blwae@G}~A4ZyF!kl8u>MVF*Nft#Hi25Vv6W*fwFp zWkduS2}&mn57aOxxRk%NqMA}xk2bQj0^z@@(3w}fDp{%7Rzr%3NS;`ry^;9~6K>Rn 
zf6N_U5i`Zt`SbDfHpp9`g^W_|k5b^eT%AR2i_YfnF@wsV>J3LSSC*>R%b@@>q7#xZ zV0X7>Z_(t&4bJqU%-)h33-s;gepNvaK(HqaR+p9fyFK1qtxz8gRQsz=PbTgf+s0%j z?JiUV37us3$5Qg$n{^fdKH2JaSwE{QlXc9T;&a#d42K;>9`t`zR%?JYR)t2WnAc%Q`< z>MVte@pe80;r}R)b@^G@eR zR3fWq^Vf)eKy{;1?pMc)IpEIue!J#E1sx;mMS#2{kN8^Yn!J0xo7JpdZtTzSbZUV- zUH$kk@xas8NtA>1<;{K~*kBaHMo9p=MQ%VO*f18L#wnmJ9GpDWrcA4@%n8yW@J{Th zftnU1Z%FhB10pQ=_uLbRgv~Onh##p&XF6ql8@iW6LMjeSGwNYqO#WgZmu~&Lxobk6 zuMg9o1oVmE&JlgVGGZ7rHH%uag1(XH=wwg<=ktzA_T_*O(%XIfQ1b!8{5W;I8yuB7 zzu6?dLhbw=6hssDz8;<+Pd6_|GR^4||Biu**!f8m6Yw{JwOaN3uUoX-m{B`1uNhwlF-`|t5p{0kH zlc9%~G0R1N|J`<)PLSe++`Vf8CAjj6F?j;tV}m?`J4nCB27e!4x7PtP;O(N(aSTJ8 zIkWgDvp8trCoa(l^ROFEm2B~@@41x66MBqHzn^v5<2p$k*3+pqQZ{e>_xbJ4?l>P? z9oDn-P@ffUf}C^>!K~Mn3WAVSrueEiHxty@`Xdk-TT?m9sa%~%?bimBcWle-NKd5| zk8=3?=xw7t!@jn^3kJVh*UQxO&){?pBKk5Iii~83#jtTMWU`;44vf~?a~DXPb>-_~ z7i8YlzD@0<=o(zLvti#`W95q!h;=f|bKaJDR_GBn zu+}C;Y!9<-4b}U&_Q1@MW1QWDurvQw^8eBfn-&yKMCNq8UUua8rd?!kt!9O(37SV@ ze*$?7PTJDsK~`!>nN`1YA55fF6Yx zdOL@yx&Nm8ZZKm2$s0b=g4m6cFnRl5O#R(D_9&bS-z!3;`QW3(F=Hw#+?l-Ag#2@Z zU}mfSTas&{f)}XnW*9}+H-GXZ>%aH4lJl(>7G$m;pFa#D_*$vaXi7t7ef1}Ga{J!d zj$M{${=At5?wWB1*(|>xxh#G)B9}z#9*t<;HgHv?=kwfl?Uqh4cDVONa~OrrT|i0r zBf(+_A$dymhCn|C76S9~jvkv3PAvl>v^q7BvzYW*qxT{U&Kee2rg2bxxk@-vbq)i| z%XMk+P$xkwpob*?kBii8_DI zi$(b8QWEyHiCjr7^cHW`e??2P2%%=OY!MK`Bw z#g{S|W?y91&2UEaBNubht`I}gtWM0wdKkcM4=NaH0+>wFIO0DQvHS7|b&m=XErs-* zwIv{PbS1PFoXi59DcM}w9LXPAHf0l^?E1dnZkO%amvq%(d5Zkpb&}j(nrw-cZK)*J z^Lt_+{i_mUi9{ZpV4(1GmGHEg_n>*gB)O7^jt1~huR}kJ6lkXooenupJPXp$@mHtQ zU10bI^?3twhsv|NcD*{yd5K|MH2pJ+DzCC{+yzuna5iVT3QUJ~^A)`kX$gGz%RWOkK)^JvO>lb;f^cY&##46?A`FVuh zA$qS~QM?uWU&_j|TIx~)Ckd~Uto167g<-mq#B_j?z@PF2t>3H;Y|0Vq=T+K3{~z~8 z!1?0d1m@h<`4;!A<-f=D+lKk>FuBmkIyQOUTt)6`5j#$$$1##}(rzcf=89e=aU+xA zCtj;f-m`DUGVdVWXi8JkKAqVbrL56pGLL@gIL}sVoq2+mXlXOsu6vCNO6ZHZ!fB|4 zi5L{uKOUt|*L-kE7r$S*>vKwNtx`Sl{xW26o5ZzG068f0jG; z@Jc2%Ou(W!sHRm(^}09HH@`OC}Z_u+Utkn%`)ZRI0`q??fKJGzFVp#|Lp-C(tS zGDGfVtdoJ_P&z|g?l%_)$o6i{L%Nq}PvJx#&uwmEH2yW7wD4!rNqx=Fn1?L#w*P#6 
zyx2h+oqu^WM42))qN!ewujqgox(j85`-m95kEU(cBZF-;3yJF`(NqlEUD!d=h*{c_ z*bWLI;7<%YV?)3V5CS3FRmrr64@CVZm9YIt*{)D|T#ka#-UO|?0&FAChQnbtaia3rzS@%IV;HGqQjC*2T@x9~3m ztwlN+@l<8?vBwsU|G?mh{Vy0iTyQrZ!QtK@tZ2%-61z>??&4cKjT`uh%p6b{Zj9=_ zTDb&ygDMcmYn3%yN5-t{fH^P*;SIIrVPY|C{=#q6A+3eVpBN=Oy!_LK(DI_B;R-EG zMbsU$zxnEcYfbw*j>{Jbrf9oggZRB*#37tYSc=v{w^`YmM=vR7UKh-RKMChx<{c&a zSVZyHpA7WSuO__z&qRB@qy!AP=&ee$_=Xiq4(^u7JiXxN)^;hc#?teiG;d4aGUAfr ztIoaNR5i(qeh-8kzuCv}O1s=wqr@x+)MMwGzZ|+)?XAh!z2De-TA563BgDfpEqw1; z&WdNuI)>DM)S%i@Pe7TjPk#@^cHrMH-}CZmL04+_Az+qia{Lc4t(5qhCYJ;OD1Scd z4>k@XirN118#^WGHGxS4Rkh$%#Dy;c%E5w0j11W9XVt@xpadYQ)Fbe{ZNdQ}P>aQ~ zC`@236!@b}Nbk_3O5??u{~}zar(^Hyq@owXC}>lnKAga5Am1@z_sQj1UQXHXzL{nl zz4J@jd5n zYd5IKhsn6o40Ao*u9UT(Gb+RU;%5Nhc?L+lPv(UMo|lHx`w|=lT`=Y~1)H2R^wsOex6hnn zxMysVv&H|w>l{fh1{lltqxdvveM)KTZj>w~5^%bQax4cIZ;?fxAmRIy`_}&jRa|b7OR!qigy)G@cb9KDv!x^~a2P`$2I9e!9ALt~$xwqUe<_>vmB7@Do zSd@cb_B~%%6MK$HOEXdn_m5XwaU$n)JC}L#`ym_Sync{m^+gZqzU*N0ZS=63mhHi} zEzA`-yN%U9T5jQvc@=bVBZ##&+g_nM+kHQiXVKV#%mq-n|B?7q7{d%`1wF<<7odtA zzd@nJj+Afg4Npwfd|jfi%lIC(ppqVqtc%~lcrkLXa__mqxBFDUK4JHf=@40UJzF<> zKh*-SA2yoh*|GQ_qVF3jsk&^YQDI?V&#~TGtZv%$$+{>q!5wO<@YSnGweli%ad9Gi z35$tI|3J&%Tku{0O*S~o{!KW5|bKMG)ky`mw{6A(E-Sd zpT_ZwOGYHzYB<<)85I&aT)azu$QComR~BeF00Zf2`@B^^mIZYt64ZQ4@rIRm2NXp4 zI+OFujL{ukr?g~lxUF*GgKYO;T+ifUK4f5jE*4`+M(X|bb~0kAT2^aW+dx=3p687x z&u`D*YfO}DKoxr()Q^cYSCPz&)_!-y8U=Szy)TI~+bEsRsPbB_u7%W)JZQ(Ns%%p} z9i>QBbS@PQXOFeV=jRs4Zqka(WHio+4o{MsoYUudk}y~R-f;kd{0tdpKR~#Z9y0;< z&jc%*?~iJI#$5b)?$>un`i{^?#jQLxgI-zz_Mtu=JL{h_-P{rAsrawfwsih_5t7-U zRrdR*4f**qz{u#v4+CZh0Fw-!KKSA$@Un;I>)z${cm%FW|DFA?|8$b;fc@$9l)Apj z;rXfN82yxC3N9B+C#x4s{g&-9`x}iN`IH8GWkl?G;hnaFtOYO`K0#NYJ$HGMlN$G) z582dlOX#5Yp59`wpZe~<$@d1dDQ%hrR>ux#NL9?IFurFuU18dZ&I2^;w@Mz8hN+N#uya>;us8;pLXXZpm6=dNig)*LQ#nV7C~+EP zv>~5_hIbMI3xWy(Kb@6A=6y=-B7BMk!~7bFtandF*{Dz!(V%0v81$(`-|2rE?*O}(3`Sd=Y_E;xoFMZT}M|>njorUEvv!Xz2g6#mrvkHDa6viNxW}gL%)aD z=tqFthFec;3-OqExS(y2|9ZqQet=--xQX!T9kR%?+hpOn*@v{2cvz*>hA_Ayj*_6{g7ez?Dsk9c*N$9}sP}eXMsjl+-hpEP 
zV^)lUC=#4ULUg8$|*rG#2958<-yHsr1%V#oMqJ$6w3H2FdvG1doSbizAWP+5}3Yy zbwAuQ2R=)&JB!Aw#NZDW3af%;&e$@nB zFI5lpfBL_ja(^DVjRW6_H|(%`0jJUpGThX0pJxaz9YAY~qtMUjU5M{EOnu0=Wfm3^ zZ=bA=w(F2&05TF(xImvLsEF&;E9pl;VI0#RH1;C5+!)iyEO7Nc&kY!#NS$!Kq#_GA zjb_OA(B&g~@sq_M0|~y+%oDA1A%idqA`a1M_q^BkK_ah&tpmqwA4v5$3gv*ZB9(F6 zNaUgnk5PlcoW_gy_nWwhp<^}@bi@Pyt{fID!ix*v&v&h-ak3j1cZ(z1u2d-Uu(1H6r2ke*X| zQOIeZ={095(sl@*A*9^V`QpL8SIdA`~G!i|A7N*Q7<0tw2G9t4P%%d#?-RzjW-MWe3noko)qE-+pzVI)vW zU?n6VCqVX1LXAcac1UsI*B9iV_+0vb1_XJk=g%m_PrIR8fuAJeJh{2~QLsGE0+B?W zh5^Fc7O3p(;;5D4Qu2D3JR7*{8-JB1q~(bri4gj5Zo~?ZGy=(Diok;2)AQmlAKivF z7pTJONFxJt%3-&Ug96Wq!^c9dbDhOUYcnv((sGB997dlqimriB8aHU~^iwhg#HDuX zh9I~IqwK+Qg*1m?p$J1tF*K_m^0GdU;DWT1w3!t66N`1a^8+}M5bYRjqV8_(+Q%vm zv%Js#pC;vWjy_Ag=NEVWi2g~O>$*l_j{W9IdTrZ0TJ;#-d^v*&zdT6atCr@mcqz5$ zjW^xgeoD57KG!)lz=Mr#&|^m^7TN#ecXMv~n>(tdl+>=p2wH`nDCzV2MEYBBRwRW) zRryh262avWY0M76P3Uc1I#ba-aYq1I0{|kwq$vBG&X8?1Ni63+%4eI&n0o`voqKY6 z8VI)K=N(#X-@^}Pkue@oZzW%`TE}TT{j8XD;{(?a`#pMpA&&_yFk*&>rAg1YAlfQr ziAljR=5QZLDWL|;*Eku3K{e!-@RR*H?8}(kFq@9)PpijKXp&@E!BXHJpHNXBQ( zKxtCT(aVQPs&=N)k$=x;>h<1XsAb5_&GjZ}x>y?L=Mw))You5{+@pCC%s}n^nC3tJ zsoTpF+hZuTjdi>QK=$GbS8=Iq-HF|!uX=Ey=b}c@3kWH;%ZZ>%_jXt~Sr1@+{gPIa zZ?vu%fn={VyYms0v2{K({V2U$VaR6Umwi(+K-z-3+js;pNj*hqo>%hP+Jxw^AAZy% z=zLuz9ogdJT^SVyPgc{+lusnCpa)#eP(NR)qM*b2gONHZ{wxX*{%>tjaMw1fKi;Lqa0kY>)Ez)DC*8V)#9W)<&_t{7putY(~I{rHnWucSmaU) z^jUqt)&^xfrsf7a5Hiex2>qTJP~y~omnu))YFLKm!GiV^gt_2=nk`|-ssnAahrwf=6J*TeK4&TR25 z*7Mg!#o%rtW+7yNjw#3!k8)Mwb@_~u*~LXPp)c;MFuJM+cU;zX*h8l%;eubK;^em) zx73bw8S&_R_hjhL;-U__EJdl{)eL;8miP5_2X0#2Cn^o!L1Qz$7{kdYoqC9|aR~8r zhJ;pzp!EJ%=5Wo7Fd6k`J$!a41+fShOAkVK$>q%K9Es^zeLeC7xJQH(6)kv5{5EV9 zWGGHFJk+!oO`JZsvofX>VN$VroT@h>$69(@b2F7A85ii_(0VDyF4kN|a1E)Z&fRs( zIPMqMg(u$w&_Pnk8e259eG4H%joj=lV7*Xr+wS`D?D6JAZIEf;@Q4E*+IYRY{5$&; zo4}2{X~=L5SOz+AU^-d0y6mvz--}@Y?CA--HTlAMh7{y)a374zvRPTff{8UMOI1#S ztiaR6L>pd5$zS-ngNw3iZcI^{!EG;XBH$rKaoXRhEDiZmn|7-`q&dN*!4Q$mEI+4v ze%;BW)8i4E9iS4G!`>C9-D>^xw=uN$tyx|Q*QPyEv2!wlxET5^zMX^cu36KK-0JX| 
zrTab2oL?d{LUA%8_nY`QMeMiu(PX-(?Xq)`1nCxBGn|~|j48wvBL_JB-4~xe(|H-L z(tZm{@pnm)!uWS+Q%--w1EO_;&Xcao3wK}kVdirp0++oNd-g;z<>3(((JAxJrgAhh z(_&GV7libH>Fu5D773I`ude$~4hq?WM!C@VE08>ps6Xz?n=+wfo57@>mG=;0fhag4&BDbW2F1ot=9QE^xY)}tSr5j?VxnJ3 z!8OzFoVW^9xgqI4@}BVUrhZRP2DtjKDl>sL9h+b4;$cXPHywlCbXu%Ok(;eLPkS)N z4JER!4n0uAn@s$iJ}8wx^a5J&w5)xtb(q5LLqEVS6JUZ4hOk~EnP(xsT)=giIrXaj zV1u^pK4c>Q9jscDA+k4i32l3coZj?G41$U!Q|3h{h>F-5z+ZDDsk{rSRRuA{0u3FiD@!6g5#4^nn`8VaRjG(yKjAF;a~b%Q)$wE zwsJOfvu{U*kFvO34&$1X#^hgDd{19j0N4VYEd35hN%pA_1c6F-$ur4)xiUQLZ?4=Z~4~dwP=Zp+ZuK zas~{x4P8?j;PM?iAv-OoKer}tDgU3Enl$GMd{U?;ynbwGwjIJH@DCD_{KUHR8Hw$% z5Z(KvvPo+ku|DOurz0G;L11mpQHZEG75d1Lq(b4k2B~h5Y->jRuSyNlV+`vKXzb`0 zoS__p%5SHc^$7$P*GLD)Wl!*oFd`C$B-g$An`o~zR#ZdPw-tQ2z)xS1% zAJ5t;v*~Izo=jSUL8IT+K;O+uMUJHIkC{@>uQ#;U@8PNG%)endgEV~&R8)6FcR-#V z#qa<$k42z8& zZJx(UJDX@U6X7iGr* z8tqdvHX+=oUEbhg!S3<}(tvJaa%s>=GaI3Uud@{8t4Dv@Pq*m3yq&YHoe$Yb!HUT& zqeP1SMyXSg_n9MQ65z>M@g-n)-ksBBV+nlz#TbKhFW!-Mqb6OCMpLyHk;%1gu~qJn}38J#6ONWO_y6B zUDvTRhEpW)&wv3|}xz<>U0>!2@i#ln-vCH=x!HwIdSy1B6+a9tiR z1T+;|j!o0_#=1wRu1v_!3h5Tg=168<;EqKOnBtH3Y#%FMjQ>)-3bl87pDI`UiZ@U|UwF zd;N`3fnvn*j2EVmho$3`v2wsnnk3aA4K{=Yz)|TXvMgY$-YHIAhn%I)Fy>$m#j+~J zPyvV{@#{h3%mLqCrr&+TLSm{_Wq$DU;P`EFa|~VN>?}N!BPpsKXL>V_Qt3R8gjHKec{3sc z?l_xjDCF~4TP$B>bALzinn8V>>Nz7reH_^?=sT9k5-hT?IUswa?lYs-r8|eTi@J;c zeG`+OJyO=Q0;nJ;3dGzH3{O1H+xd+ZJTyOFo|VqOC(UiziqP?9Q|Q-GDdu`s-8(2$ z71Lv^$gs=#y(3MMV$KDFL->Af>DRTyrBZ{hqwI4C(WD3bL2U6}Xvcz8b;^~h!wMMi zCs&}P!;QE)V%7$y+$) z@L)FT_ws!wP%(1O_g`5P*b*6qor(Y zfojW9>?bw<f`Dd3 z<0vwpV9tU-quRDQ=5@1~T|iL^%K7ziCiEox)seDj!j+86U~PWK#)p&-9fYMWBC0Bt z_4a6{`$8B@!XUUA+Zn9PR3&n#?vS)XKbTVWAi1^;Gd8OQ=Y#E?XqMlfEy0q&` zy)B&&o!(C946Bi6A%@NR`8>>2;Pk{&o3Q+=5E^aD`mL(*-TL zvF#zpKoYk-2a7izO@Zu&Uy7Q#*c1LRV}v2SD>oS|Dn;-{y|bY(>p*FAreQ9MfuInD zScYQFid(?smL|jp7ulmME@<)zNhclbJW||@(Z$YN!R{>^#W0eD#q^;eT+5Q0BTq># zeOj_l432HT^Bi!V49OvW)1C66l)Ul$2;8H9Fa zFY?@4gt5(wK_(r}g1f>!Uh+}Z0o2tI6pj|ppeV2@{PXjjR&n=;FRYIRZrdq@5A78S 
zZ8KO!3-P&=sNh$Tk5vA^uZOQAg?^@RmRbQuArAUhl_W~PG+AHy#uO2(-zmTl)qkJE0S_7T+&r3 z@`Iw<7c~XRN+}BU-jOXW-Ko=O&9dEAx*OBMjT1a-iI>#+A2hL`59g zc#kSNq217O>myA(MkmLli;PJPvN?Am?j;qf&fGWwxFbEV;#x3svaTqpQD??)ngdcK zr`IB+<;n6=_iSNHwP~dwUuDy|&MrK_U8_@)RYIRElK3^-{i(e8V0HTEPFTf@Trzo; ztukAMD;-d5LCc{4u}vI%hWE;V1NGH!5&L1v0o0>9UZAgnqqS!Y1v{_5T$#J z?ktUxyBS5ar9@=Y58FbQ=fexn6E!=bSO!GX>ycruz4)B#dA*PUAs9 z2*Ss>Rp^SWjT$24;_(VYt1=JGDO3v8y{|0Rqs)UhNq6wVUJ4iLUwKIY>E1=U!dlT? zmM+$1lXPW@V~g$FM5MHO%C6X-mV=Z-fiux@hcySScxf=oJvesyPTL*RiQ74lQg}3d zG-cUZrp@Hjt(LG=FL-1q&=}DqKC58A4<^nr09U-}XoO--8Lf2w$+o83+u_c+ z+}~~aMI#jYHb!w7nYm;*#U{<~YrxN7W8SkW1r(^_0snZ z(hL6uEnxcvmY#~cE_;R`YgUK;LK0Mf*^HeP5>v5`hFGd+;1Yqe_ z?uR^W1ThLR+MI7rS`FR^}}w6vgmtb zSV&b_Qg5m2ExTytR)QjpG+t;+NIIMJS3clZ(x{0jp>9NtEP>z>YJ{l5OU~Mfv_|SL z`tnlQ;1t=6Nf_>6PYL-J3yK_R1WuueIOS0X_hOF?>K=_k3^d|{n>jOL_Y@BSF;>t` zm1-_-rLll{hN}8f)gp@LP!EM1@p_kmk>52@Njc@T@JbxgzkH)o=`snFoE-FJI_#L$ z#t2l2q!X0+Qo9RlF{$H5EZqNT6(<9QsnUZ@BUKf@y4>I-9GOTmCUL;XuWJ_D9H;!z zZ$d!^n#m}k6jLDZZz`-&DrIId1wQCsILj%BsGwqtV?`}^FP1_K`Z2P*rZ8tT`M~z7 zvsY6jM@jGPkSmD_A~E6Kq;7i{jKNI8jGj7;n422>k3d zJ*wF8zJtFT#iC$PZ_TE^FL8sZY(ANyLQG5*H%0i2&PEI+6rTK2cLEu5reQfevqDqr zf;}-}z(4@#C6V;*+$RUT4TF~cSuCKDu0khyV_Z-F(I=)MT$vXMU+0axBTv!jZg}vt zT?|7;BH~JutLJqEt@%AYushxi6SF|^@xsMI1+#kgV=7d1>;qTIIfb^tF~03M1a0X1 z?YhCxsNY+6ok;ELEYnB|Z7bG$bNZy~s-Vs#zC$fpKKA3TOm(wndB>16|ts$w_w zOx#khM?oc@aQ^+lt_43}Gfpv=D1s7m(#6`*N?M`4YT5mQk$fO$A%505(`;5J5L&3V|tKXp%)ydF|GhQTHaX1(!r%Ofn5)gZKbZ z%0j7^gEu2j!PC^st*IRgvT0#AM*hkW`^QLY$W(bdh!-FaSA8{h0ppW$MDYPrad0ks z_%nK_ADhIRI$I~K1oP8$@DC|$y#S=7)nu6f86T>9WfPEr}2B5?lfFq39k)dhGAT|c+VZW|KLUpdGVar zM9qiN#Mi(}-*PwFGCgo4VzPt{{Gj@Q&7%!_OPjLJt?!h@<#0zJecDA&U;l9LVC9b6 zt~XqeVLYyQ-1fa~lq*jNfLTe<^nFQWXGbAejhVS{f4j4+I0a$=j)bw}=Ep~tFr8yX z>VviO(1o9r$dBpYC#0s0?Qf;kxL~NNmD6ff0TeQk#}F?dcEV8{)Wxk0#iH1IULj~H z$HwP-nt$fgrE2&>3CYnK9?{#sm7$qYgsD)-RxFJVDS$I4rKa{Rp}h;*FoYgs3&BRE zV;l|EHP=X{yx#tWtkV;%5F;1|e5|*L&H}w-a9?q}B)!o8R)+XH&)M9mwx{Hct_htH zt-n8Xk21PLOMV*{9{E)68MBIE$!MWY-d?JDiO9n=kSe 
zHq~V;pvf~RoeVy*b>Lv0n{|t*?Q6nU7}}A7azlj@=+Xp^JYnBL%5c-mJOU+^g{mY{ zdIT^)3c%49RFVQKip!0`8Ic@Dg2q6(ph)0bvH?*Or`&ayg)XgIhwrIqi@2$sj?yrh&v!XX^P)Qj^1kIdl-?SO2@h#K6Av+c~05~mbkik3v1 zQj&%*1}e@zP!|xz)S(?Kjg=Brm_KyE3>$35)3(YmD|2HcgwF5|HN1(yw%;HYgQkFx zNTv*b<=WU`cWoKkuM$n7hdbd3U&<51ZUB>>6y@@vId800DJFD>T6%D2hN_YUb&XU@ zD2)d=o|4u^VuG9txI=dD$r$pXFNRXT$ewB~ST(pmytZP{Zvd@*6v5V@1rzIR#xG_X z@DBy}riO9S>FXdawxIb{&slrVy{c1Y@#-r8Obw9gs4-fY!dJ?Ip3LZK4~ zZud+@gfD&5;MI@kea$DV8zBUnja%7^1GgMGbA$$O+Iwgv2RppLSD>aZ0Ij1&#h(pW zNrpkuob6eSM6=|I;6$OGC9I1LgR{{))r71=9E4wo(0}v@boKa+!R~-gW^6XwOYrqI zoD2!|<2if`cQ$v%22orF&Tb&lYywt_qp1FX)Jb3}&mz{1U~Bvvb^nb<%;78TMo?^( z#`u9y8&^g>f;B=5HC&rq9amrL;lNzyZ_9qVjaSmJb5K#0t}_G+YbO zhOCQ)B())KGxw-Pk7RDS{6-W#yCGn;;Sd7Z`Hdi&rF^A5 z5EkCUmWzEy+k+!Y-g3~|w!I~{`u^CxgI1(` z#limfm*183q4n}U#QF<8$;S7+>lM{s5a7M>*EA2`MY(CAXBlWCNMwt4sBDh&M ztE$W*?%#n!d$@Q%lh0XJ@wydWgoO~Dz2|ARb9nW2qHX>VME<^BmG>A>Pq6AnYC+wr zXYk^_jitsoo{d;Rk8rabJcAA4W8Zy%*d@Zdv7&n}r1G~Rc{g(IzJVYk!MpJSGeWUG zd5|DH+YpQHK5u6F212eD3qh<{H0>+TT#H%iP!t&k&^SsH}L^A;6h zem$!`7qfC`OlL0Ze{mdj>F1>Y=-svQ;12^t3kY7hq5i;DsvQ)AFkodh21Rnt7=+ee zjo5t_+v7SM7YQsjO7!-pcZ;YFv!*cbuYDOt8pZHt2Kh85qMW2db;++$>{5yHQ@D8Lfc?DGw-#x(1+p`)uq@~U1 zSVQ4S+eSw2#NkdtC}7com`ebk1HUNyK#edYu#2*w_4`~^3{u8DTj=OSkyPCHr9E%5APlbU@>?uY>kCem zqD*`Q4i#+;`joMz6CSDIDIVDz@=}d(jDfSP1)p>4&Nn5$ei0&-MVxc91ce#AwU;?>q`ey7elc zLcp&C-WP<=2yoH#l)|>oVy@EA0)h-B4B0`3=vLFeNId>vw5npQ+@&CFD5%m6^|fdL z`%fvHG)ymmA3cT42aZOQbdH}&;J93KzhHqyg@_~IBtIK?2U1_M`xYyI$f{xT_>7e`-^!5 zg8+29UVvZ-x?Mc!3I`7gy4`?4zk7#aH-a$}j;Gz2%MRV{FAKs@7Y0EW`zU01f>F#{ zgrplmN2i-2;)Zc&g`|$nor}N?U@#*Csd)b)XuU?9`kKg>q*Z=5#J@0<)cfbD7 z{rdcy-Ci{1b@+lYQDLP>KSc|MdW65_Vsrm^;Kf9RmEsZ=si1o=Z?8NdDzqJiLNThu zq)~`(&@MVphYHFM*VQi^NFUFb++iQ(JnC+SCNH8JmCdxA$n<;4VCchugy?4Fho4Wy zvYk*NiGb{2;uai9%sduO-JM&6qK=5Ybw{udVl*dr%4wehe-ZP)A()|HuAHv`iB#m1 z9ymkrPW6T65nD=^cvVmIAcJ%Z4mg7`OC7kLAQc>a(?NxQx@J&nJE6Y?jweS2M^Ad7 zsE~7}|3by)TK~XH!$&3TyWf6>4QWzdBM-uNE zR6uX#tKMF?Vlqc;h8C+w)oKKk1F|8UBN{=kfobd>oRbhUA~gK8 
zJxG|bGMDwh|Igmr_qS~$jiUQEKLxIw-B_tfS+!e%XririZCeNOvr-n#K zLQN4YK+4h9xu5;~FaSvKO`>GkNuD)#e~m@rZ3ctE%wRBcF(D!|o|!A}sUTp;*S7Gv z3fZ0E<#FENYJn$nX1f4baAI1?0$2n#hD4KVgL#^P2?1W$kF2>6SQ?Ev5#lTj34}bI zVm?>8>YFH>f6v*pk-{@F)@>l3hB|xP6km}-qzv7%3ZV=(H>E(sS@MXpDZ&WOLi{wu z95GJEG+GDbg=E|U*d_{B+gKg#Du}7b0`eqG16K}WVrz&jzP*Bq-1RZGqK!A7zOWu=>(+fg9ImpDO}(mrov^Xc@5fUD@Re zca=3OTXwU+=i=^`ZTmI50N9#^yNNYhLc;BrZF@3a0Bp^|z2%zS0Gu_;b|b*nEZh~= zYz448dls%)V!;}aXnxG1i1=UvyI?PMh`S|G?MQja1|*VFBTJJplM+XwLgdL8QfF1? zAu1}}4k90Y5-W@6i`VC98=_D?-LLT3*1$HeSsU0Uw%gcl{mk-vx-QxBGcj;!(R?v z!2B!1J<2XyLKib3HM1q~tpc5C@eZIfq1c#k#e{?=5L**;32unkxHfbN9;0nKou-Kj zrGUo*qK?Iwqo3TCBFvsTNI~(Q7QV$&Eu-jS1TT-D=s}+dyYpG+Es@ z{nQ>XEQQYGI$wO5cm?Q8gP#$))$YYruf&zM^?P_~FJi3)UHQIQ%~i8|2XAxGmG5!g z)F+$1R|r>;#8$f#adlf`$NpF$~;pp@hiqV=*un`=swn(0KBve2k}~ zSAVBYx0k3pH|UrXNC5_WFwmO`f1RC=zx)^zPNyVFaCoPuS0y`y9>|do(F#2uM%uJ@ z<>>-sF-c);WYVbU7V1(je-=ohg~{4ffaORB#fE@}MKDxc?7OvD+mL7(xDFDWCJbC# zibK3$R|GB^LYydpWHIdmQ<)Vgi&&(?&KDq6Q@){D$}fG^LbA~BtxZt*nwh$zr(}uc zL+meE9d=#4;?Gz}z+55!_|B|#DbZ1G(v+W{N@BZa{OXR@eQBKlB)p}0F|#h7eRp&W zHbwC9SvxxG?VggsI2Lj1DM2^r-GspYnlMfC2-^`%v?&(7J34!P^7@~H>NgeWH0H#I z1w$h{LB*`^?e!6yBY@VMMtFv4sLfjN`DymKFek9}|D>H5ZnOE90lr63U~w4JTbe-c zRTfy>nIgPfxw4!mm?sEZUKxmPW#ws|tk1y=(~Tgzk>l+R*^ z%!g71rL7rqm08)0A+px!xD1pwHb>q1xlRaq*XG403uFG(21QAXdLL~YPVNO6(TLa? 
zJhk9yoC-Gz$^w7o;=b}R;rL}Vd)q$Cs>uvSq8a5(U6P$)PUY$bkqrmnicn96D{erm z%u17irg`tIOTtt));!JX<7y;{w$I`yHSIuI6dmlGB)5ViYr7RF3qpXc07^rMt&M=9 zjHfL?SrA8S1yB~msk${N?c!H$g1{Dq8M-YfO>NhM(pH>YJ66waK&dnCHqXj}fIRI$ zSr+i-HlS46UPxdIqS_Qd3F(YPM2J()hAngAWz4ux7qO^4cnjMvgVJ`b6hWa8O(+gu zkPy!sLn^CNH;UmRyp$Yuu;h`{$_vnK;JHDUPg`=ZA}BH86buiG;zHk`Qz&+~Fyx^0 zo4N4_fzpPB z)pD?n!;pj0mV=c*X~V+mIoKV+vi_{JVPQ=i>`s7LfrFJmX~V*rIM`i*(w2ikkoq)U zK?KV=m|gddM6gwa`~oPgM6iV%tN`66o)wmT0hCrESUm^ZI1IN0rIiTQz`^bamUY)k zD-o=bgWU-*5|n0c+zd)95v-Ae-5DrtEiPaoePv66%U$60F>+CU1pj_xiB}%}fl|j2 zi*9l7+1k*m@7ff$gr&?_gHqZ^?gW;VK{>Z*W2xmh1EoMwcQYv~fnxTpQ^pfvu9q(n z@|Wy|#-5-1_rZ0pbwT-S|Jg#1;!o^|UNkVIBjG~UfMH}w|o`K>`|J1eWXI2S;H z;v}$ux&vrd07bhcd2ws{T7H0MJo?rrs{$x(3Cw_^Z!MmjwmQH|a6)~Q)j^D-*K%>o zX_*ilgmTx>P2cVeY=vW01BxKv+FNy_OgRhD^@MPO{E(6;$zA5hYzHMw zdA;E&0dGX91F$p-T+W7c6~~wxbSaUv%B-vu=&hWR-lDREfqS1DP~k(oB?*yXX(S3_ zMpyELTYatQFdZ!|F53O=yigaNnJWz-#rPb@G#Y1MsJKuT&~G4~DOM{btN*nu)4OLAVSx%%2IQ9x0EN|~D4cnft9B1Cth1z8yrrmzN_*r3AN}#N} z+m~SaEW3RPluhm{CGhTiUnzmo<{rXLd5{4C&EQU=Aur=#j$uIr`xA@zDCt$w!KLd8 zlAtRxKb+w(CGU5#KYzJIn2%F%&Py1&gy{XSQ-7ds$UfFKn%lxW)+ASF}0*K5x z`S(zc#c?FZ!b3JAS;inil1f~pOH=I#(@FX2$^Ad&hL$wN~?IB)~0 z3Yvk49X*9VkZ|gu3%hMmXx(m9%b#kPa$0vxk_8;it6@n5+CF_Nx@z$a=T`a!iA_#i z)exe|jvSlh`Bl({DGd{sMhFK}8p%n-37tuFuuvd6LfO3&&-n*>^5h-n5sk)Ao@5y& za6Thuz0+~Tq$$d_h_J4jPnmK;O346RwBpB<@J}?9K5sT%Scqdb^`j8oB#t731`tboZI&#jCfYMO(9pPoUR~+sAaZiwhk6YB9U?; z(43_<4?zJViq&Zr1Q$SYLVX;DbGgDZdsc$Sa#U#?z>TzPnoJNz0UeD9mn|vxe#iMv zAwW^SnREx%>F{ul`V0*%nE0}USNYHUaN7Tnr7O)9en9nJU z8_m)%B0Nv-2Z@F}HIM2Pg8my8t&SL}7$TPc0p^!mBLT1atpwV)DaLV*RCNA?#V8~* z67I;K983aPBZv)uBfc?$fHR1#a^?uzCZ`bJC!b{SN`6uYVISMwG&vPdV> zJzz|-C{R~kAhR9|)Pqog>a+$Og@T#kz`h|zeT}1rz7wXq26tW4&W@@j$u%Nsi3y#t zE$a#rL}tU$ct{v*QSuE&zFf`%G9q2%r~*7-;L$L7daQb6UWPbcobU2f!r)-%i8=A5 zNFYBN<80GJXp;XK)?Yq)Yd6&^&2cUmnzFz|H?}1Zvlu~+fZ-dVX-DrT^Fr&T#jEb^ zSQ4vs%`BH_aTN3zFSKLbf2-8C@7raKSi7n$`d;UnVm`>g3Ycumq|u1+sg5fUX~E0d z0VT;y*tKD|$%IT5O~L>~7G9OpnN&y3_O)OyxXP~Y_GmQXSR_1!Y-ZNR)+_Lro?>n; 
zbm?lm_PTmj^ID#)g?cC+|2i3B^-J#gLJFX&#h-92Wojz4e5B++xp+hx$A-WSXCY-d zyD%(h+spP`90c~2rp8(pa;^C^3xRx0V1FxY$(Oy$!m6k*RG2nkya}h#STcu#G?Xyl zNvWFWqC#=7a?Z1{6+THNA_H9_5a2gAx$G&ZfifQdFvLxx_Z;UW%227vSP;bE zQngTAD(e>1um}+yoK(v)KjBz!%#vTIyxgUkGtpzE#={~aRRbgNmS#j}^@A3sI4&%; z*Xq(EFnKEKt~p+_Tz=OeXv)=IS(45$8gU{fnFUuVe(#M4krUHm;W!tb5>7}@3JksS zwb$Rnu4YEX4K7u?hS^tE=VH8K1^{@f*Kqn(L74_pYG9VHp5)|}Mn~fWl+g6#2 zO82~^CNzMgZl@Mk=7kkX8Q72toF;enOHWKLcjQ06hnKK%z@Ogu- zi&y84bMflj@L(evX+vlx*8sDYv!Mtnn~)?FUV-~5HBH6K0o7Ye-%27GF*$yad-E#! zVVvO^{DyA%S2$19Y5M0KO<+H&^_T%p2-nk$F>n z$I^`fN#9d0k{uHfMCkwk53EiFMh1oA)&mC&X_j6se@bD(PeRj@Ze>g&!l_@tg`0%J z)!$uLiY+QXwbzrSJfGLTmOzzWV_DXWMFMj%A<<5@mg=ahe(a3_d=C?zsv;IG6<=2A z=2sWFBJ)lTE<*oMOifa>T;p%cPz;JzDu}uxU@9cbvZF0B zn}aDqZ98LLMamLg&_)uaHhbtr3Z_Ubg;rOE6;~Fd+WpZIIc@1v*^r(-|Lfj)t$hMUe7P7eTii@moOYUu#ITQQG`&mMH^C@@$c*rLGLeJ0X8a1oIPx(j zenJAnLTNyJ%!T8qJwj<}g}d~*AwegXmy1gCsmvh<7mSH2Qy7lyc8r?><0xd~u^g0f z5$on#NWi3)JEK^!Vxfn3soAK#7oO8;9L}}ouSJc_)eQSSQ|Q-fixuT3FC{s@2gbEI zo`%jAm;>t5UpV^TfrF44mHgXW8h3E5flQ{V#Cvzy++}NlGM8d$W>Xz+eUfpANc{`AiwN zJ*j=Z&kv>YHEW%$Qr1=5b;aXKxoPBhs&%_JrM=g)aLAU=L zhHTiIVmX{%L4)t zcJ-*;8O1Jk6@p=tt{wSxV>fa!w<^uK0-|}{E7PVX3 zN1;&GVBY1D8Vg>2={06`HELeYoNsE-@=X<2l~+#P43x)oQ}aqzw6=<@;(0oBeM18` zrB{2p;(9%nNflMew*X^CdfBOUGLZ{JomrtF3E8z?D#qU?#>%7(qt&GBp&!__6oYn< zNQb&|ANa{0EJS0N_Elz@Qgj_G&@~CeqEP}wl)CmzTUQagu2}jChu92mVW{m5xQjYC zl5D^?dZRu@Hfu7|WEvC#!ze8CC3q85y=nc_)%QTumP|lKZ0|VyE_~ zS50l@l-OtB9R^h?99IT(G@^bQCLTJ95YDZz=-LpCb;M$uYn0?s-l!ybD{8U{NR?_a z-wg$;)6-C}7NcM(yTjFFN} zMJ8;mMCLz_U+$ozcjwAmV@e|`1whX+qT2gBuIVq^TRFjYkx)s!G$U2!E`^G>$5P>g zLp}kM7U%QgWlARwrIw<_A{aQ0z=0+^KpbHj0(H+LU#sp>Ff>W}KwJz%@K2$M*fD|{ zEG1q%yoH~-4WV`cHVDF3{q4(I7gpDOk$5eaxNF|#9<&@RvV)kpP zO6NGUGzZGy%&wPUaV2`j--&Qnt3|zl_#x)R)dO(VP`8vLvpZ+0J0xQobu%ZT#th5} z(~�iLe;i_bK{|{O6fvMFHD>l5#k!EW%T=(rX**3lhz(e7^bBAZ3~#SY~+=jTnc! 
zWEtjv#bNj>mn8p|M<|+83#Xf2J(ks z$_wX1XYiq8LG_{Y=~GAX;CFIADq=&Ucqg;88*U628x?K;CNZk=>mBUx=_vp`DCWec z!k}WOZVo}0G#LR8l@VLBDs{*KX*gveg}+Q30^6+eAyN0nbPC7@k4(`b6Car~BlZkO)<`7;`ih1?c??aKJ)uBmyZ8^VK5R zi}ucvT;|t|h3L&iTZK`nfttvw#_7l$Cm0tSpis?=E7kdr%_nvHOw{R@>D z1tz3EP^IoXcaJQOTjPWibo%D};-6qB{`19m){oPp^YeFa&R$rrPF|l} zy!h_dqqBdWSKq!ndiCPu3~+w>&lg7*M;ht+M$EHk8R;Fw9TTus%PB^$V`oEM&IJ%(vFol8-)1gk?Z2^~+Qf^6`Z7l9SC99LfE z@JTX~(K4^GvzMko`HV<<@Iimcu3?lBU7b%k#5B%vt1Zt#8EJZ?hfx}a8H#Jl)x?`T zEJl@FK0z6dioLBI3bZXj*pW-&9B+A*O@LX6XLh$=lhu8juCfjY_%=jhDM!-IY6y zGBLU8=vjF%qMze2#{s}3m)8YD{XK8j+tn*-ihpA~^Cq+5UV_QUvCEsy@wUTTKwO_FH{XY%S5$$H9wfXg_> zvnlhhUDGwSFs-Ib05j`C`}i!nhS19O)V|@vA`LRp?xyG!^+_Z-;W3NnoQ@|6`isxv z`QOmq?%qCn`_nb%WCxu@zUMf)aDd>p1Au%KV}C+SGdt+5p4UBZ7j4S|9sQ}({hI@N zFP?*jpezwodBd1A>2M!l8iy2aeP=qBZo&fxP1yh&g7XDNa;Sy{=~4(Mj`MekGd=eh zh6wW>sv@HI>g4$4>+_fHp11oC$N4#eSS49XrQzIoN<+|Md@WZt9&>F(p%J>~RIZ;L zB-kjCC~`9KvVa~lCThu_>V_1KQ0M3zot$^jcSq+Z=R3~3lZzkT{Ct7l9i5#Wy}mek zd5+$kq2o8NUz}W=ym@_&-h7XaUjGaIc=Gzi4k8rt5Rs2Dq^D;bEG3i#xxLJEZY~04 zibaT{aVi(qn9T@W>?(VPIG|T+E?xo0`_seI;<+ZneYw>V-m6@7GH(T4| z>%PjseI<_-xM$!XLB?re;bJGEu?KwD(v!8pO+)|w& z&&@SJN9YwDa?Iyy=U3-;a_K{sdv&x2LPN;L)Xz*tdOtzt>;j>S!tGhftCjg4nBj1a zXd*~B@|+2qk^wTCleI57)RzOQ`O*v6brdojh+fFXOqg%(6!R-Dj>gUvnS)dBz;X3> z9arkKG2xDj5;BdYsN=Y3lt!@8JFWr2k6g>s>~ zz=pQa`RR-QcC|F+p1>LNh!Q?PN2f=}KY&~c;(1Ybm2cD)Vp~)_KIt95&^Py`E**2` z6CsH3WcmLku4rr=*&x57v$f^ezYU*Ly4(Yc+Ss$g`ZpV!=>3}8C%hzH$_*B;|Nh?7 zr@N)~fAI9#vxoJ6AI~3u^qx2kET9boc+&gy$#G;WXhx?4FiuHr8H}e>iClj|1}HD) zoDn!o#-NrW+YDvJyK(;b1JTG2(}19kqCd|fI`C-lWCIz8mBPYP4uIIb#@$R`8s@;JqbKY6M2VIy;^$3<6UK0tr`f#3qnr%#rnD$@a_ zq*QLbR$vYa9<3584#5;WeG9zMlI%xzfo{<6DN9KCtE^6NfKz8@StfR6WQlW>acEjk1Mv9oz4M@M-3~{}s*Ug4w&p#AS0P7kdYF)M~kt8B~*} zV4@>Len%?nneJ0V4qw}G4bN(jgn|@XRrIn-tExt!RgLcjxKCKjLN=cNNLE0-RVkkU z`yF7pHF^tG)p(Kp65ZtPG{y6q2@`N{rLzo7VD#2OMXz)=(H{1g1tqrcvuVsC5+xbQ z>8Sk>39f^@w@Pb?S)juxb|C5KPjc~=3zfgrAy(Hv&A|btP->+7Sq>Bq2s`Le>#4Dz zH5h)B!HWim`NVXPmw(91T9~kl1M0fSD}2vtRgh3qvr?v5y8^x4gyaOKiAursx7uP$ 
zhLM5~li}Vucb(=#AHZVWE)$)KBfksoxwK`K2I$W)sEiyc-C6s4d7pW7Mdmx`5pZoe zX;oq#HD$h5OYPOb{)$ppET5_oMu)o;INSTe{TdRDWwZUfrg zBo_?nC=)>SU0!=Fmp|HV*aa7qa;;*(KCItHw)2kdasHUs_Dn0LgT|I^9-@2MOb@L# z3LS`*vzSvzd6D_~7=P)23Z8;n&oB+fB=OqKS*)+JW$hJaPLS!0@MAWOF(;_=_rAB+ zcXvBg`J|Z+yl&PYhzzKsy6nyRLz3Wrr-;_3vQ?wzy13GJ_%z_{uvEJD*A`jNZ;h)w zykI+gHDOup_CEZp~@899C`vwrYsVz*Ir(}HH!KN z_fAqcj>TrtlwIs^i>JK8e-xrbf)g@I!}b(q*SJqqY0y6hlJrgSmIyMX8<1o6XkBc< zC#!|5REr`;zxg}_Ti@|tZ{2a-uCtNN>H%x_y~Uq>cXXVmOK!QU_z?Rm@SEF94Ww=Fs?qByAQY5OE_wM`^-@k3+&+Vs#SZ6$JPFRne(!(cQuhD>1iM z!Id64jmEv%9u!%E+)9Nev@Wf@x#@UIxJpN~)?~~cCS#S!5Gdg(G5M+>$QWF2GWUn6 zK>ObQH=c8ZuJK%@^CSLML+NX*E;P|7ft*@!S2LkuR$$-T_jc9A)1nnfHlwk;qfq(_6JVJX3EkC{e9otm!bd!xqzFq9Z2^*!af0vj93~49d)7HbDli8fQ(9< zOxX-tW49{iXZl{r~y& z%ac!^9(B-m0X^sjCF~qhX&~#|2FPa-wcVZlcRl;Ry3^mr_y14J@Bi%Y9Xv1D|M#Ci zd$9lC%X6Rhf9UH%XSDgl_R{okkFph9)u4i9yFO9ZNfL@@e<=?;w3DL+_}5kK^>j!!Tj+W zoMAg7O2PoDsG{!fZYSUCDkJRQ)d>45jIf`Lurd_A)^$vVta(u#i49hE>{~M&CKp0H(`mfD6Y;Z-?O~rkJ%TZ6oDL3A=R;&d%$=i`XJ>EDROVAyk2+VY2{^<73;Fsj zEpioOPIcaC?(|TLWrdfW57lJ-dA=tyBaw~-%88v$Ri)pxBjrxoFDIs)-99M!i6ZWRJl>_f8@^c|$lbg+pE^k7* ztO4SmOO>?CPN7(pAa7LM+ON}n;;VnwlmApA$_>eX&z|ldROG+@^9T9wKAwA*|1A53 z%JZh8u3Gu8vK*{Ai~cx#uCf1S<64kgs9S}qr5jdWrq+DFq-z}Vt<*`W|FKz zUbw*_^0EO!DhMKln}l)LY6n)o3E@*3sokO2PJ+j_I%(Td>K$LS zFNX2_h%vh*C~}P&z&{W!ni7Sq-qbU`PEx&WBSueM+Xf)0dmh~NF}F(6ovwA2PEbM zGKdc#pjna5If(+oPt!0|wS+P7bQdhfI4@pWl9|}wxGMWQ&ENLAO>4Hzk}ZtN5}~d4 zatXokBu>jK!=@8`?bcoZHjdoP&9~CSA;K|aTq(TuYL`v%SHY`W168P|zr^=65IREJ zwrjT4Oc&;(jFv%Zq=k-eP#V$Sja#~kT?Dt6OT#IQW^>DcNKIL#Ce`KyDf1hNW&y2N z7RbgazNGmsq4JvXET?t-)o8DBBQbv$=G&BL_Mp@=TU%}e>!fmwki?<3e<;99F5zQFow`nwn+%W1jCo>8O=VMN^LJE2^`U@K2FdSn4O6Am3 zk&m;ebBi{unX%rlCPP9O9ptH4b!}GYtb9!H2tHikaT^J)_e7m3#46NkO)$5gAT{BX zMuF(5<2aSFaBqL~v>%ZDZ}IN8&z|{D$!K>t@(=cg`=jUkPyOfn{oUuo=exT-JwR{F zwq70d-B$+(Cy7 zX;tpgrG=}4NNDI}aHvlp8wzI9SKz__J6TolCy42r|D=)q^AzA+EY%@T#bQ3n=xwpk z_PKStHqV;+zxFt5X8>5D{~tVkUOxXh=*UfUdX(uFfl4MDG5{@I0 z#aRz@_MHUH@if$V_v9$D;|{FPLy$@(OQVF0N#&^Hg+!cv&T*mhfyvM2K;LkOOwwji 
z^8rX@a;_yXg){wc6p0B(WOlak1UwRJrFQTuR8=%SQ46DXm@1V}js3Q_^#mRjCDIFrFQ! zx|NQSYnPF1wVFj}+cmC#->&TXSMmLWL?DYDpA>S|^H|cL9fw#5;5N6y=Siux74cfF zzSy&Jb@bjjb~EJqgeUG4M|cb;lKMc~3b_s3^WxPx0ud~5WCc2+J|wS#JX<77z^D;p zS9i|HD326v_m<^XN#(rtG?IH;*@U`wGs`Dew*8Z8T5bbGB)J}CwbYCR2$q#RjS4!SsZVo z(ucL<;3&w~f2rGU;fwAp^P`#)J|=5e0GGtq>7pC34E+mF!>*}Y4T_?+&Pwl)Bh}N} za@JyJLc*yhCOvKJYN%#RVR^l3zuJhO~o1pL$#H%+zzrv z89jE;n#FRBrbcj9XAz>)X_`Q)4UiP2oaj#wHaWCC5fk4NI_QucL`d%0V&)eMELir% z_(O|Uz7^K2yzHHK1O?#Eca0-Rs~9pCBQ_&hc0ARcHsNa@wW-ONm5o!-$R~&oB}Yz86#f>aMw8Bi~!Zm^Q{VS7&~A#E(|xEnloK z`taQQv!48~NU){&zf}HzzS}R$|4$Dd{QvIdxp)7+mmg!fx0uBK#+t28)*5#!o9DTC z5giH%>H>E&;(#pqW=1%t0g+6Xr;(4I?ZcmRdY+C(^dsuHO`*$=Il+k@lp3ZSvpIys zS`yNk+f^ypPnQ9pCBlMr34F!eKZ{Ng5cPL1sc{mASPH`5bWrEl&iX*%3~{404Cm2*b7vo$OtQZmvotTdqt(Y>61zO?VHX%NW zv=3-&dbP%xp+mJ&+{&}D7T&Hrz%msUT2uY5LTrmQiKRrSPN_)b&%a7qY&NATYRtj2 z2-9dbvKxIAnU~Y|GPGa|Ki=xVnudDHg-TCm=f0a~dYp%?nxDs?Kgagxg0Z-RAO5 z8QQ7YnQgdLUpB*oH@DxVapOh?SKjrFGy6K&W09k%?es%T7SBr<9R*!fi8@J!mUf^WzohR?EycnG!ub)rlY!}yhVh`AfR{aY#$ZPUz< z--Rke>|b4D9^?*KG^EMggAsf~qY-P7lDL;}LV8n7qh2v7=$3(A^$R$yOox4kJIABS zC8{mY;au+Q*91-QOad_@9EW9$3d#BHppSR$%~Yh&iU6~GTPXM}Wl~oSf(lI^miHem z=v3>ixV5@ExJ;SqbJ+@9%qxCJxo3r{AK4O?8M9X}(Nsmh#-jE>a|{^p>qUh!*SR9| zR!ry0iom}P&r#vO3%(`(P|rWL_T*;$3?XsWrJ?MQ?hiinI@L6 zwJd-K>`=Yf_YPn`HsPwfqqL2&mRQ|3S`$w_0GE*{99?W&S8m6>^h&LWc z5ls9ZW*F{x=p7A11jmOX%0;5$zL11)?GUclXP_EWLsfHvT<|Z=xuqPl`l2BnYuXGD z$5Fth8RAqkYC4gNzGEyTI4U;vZbBepN5Z6HiNt(5WT7x1FKIfrUet&=rTI`s9AL~4 zJ4IlswQ)JWO0+SyCy@@d?R;5$8st_2)W|9$Ov5+PYjWKL_6xlkNH}>)JajSwF+X97 z?01k#->$-WXq7u6*OIINJLMh9b0oPi9zgUH9jOj%bE7o!;W9MWZmPJFC4?EYK~IrIEF3>vHvqnnaSCj69$Q|KD4k$kAtgIyxQS*B8B%q@TA=qmbOtLj zoLh^ugf0#75fH9kz*UUu>C@as#KczyCk?`5_2F@WWvKz>9ZT5q;VqRqs=HmbA3w@Q zHaJ9`4$7iQXqHfVoiB9DstO*eUjE4x&e>Hty(hjW!W)z1M>7AOv#H#9>=famgS^uARFitF+t}?}lj^InV<;iJwv%vyC{YW; z!rs+fzQ73veK2I~DveQRCPfURO|&o!X6KC^N45 zS+{P>=P);vwXWyhpy9Iv68ROcIh~!yiYHOJ9Zw=W3`tN7fMV9@p}p3cu5~{$mj_O* z6<>}3SWlNmC^qOb!T$h_9W#uMHKMv4epZNq@=zd**IF?Qt5CVsxp`X?08a%LtRIM_ 
zZ*3}&Ve?pOSAFq1zNcl%(&V2fJcDFAWY>hFZMp3nh4BQVez&WBzD}ovQy=xaXgiJL zYN;C?4e-}a(`ttD!l$&tshcX0G5L6=+zpPx@D)wK%GUYt;hlvG2-|Zb#ZQrN^t5f` zc6gjlhKamHVZn(>IvRzfcInk8;Hm&F5%oHqR))8r$@(j41?4xk3AXMUP%4P9gci8U zU%Uq@7vm0MQlYq}0&e;UL=zqDR7po>AJvLZ3*A_4Evst8x}Ue&2UD}H>0I%^uT*=r zfof>wb8D?DYHLenMKk-Ekt)k*+Ob~Vtu#{dA+~$lZjeK|Hs4o6ZWKbwd%Rbi=4v~| z0(JX&osn5e53{Vv%K9&UT7KK|T3ZeucO#F}j-t)c6`G0@+;r#zTp3fEk(+&l7uBZvY zo$p$Pr^(Hi1@eo>S$Jlj)St##1@gdxi#_5>UFEhG{Q<@Z2R~NhUg4wdIi~}tl}~hZ zytGO+J9T)m$HT>y79NrUGpp9ppSH>Ga1dN5(k&e=ncdGWqk>+1=zQqdA?R$0d|6$k z0@RPJ5t^@=+kSb^di+1X#?@vASi=AJo<1$#|2Q~!$p3La&u8HOQ9yWR+E*th**Qt( zcM}sn!Ld-vlki!bMJv%igcPL(`1dqR$~D&wQSrWE&8|_=dXW|j@2dF@fYd>ca2ziV zj8W~ds_IqT3uZ?1Aw0r-ZeDEXADFsMS>rq1`Qwic+D_R2D_Eo}b_=FuPfbUG;U{s3 zwtbw)+S(WBk#|NyoX{CLhw|>HPnjCk$*nG(F51@jM-?6mwK}@DE-IMV>n%ZY5V`|9 z)@jwoQ??+@jFGt66{7dV`3Xjel>tws7|C+fRH@nIq>yuFt`~c{jhGXk1R$e8__N5Q zoB+g4k|ZAVdec1Sqo=)ODC>!s({WE?N_dlG8g2pZT)FJvuU0DqN|q>hVNhFm)o=b( zjA`4Jp!F~^dJaUA8M)0%9gZ2$?m-BplrS_VN$%^REu_uNOOc;8 z_Kr!SSM*{#m`Qr2-$EVNyli=2s-a_R6XwS2w|>mf-vxUa&E8_((H^ekW6XGhBnL4K zeEM|wX!|N15k$p=%U5bML?q9cp!WMd}hFL(8Mq{V&r(SVP!=AA>qS>bA;%Nq zUx{>zu%!v<>FF3PNvcB_JyD(^PxAAn?PC^===dib?-;-M5#x}-hDIo6g*ci!hn@ild_CX`J=zSfYQ&wM4`Z6*7|w!MWop&tGlF4?C7EGu zn|Sk@L75ufcUX{9$wffz>e~Bzrlx3ZgiUFp8t&Kuk~>#4f_+t^sFQ0fH%LtI-qUAt z%DxWm<>9&Qv!4EUjk#R7Emgp!=YRdm`QP5NXAk<{eLVNB|GiVBgYb~0iB9;;Sr`KA z#^XgyI-@#W{sNB|q;5H5ELp^pYg@C=%pe3_ZV5FP&pOCT;n&sacFMV2Zj=|@_cRJn z$K@YB1m8TmQQURVQJHw&9i6>CdHv4;I?JI)0ZE9TkN|B<9n4A<`03L?zL8}f$V1&uh*=t8qiP zFiB|PHU-2jbagMT?8UW8+Gf>@}v$yxFkA=Wdz;WYdu+09~-z&#|c=mkn z!TxtI&%N`1?L$q_v7wthGJc*i9CP4p)fE42yv%co%o=Pm4w}R-nH#*h$XS%@+xg5D zZy4)x%1#L_5nFkJ4<^nY~N6dbFxvKN<}K(ZD;Mhv?di)Ctr}8gODzP$p9SHkoQ&wlR?S?Ixu+_X zS~7Pn>{}SQGMsw@m-;MT2mrByaV0oBD&PHv3^e5^0^noQH6JxXR8orKZSl&T2vN3H zu5zEG0&e4Fk{aUeEE`yE_^f`bzZsIdP%Y;H5aTM&@0#f4Z`!T7d{Q57*qXZq5xCNi zG^Ci%t=pRFOar2F0cX`VR>n&)9~LsVrAx&`j$6R4J_FlWI&8P-LXZv7qL}g}s=Q6L zw#so9%mGKq6KzqGtsC0Z#OsxtgRFp=Ow9{f1Y};`2j2OTc;J#pFmYlEQ 
zl3d*p$8GU$E{Ul5UJ2*+R$wHEOjgiofxh_FX9KZh&daN;ZaRx!{)hO%|EE5;*Z-!N zQydHzm;#sRf6t%q@0azz{pSz*-@QDaPyhQ#(#?zSHm>~{di-K^KLco^<_C8PZ>RN{ zKJG^6t3l~X8lS1Jo5N``SzGBVwNP6BR5)E zp+RjtV=QS<9l7IJ7w8zBs) zt4Q9c8KQjO$_8I05VpBb$65=W1xRRERL#^^Q@#Ze=C$qsO?6{hjIsX|Lr$9=aAUTd z4GIHo#=V9y$242h-a3Yymd$L~jAMXUVcyp?;WV{!O9PH+_6s-P*z9zbHC7Pn?$CI1 zhs&)1!fn=>y(6u%uo~)Z*f1y8?+P=Ui`if0tmeaCGNU$^teY|W{kmqj4cLEJG-kK3 z|MYjC9+dC@K0DZb`e6ULkLNSmeq(f`?d?>j(PveHoO_ ziMiNQH6^KUOItchlgZ1RR;+e1x@a&%!RwdJWVVXE#2&>GUGxsDU&;ny58;b6i)mK( zA7B^Wl(7j}*pb}WAGJLr(8 zIU3cmwN~K)A!e4P`0dS7n*DG1t>2S5S-ZixXo$DRvc3A+a|qb&XbU4lPE+lQusgr+ z&5do8)GD@b@~&)TF1CKwV#v}t3$C@bl^pwg)=O;u)v;0QqcELIBj@wND&*moIu=V> zSLr;IdQa{I67Ui}`U$Yke+wmacP)0sC@=z9wrnP=Oo9s}* z_f5h&wo258ld7z&)P}{Z%+6)i``9!1BXT+yE2#Jky!m}36ZC@p=c%RVgM)#TaKoffNCV_&@g78|zs*Qexi;ws>L)I53K7fij&)$OaL*<>x^= zo(!9T|4IK@cVW1x`<5D@VDWpj-CdwY zy2`yRw>>l=Cu^_ZqqSfb*46tNotgMs3kjb|{eo3kmTLdFXRoL~F)$cbT|YLR`dQ~G zXk#?_b=A~)dgWp-RCPQTSAMy z4BJFIT6xz+=%o(K%ICR(Irmynpzl>=uGSJuOz0_PE)x+R)dc#vznea)tC;z4vOGN< zs!Y4Fq|Ib_K|=9vt4o}dcGTxX>qv85XwLIJ8>$641ErtxbADnG?%Y!XWtLhemX_g31gVSF>QiVuvYajHx8=Yu;rprONQ|NN1J!T)(hh#ER8` zf>*j$uxe09B+mm8C6(te*4fg=4{Y6gjRl;ekJFoRW;H&`)RqdM2}HQXdrAs%ujS=? 
z6}?7reQ8eY-pQzTu~5*8n6cz$1fH;A1KVfa46S0 zLGo^>)p_pnt`19rG&U+K5r~d^hmRUKG_atPzo_Xg(h1DM7x3R6XkKcIgg*`9-zClV z{r-E=_DKcY7gnSpY++~Lvunx+I+01#ZD~O^d%e|Z1^3B)`32cLdnXqON}HLryCbT2 zG(PX;ORT~w2->HYlpj8vSSn+T@)+D2ffP(mZ8KyX>!u{e4?3cxh2p*^*B(&O!iO>2Xi)A3l&%*`)BiB{+S*<22r8Ro2D zJLG7y=i7l&hb?@pPC#2?S&0YSM$d;?&72juV6$%><3#H~&(+;+>ud&^fDv4peOxYI zb;#S*zy%x0`y0%jt6*k(wpMygCLXqbnuD9PxMl|I>dEEJa%A>t`8r1nIwE<>^U}o> z#RFBGzcZvnBn7yom^I{QqjKt~G9@Sb+)h9N(*_L=9L5_hyJiP%x9kz=)C9aelz11xH${;}{ks01 zM8mDBx=Z{cKHQ(pQbh*K9mgJGZ8`oN-?tTdx*2&HDuoA~!JLm2KQ3Mc7jOIW^fd?N z2_0Qh17V4^p#+q$0hkJhngOcGYy$e(aZw#(>^M_4qJ=p=3p@(wJI+Q!vcNiKtaOCz zGdF#tC$z}x<_FZelCAB=($D!vdCG&UtECz7@56ow)9J667p?jr=N{ZFeV|e0u{k=A=s}|ilO$x;q8v~$>s~i_oc%2qbX04Ex*0CM% z8D$z8@ruWQlWP0)skwtL6jU7daLZk*G9!u24BlPfc|r&2VCG}lRfhRlUgsrS4kbCN z2rH%N%uQv6ECZaz%`;e1m7IQ8X6%-Hs?3Sa_lgSBd}DDAfreZ?@u`7UEk_ITyK(k5 zbA5&okL#ehsmh@AiTy?dei-3=?XF(1^0Ooso(4a4sIfQ-U-l1BB&sWqZe*6cUNSUX zU$c;0p|_%?%ueT5d0tqNlp8c^xk+CzMo(Da+A;3ZzRm9;V6zpwz7taXOP?V_|MB8_ zu+gi8yV}J{#;N1yfxg=91AAj`3 z3rRjho!i{8KM0O>rMKP$jRj|pF*zNi4|`_i`3vf2s97$X-69I|Gp+I9d86E*8AOKz z{p2&A!8GZ}6u1yl7QMI?G87onMV**r9#^xjM9x_T`W*X9x-NnXaGmGN5OWTi)ea4z zH`T9=yAY(&T5)WbDf*WbwPEzQl2-^n;Rw zUIrJ6{gRGhfIOk7)@w6i0?XJI`kZ85>%H8FJy}#fq_E zbjq;s2xYIUm08*}Z}OFiFlZ)a`SJWM=*5p~4PH%pra)us)D|wql$I;B1QL5}iRBz<1;1cgb-FIw0~GT<&ppr{DZ^c>8*TO>CiGtwRmK_BD(2|ihz#!C z`QvTgdP0Br@9(-d+q5n5heF9YRE3@Rn1bRvPzb(s{ZxHZ(-?<4&@pb?tU~7YmkX-;b+@%>-vb4>n>v}nz4cYq zE9^B7nV*PK@{Xj6UKwlkl~|z7VvcuvHC+O+{;;{xrijy;t?}zMToPM`)ilvB^r5)B zi%LHB7RHOY93&Ld=xo4s`HhBYU}-zYOW&b=0(tz!cEeh^35CSyR{Klo89UElx} zq0zb6G>`9nej6g-MDRQ;!{PujSVplWG7?8+g*H3ryY}oI!Hmh1#ZTl?i#XeUSgBp# zCn0M;xt!^o?lWE3>YmvG2|1(u6M)&KxUT8xuEWMoYWNv7H>LOXWjHP*;NWn^P+b(@ zy$#a)-qLc4Q2H`$zx<-f`_r_uWH?q`ET&nR(;x-%Wah2vEf&Het`*2wego+`EKNyZ ze89;V4=<6p2dP_<*1mtHGuo`aL(UTHUc{*S{yk9ghAMBtpRUrPJKp=NjC}8+FF$$} zIlc8&w|&}Rp{L%NbgSEAZGRX8e@qmP>RqcE1gNt+<~w2I0&Fz@8!7fI@&;ap^&6se zqVrXdIER17iHaL4&&4)sqa#|ABQ9M`huSDukNU)<72f;{Lxg9*!=fKbkDUhM?7D(U 
zqR?7Gl2L3_@#Q7s)b#B1teNrn%H}eSvL0dHh$5+Yt6XiY zc0@Y7$!kTFWyu2}y+MK(+`dfqbBsfNKpoy;1wOv>)k*K{l2Xr6FvA+kX}5AZQ&Kp= zAx1-+ZWMly?UyGepiofE9PZA^oLBNtfsK;#O5Rm*elH@zaz$lKP>RF8p$0-3CmInY zR0((Bb88m7OXyRqRU62hH-3stCgwbisKETg{CdH*^LwarN+L}8T{y2@8!H_@e3Gm@ zqM)xx+dm#7KS8EOzwXwJjQ{Y7dVhy|#g?I`(!xb0tZj1bQKPZ5ofepG{i?5%!k;!w zj<|#KkW^2NM6oz0JmN;z2=6*Sg^%bFV{q=itH&5I}u!8@Z+YpDXuEX z9ka(4Bj4V3ih1VFPPOhKYcI=1Sky_&Sy7@)$UO6m+PM|>U~)JAYdCGrw z(f+h|zh~nmuRE0OKLS*irU!j&bqPX_%kb%RCy;V4OrO zmYOsh3IlI&d`jnPdvoL{XkSb4B>HALXCpLce0lW+i$16n{&p zC$5L4Pfvynn~V-eQ(Ohl0-ZGy>NNz)-tjv5@a_b+xY>d>HcZ~{&hr?knyXREX3Vg= zk!30V-AL!iYQvZL>1Wb;V;EqyR`P3 z13xLaj`rTqo`b%L^#z-=e~}rwc&5KD%6Z(saVwJot-8pxBxN&tlRMaj$vqwbOurm5%u|0Y<&=$AO+)Pl{`+r2K8c%*j6~wgAnzt?rzZO! DIYT9) literal 0 HcmV?d00001 From 7a3bbcc9b0921e7dc8eb4720f417cd90d4283e91 Mon Sep 17 00:00:00 2001 From: Mitch Date: Mon, 16 Feb 2026 22:39:50 -0500 Subject: [PATCH 48/62] chore: bump test slot duration --- .../src/e2e_epochs/epochs_invalidate_block.parallel.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts index 78ebee3bb157..61603ee36f7b 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts @@ -58,7 +58,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Uses multiple-blocks-per-slot timing configuration. 
test = await EpochsTestContext.setup({ ethereumSlotDuration: 8, - aztecSlotDuration: 36, + aztecSlotDuration: 48, blockDurationMs: 6000, l1PublishingTime: 8, enforceTimeTable: true, From 625a7c5afa709718d03f3837bc0ce9bdb26e05fd Mon Sep 17 00:00:00 2001 From: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Date: Tue, 17 Feb 2026 11:39:35 +0000 Subject: [PATCH 49/62] fix: Fix checkpoint invalidation test (#20579) This PR fixes the checkpoint invalidation e2e test. --- .../e2e_epochs/epochs_invalidate_block.parallel.test.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts index 61603ee36f7b..4442138cc91a 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts @@ -58,7 +58,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { // Uses multiple-blocks-per-slot timing configuration. 
test = await EpochsTestContext.setup({ ethereumSlotDuration: 8, - aztecSlotDuration: 48, + aztecSlotDuration: 36, blockDurationMs: 6000, l1PublishingTime: 8, enforceTimeTable: true, @@ -195,6 +195,12 @@ describe('e2e_epochs/epochs_invalidate_block', () => { expect(invalidBlockOffense).toBeDefined(); const currentCheckpoint = await test.rollup.getCheckpointNumber(); + + logger.warn('Sending further transactions to trigger more block building'); + await timesAsync(8, i => + testContract.methods.emit_nullifier(BigInt(i + 100)).send({ from: context.accounts[0], wait: NO_WAIT }), + ); + logger.warn(`Waiting for checkpoint ${currentCheckpoint + 2} to be mined to ensure chain can progress`); await test.waitUntilCheckpointNumber(CheckpointNumber(currentCheckpoint + 2), test.L2_SLOT_DURATION_IN_S * 8); From e8d3a6957a5180088e4b2e8b6b3855609f4e6116 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 12:57:13 +0000 Subject: [PATCH 50/62] fix: async world state cleanups (#20578) This PR #20559 started using the async APIs to work with blobs. This has introduced yield points where nodejs could go in and execute other bits of code. This has revealed race conditions in the way world state forks were being handled. 
This has passed unit tests because the tests mock cleanup --- .../src/orchestrator/block-proving-state.ts | 9 ++ .../src/orchestrator/orchestrator.ts | 89 +++++++++---------- .../orchestrator/orchestrator_errors.test.ts | 4 +- .../orchestrator_workflow.test.ts | 4 +- 4 files changed, 52 insertions(+), 54 deletions(-) diff --git a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts index fb49b4be2d0e..53c7a594db82 100644 --- a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts @@ -55,6 +55,7 @@ export class BlockProvingState { | ProofState | undefined; private builtBlockHeader: BlockHeader | undefined; + private builtArchive: AppendOnlyTreeSnapshot | undefined; private endState: StateReference | undefined; private endSpongeBlob: SpongeBlob | undefined; private txs: TxProvingState[] = []; @@ -232,6 +233,14 @@ export class BlockProvingState { return this.builtBlockHeader; } + public setBuiltArchive(archive: AppendOnlyTreeSnapshot) { + this.builtArchive = archive; + } + + public getBuiltArchive() { + return this.builtArchive; + } + public getStartSpongeBlob() { return this.startSpongeBlob; } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 23a9f040a550..001e91e0de34 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -71,11 +71,6 @@ import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epo import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js'; import { TxProvingState } from './tx-proving-state.js'; -type WorldStateFork = { - fork: MerkleTreeWriteOperations; - cleanupPromise: Promise | undefined; -}; - /** * Implements an event driven proving scheduler to build the recursive proof 
tree. The idea being: * 1. Transactions are provided to the scheduler post simulation. @@ -97,7 +92,7 @@ export class ProvingOrchestrator implements EpochProver { private provingPromise: Promise | undefined = undefined; private metrics: ProvingOrchestratorMetrics; // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections - private dbs: Map = new Map(); + private dbs: Map = new Map(); private logger: Logger; constructor( @@ -182,7 +177,7 @@ export class ProvingOrchestrator implements EpochProver { const db = await this.dbProvider.fork(lastBlockNumber); const firstBlockNumber = BlockNumber(lastBlockNumber + 1); - this.dbs.set(firstBlockNumber, { fork: db, cleanupPromise: undefined }); + this.dbs.set(firstBlockNumber, db); // Get archive sibling path before any block in this checkpoint lands. const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db); @@ -240,9 +235,9 @@ export class ProvingOrchestrator implements EpochProver { if (!this.dbs.has(blockNumber)) { // Fork world state at the end of the immediately previous block const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1)); - this.dbs.set(blockNumber, { fork: db, cleanupPromise: undefined }); + this.dbs.set(blockNumber, db); } - const db = this.dbs.get(blockNumber)!.fork; + const db = this.getDbForBlock(blockNumber); // Get archive snapshot and sibling path before any txs in this block lands. 
const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); @@ -317,7 +312,7 @@ export class ProvingOrchestrator implements EpochProver { this.logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`); - const db = this.dbs.get(blockNumber)!.fork; + const db = this.getDbForBlock(blockNumber); const lastArchive = provingState.lastArchiveTreeSnapshot; const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot; const spongeBlobState = provingState.getStartSpongeBlob().clone(); @@ -445,14 +440,20 @@ export class ProvingOrchestrator implements EpochProver { throw new Error('Block header mismatch'); } - // Get db for this block - const db = this.dbs.get(provingState.blockNumber)!.fork; + // Get db for this block and remove from map — no other code should use it after this point. + const db = this.getDbForBlock(provingState.blockNumber); + this.dbs.delete(provingState.blockNumber); - // Update the archive tree, so we're ready to start processing the next block: - this.logger.verbose( - `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`, - ); - await db.updateArchive(header); + // Update the archive tree, capture the snapshot, and close the fork deterministically. 
+ try { + this.logger.verbose( + `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`, + ); + await db.updateArchive(header); + provingState.setBuiltArchive(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db)); + } finally { + await db.close(); + } await this.verifyBuiltBlockAgainstSyncedState(provingState); @@ -472,6 +473,13 @@ export class ProvingOrchestrator implements EpochProver { this.logger.debug('Block root rollup proof not built yet, skipping header check.'); return; } + + const newArchive = provingState.getBuiltArchive(); + if (!newArchive) { + this.logger.debug('Archive snapshot not yet captured, skipping header check.'); + return; + } + const header = await buildHeaderFromCircuitOutputs(output); if (!(await header.hash()).equals(await builtBlockHeader.hash())) { @@ -480,11 +488,7 @@ export class ProvingOrchestrator implements EpochProver { return; } - // Get db for this block const blockNumber = provingState.blockNumber; - const db = this.dbs.get(blockNumber)!.fork; - - const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber)); if (!syncedArchive.equals(newArchive)) { this.logger.error( @@ -502,12 +506,6 @@ export class ProvingOrchestrator implements EpochProver { provingState.reject(`New archive mismatch.`); return; } - - // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator - // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup, - // but have to make sure it only runs once all operations are completed, otherwise some function here - // will attempt to access the fork after it was closed. 
- void this.cleanupDBFork(blockNumber); } /** @@ -523,6 +521,19 @@ export class ProvingOrchestrator implements EpochProver { } this.provingState?.cancel(); + + for (const [blockNumber, db] of this.dbs.entries()) { + void db.close().catch(err => this.logger.error(`Error closing db for block ${blockNumber}`, err)); + } + this.dbs.clear(); + } + + private getDbForBlock(blockNumber: BlockNumber): MerkleTreeWriteOperations { + const db = this.dbs.get(blockNumber); + if (!db) { + throw new Error(`World state fork for block ${blockNumber} not found.`); + } + return db; } /** @@ -554,24 +565,6 @@ export class ProvingOrchestrator implements EpochProver { return epochProofResult; } - private async cleanupDBFork(blockNumber: BlockNumber): Promise { - this.logger.debug(`Cleaning up world state fork for ${blockNumber}`); - const fork = this.dbs.get(blockNumber); - if (!fork) { - return; - } - - try { - if (!fork.cleanupPromise) { - fork.cleanupPromise = fork.fork.close(); - } - await fork.cleanupPromise; - this.dbs.delete(blockNumber); - } catch (err) { - this.logger.error(`Error closing db for block ${blockNumber}`, err); - } - } - /** * Enqueue a job to be scheduled * @param provingState - The proving state object being operated on @@ -894,7 +887,8 @@ export class ProvingOrchestrator implements EpochProver { const leafLocation = provingState.setBlockRootRollupProof(result); const checkpointProvingState = provingState.parentCheckpoint; - // If the proofs were slower than the block header building, then we need to try validating the block header hashes here. + // Verification is called from both here and setBlockCompleted. Whichever runs last + // will be the first to see all three pieces (header, proof output, archive) and run the checks. 
await this.verifyBuiltBlockAgainstSyncedState(provingState); if (checkpointProvingState.totalNumBlocks === 1) { @@ -902,9 +896,6 @@ export class ProvingOrchestrator implements EpochProver { } else { await this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation); } - - // We are finished with the block at this point, ensure the fork is cleaned up - void this.cleanupDBFork(provingState.blockNumber); }, ); } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts index 6f5e642e54fd..3d148f6beca7 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts @@ -151,9 +151,7 @@ describe('prover/orchestrator/errors', () => { await orchestrator.startNewBlock(blockNumber, timestamp, 1); orchestrator.cancel(); - await expect(async () => await orchestrator.addTxs(block.txs)).rejects.toThrow( - 'Invalid proving state when adding a tx', - ); + await expect(async () => await orchestrator.addTxs(block.txs)).rejects.toThrow('World state fork for block'); }); it('rejects if too many l1 to l2 messages are provided', async () => { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts index 2ac46e75b7fc..0ada8a3c4267 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts @@ -183,8 +183,8 @@ describe('prover/orchestrator', () => { const result = await orchestrator.finalizeEpoch(); expect(result.proof).toBeDefined(); - const numForks = orchestrator.getNumActiveForks(); - expect(numForks).toEqual(0); + // Forks are closed deterministically in setBlockCompleted, so no cancel() needed. 
+ expect(orchestrator.getNumActiveForks()).toEqual(0); }); it('can start chonk verifier proofs before adding processed txs', async () => { From 0aef0889eefa868d9013421ad2f243727de839a7 Mon Sep 17 00:00:00 2001 From: ludamad Date: Tue, 17 Feb 2026 08:52:57 -0500 Subject: [PATCH 51/62] fix: skip flaking epochs_long_proving_time.test.ts . --- .../end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts index 669132d4efbb..ddf522d2c102 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_long_proving_time.test.ts @@ -34,7 +34,7 @@ describe('e2e_epochs/epochs_long_proving_time', () => { await test.teardown(); }); - it('generates proof over multiple epochs', async () => { + it.skip('generates proof over multiple epochs', async () => { const targetProvenEpochs = process.env.TARGET_PROVEN_EPOCHS ? 
parseInt(process.env.TARGET_PROVEN_EPOCHS) : 1; const targetProvenBlockNumber = targetProvenEpochs * test.epochDuration; logger.info(`Waiting for ${targetProvenEpochs} epochs to be proven at ${targetProvenBlockNumber} L2 blocks`); From f7016beb05af0b5036d859000cc15edafa1442ca Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Mon, 16 Feb 2026 17:44:17 +0000 Subject: [PATCH 52/62] chore: better agent utilisation in proving test --- spartan/environments/prove-n-tps-real.env | 5 +- .../values/prover-resources-prod-hi-tps.yaml | 82 +++++++++++++++++++ .../src/spartan/n_tps_prove.test.ts | 14 ++++ .../end-to-end/src/spartan/utils/index.ts | 1 + .../end-to-end/src/spartan/utils/k8s.ts | 8 ++ 5 files changed, 108 insertions(+), 2 deletions(-) create mode 100644 spartan/terraform/deploy-aztec-infra/values/prover-resources-prod-hi-tps.yaml diff --git a/spartan/environments/prove-n-tps-real.env b/spartan/environments/prove-n-tps-real.env index 9b8989671922..129abf2e7750 100644 --- a/spartan/environments/prove-n-tps-real.env +++ b/spartan/environments/prove-n-tps-real.env @@ -7,6 +7,7 @@ AZTEC_SLOT_DURATION=72 AZTEC_PROOF_SUBMISSION_EPOCHS=1 AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET=1 AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=1 +AZTEC_MANA_TARGET=1000000000 # 1B mana CREATE_ETH_DEVNET=true DESTROY_NAMESPACE=true @@ -30,8 +31,8 @@ REAL_VERIFIER=true RPC_REPLICAS=1 RPC_INGRESS_ENABLED=false -PROVER_REPLICAS=200 -PROVER_RESOURCE_PROFILE="prod" +PROVER_REPLICAS=4 +PROVER_RESOURCE_PROFILE="prod-hi-tps" PROVER_PUBLISHER_MNEMONIC_START_INDEX=8000 PROVER_AGENT_POLL_INTERVAL_MS=10000 PUBLISHERS_PER_PROVER=1 diff --git a/spartan/terraform/deploy-aztec-infra/values/prover-resources-prod-hi-tps.yaml b/spartan/terraform/deploy-aztec-infra/values/prover-resources-prod-hi-tps.yaml new file mode 100644 index 000000000000..ffd0347086cc --- /dev/null +++ b/spartan/terraform/deploy-aztec-infra/values/prover-resources-prod-hi-tps.yaml @@ -0,0 +1,82 @@ +node: + node: + resources: + requests: + cpu: "7.5" + 
memory: "55Gi" + + nodeJsOptions: + - "--max-old-space-size=61440" + + nodeSelector: + local-ssd: "false" + node-type: "network" + cores: "8" + hi-mem: "true" + + persistence: + enabled: true + statefulSet: + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: [ReadWriteOnce] + resources: + requests: + storage: 16Gi + +broker: + replicaCount: 1 + + node: + resources: + requests: + cpu: "7.5" + memory: "55Gi" + + nodeJsOptions: + - "--max-old-space-size=61440" + + nodeSelector: + local-ssd: "false" + node-type: "network" + cores: "8" + hi-mem: "true" + + persistence: + enabled: true + statefulSet: + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: [ReadWriteOnce] + resources: + requests: + storage: 64Gi +agent: + replicaCount: 4 + + node: + env: + # the pod will be scheduled on a 32-core VM + HARDWARE_CONCURRENCY: "32" + resources: + requests: + memory: "115Gi" + cpu: "31" + + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: cloud.google.com/gke-spot + operator: Exists + + tolerations: + - key: "cloud.google.com/gke-spot" + operator: "Equal" + value: "true" + effect: "NoSchedule" diff --git a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts index a98c2a43fd08..5c1750c41b24 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts @@ -31,6 +31,7 @@ import { type WorkerWalletWrapper, createWorkerWalletClient } from './setup_test import { ProvingMetrics } from './tx_metrics.js'; import { getExternalIP, + scaleProverAgents, setupEnvironment, startPortForwardForEthereum, startPortForwardForPrometeheus, @@ -44,6 +45,8 @@ if (!Number.isFinite(TARGET_TPS)) { throw new Error('Invalid TPS: ' + process.env.TPS); } +const TARGET_PROVER_AGENTS = parseInt(process.env.TARGET_PROVER_AGENTS ?? 
'200'); + const epochDurationSlots = config.AZTEC_EPOCH_DURATION; const slotDurationSeconds = config.AZTEC_SLOT_DURATION; const epochDurationSeconds = epochDurationSlots * slotDurationSeconds; @@ -357,6 +360,9 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { ); await sleep(secondsToWait * 1000); } + + // scale to 10 agents in order to be able to prove the current epoch which contains up to 10 account contracts and the benchmark contract + await scaleProverAgents(config.NAMESPACE, 10, logger); }); it(`sends ${TARGET_TPS} TPS for a full epoch and waits for proof`, async () => { @@ -371,10 +377,18 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { const msPerTx = 1000 / TARGET_TPS; logger.info(`Will send ${txsToSend} transactions at ${TARGET_TPS} TPS over ${epochDurationSeconds} seconds`); + const scaleUpAtTx = Math.max(0, txsToSend - Math.ceil(TARGET_TPS * 8 * slotDurationSeconds)); const sentTxs: TxHash[] = []; const sendStartTime = performance.now(); for (let i = 0; i < txsToSend; i++) { + if (i === scaleUpAtTx) { + logger.info(`Scaling prover agents to ${TARGET_PROVER_AGENTS} (8 slots before end of tx sending)`); + void scaleProverAgents(config.NAMESPACE, TARGET_PROVER_AGENTS, logger).catch(err => + logger.error(`Failed to scale prover agents: ${err}`), + ); + } + const loopStart = performance.now(); // look for a wallet with an available tx diff --git a/yarn-project/end-to-end/src/spartan/utils/index.ts b/yarn-project/end-to-end/src/spartan/utils/index.ts index 5d945f48e758..b4ecc612825f 100644 --- a/yarn-project/end-to-end/src/spartan/utils/index.ts +++ b/yarn-project/end-to-end/src/spartan/utils/index.ts @@ -25,6 +25,7 @@ export { getRPCEndpoint, getEthereumEndpoint, createResilientPrometheusConnection, + scaleProverAgents, } from './k8s.js'; // Chaos Mesh diff --git a/yarn-project/end-to-end/src/spartan/utils/k8s.ts b/yarn-project/end-to-end/src/spartan/utils/k8s.ts index 70088963fbea..e9329839b7bb 100644 --- 
a/yarn-project/end-to-end/src/spartan/utils/k8s.ts +++ b/yarn-project/end-to-end/src/spartan/utils/k8s.ts @@ -522,6 +522,14 @@ export function createResilientPrometheusConnection( return { connect, runAlertCheck }; } +/** Scales the prover-agent Deployment to the given number of replicas. */ +export async function scaleProverAgents(namespace: string, replicas: number, log: Logger): Promise { + const label = 'app.kubernetes.io/component=prover-agent'; + const command = `kubectl scale deployment -l ${label} -n ${namespace} --replicas=${replicas} --timeout=2m`; + log.info(`Scaling prover agents to ${replicas}: ${command}`); + await execAsync(command); +} + export function getChartDir(spartanDir: string, chartName: string) { return path.join(spartanDir.trim(), chartName); } From 43ed6a90bdaf8772f479735f90fdc4fb1e4ba8b8 Mon Sep 17 00:00:00 2001 From: danielntmd Date: Mon, 16 Feb 2026 14:51:26 +0000 Subject: [PATCH 53/62] feat: Error codes for RPC calls - adds error codes for low-priority fee, nullifier conflict, and insufficient balance while keeping other errors internal. 
- error codes are supplied to the user when hitting the error path - update existing tests and add a few more to enforce behavior --- .../end-to-end/src/e2e_mempool_limit.test.ts | 24 +++----------- .../p2p/src/client/p2p_client.test.ts | 23 +++++++++++++ yarn-project/p2p/src/client/p2p_client.ts | 17 +++++++--- yarn-project/p2p/src/errors/tx-pool.error.ts | 12 +++++++ .../eviction/eviction_manager.test.ts | 14 ++++++-- .../tx_pool_v2/eviction/eviction_manager.ts | 6 +++- .../fee_payer_balance_pre_add_rule.test.ts | 15 ++++++--- .../fee_payer_balance_pre_add_rule.ts | 17 ++++++++-- .../mem_pools/tx_pool_v2/eviction/index.ts | 2 ++ .../tx_pool_v2/eviction/interfaces.ts | 32 ++++++++++++++++++- .../low_priority_pre_add_rule.test.ts | 9 ++++-- .../eviction/low_priority_pre_add_rule.ts | 16 ++++++++-- .../eviction/nullifier_conflict_rule.ts | 2 +- .../src/mem_pools/tx_pool_v2/interfaces.ts | 3 ++ .../mem_pools/tx_pool_v2/tx_metadata.test.ts | 7 +++- .../src/mem_pools/tx_pool_v2/tx_metadata.ts | 8 +++-- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 16 ++++++++-- 17 files changed, 176 insertions(+), 47 deletions(-) create mode 100644 yarn-project/p2p/src/errors/tx-pool.error.ts diff --git a/yarn-project/end-to-end/src/e2e_mempool_limit.test.ts b/yarn-project/end-to-end/src/e2e_mempool_limit.test.ts index a6649b39bfad..a158c05540f7 100644 --- a/yarn-project/end-to-end/src/e2e_mempool_limit.test.ts +++ b/yarn-project/end-to-end/src/e2e_mempool_limit.test.ts @@ -1,7 +1,6 @@ import { AztecAddress } from '@aztec/aztec.js/addresses'; import { NO_WAIT } from '@aztec/aztec.js/contracts'; import { TxStatus } from '@aztec/aztec.js/tx'; -import { retryUntil } from '@aztec/foundation/retry'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import type { AztecNode, AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; @@ -70,24 +69,9 @@ describe('e2e_mempool_limit', () => { expect.objectContaining({ status: TxStatus.PENDING }), ); - const txHash3 = await tx3.send({ 
wait: NO_WAIT }); - - const txDropped = await retryUntil( - async () => { - // one of the txs will be dropped. Which one is picked is somewhat random because all three will have the same fee - const receipts = await Promise.all([ - aztecNode.getTxReceipt(txHash1), - aztecNode.getTxReceipt(txHash2), - aztecNode.getTxReceipt(txHash3), - ]); - const numPending = receipts.reduce((count, r) => (r.status === TxStatus.PENDING ? count + 1 : count), 0); - return numPending < 3; - }, - 'Waiting for one of the txs to be evicted from the mempool', - 60, - 1, - ); - - expect(txDropped).toBe(true); + // tx3 should be rejected because pool is at capacity and its priority is not higher than existing txs + await expect(tx3.send({ wait: NO_WAIT })).rejects.toMatchObject({ + data: { code: 'LOW_PRIORITY_FEE' }, + }); }); }); diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 25200df67014..c4ffc3fa35e6 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -133,6 +133,29 @@ describe('P2P Client', () => { await client.stop(); }); + it('throws TxPoolError with structured reason when pool rejects tx', async () => { + await client.start(); + const tx1 = await mockTx(); + const txHashStr = tx1.getTxHash().toString(); + const errors = new Map(); + errors.set(txHashStr, { + code: 'LOW_PRIORITY_FEE', + message: 'Tx does not meet minimum priority fee', + minimumPriorityFee: 101n, + txPriorityFee: 50n, + }); + txPool.addPendingTxs.mockResolvedValueOnce({ + accepted: [], + ignored: [tx1.getTxHash()], + rejected: [], + errors, + }); + + await expect(client.sendTx(tx1)).rejects.toThrow('Tx does not meet minimum priority fee'); + expect(p2pService.propagate).not.toHaveBeenCalled(); + await client.stop(); + }); + it('rejects txs after being stopped', async () => { await client.start(); const tx1 = await mockTx(); diff --git a/yarn-project/p2p/src/client/p2p_client.ts 
b/yarn-project/p2p/src/client/p2p_client.ts index 4bcfa04539a3..4851b7bf7bbd 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -32,6 +32,7 @@ import type { PeerId } from '@libp2p/interface'; import type { ENR } from '@nethermindeth/enr'; import { type P2PConfig, getP2PDefaultConfig } from '../config.js'; +import { TxPoolError } from '../errors/tx-pool.error.js'; import type { AttestationPoolApi } from '../mem_pools/attestation_pool/attestation_pool.js'; import type { MemPools } from '../mem_pools/interface.js'; import type { TxPoolV2 } from '../mem_pools/tx_pool_v2/interfaces.js'; @@ -585,11 +586,19 @@ export class P2PClient const result = await this.txPool.addPendingTxs([tx], { feeComparisonOnly: true }); if (result.accepted.length === 1) { await this.p2pService.propagate(tx); - } else { - this.log.warn( - `Tx ${tx.getTxHash()} not propagated: accepted=${result.accepted.length} ignored=${result.ignored.length} rejected=${result.rejected.length}`, - ); + return; } + + const txHashStr = tx.getTxHash().toString(); + const reason = result.errors?.get(txHashStr); + if (reason) { + this.log.warn(`Tx ${txHashStr} not added to pool: ${reason.message}`); + throw new TxPoolError(reason); + } + + this.log.warn( + `Tx ${txHashStr} not propagated: accepted=${result.accepted.length} ignored=${result.ignored.length} rejected=${result.rejected.length}`, + ); } /** diff --git a/yarn-project/p2p/src/errors/tx-pool.error.ts b/yarn-project/p2p/src/errors/tx-pool.error.ts new file mode 100644 index 000000000000..1b59099757e4 --- /dev/null +++ b/yarn-project/p2p/src/errors/tx-pool.error.ts @@ -0,0 +1,12 @@ +import type { TxPoolRejectionError } from '../mem_pools/tx_pool_v2/eviction/interfaces.js'; + +/** Error thrown when a transaction is not added to the mempool. 
*/ +export class TxPoolError extends Error { + public readonly data: TxPoolRejectionError; + + constructor(public readonly reason: TxPoolRejectionError) { + super(reason.message); + this.name = 'TxPoolError'; + this.data = reason; + } +} diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts index f7fd0d3252de..974f2e31b6c9 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts @@ -12,6 +12,7 @@ import { type PoolOperations, type PreAddPoolAccess, type PreAddRule, + TxPoolRejectionCode, } from './interfaces.js'; describe('EvictionManager', () => { @@ -226,10 +227,15 @@ describe('EvictionManager', () => { it('returns ignore result immediately when a rule says to ignore', async () => { const preAddRule2 = mock({ name: 'preAddRule2' }); + const testReason = { + code: 'NULLIFIER_CONFLICT' as const, + message: 'test reason', + conflictingTxHash: '0x9999', + }; preAddRule.check.mockResolvedValue({ shouldIgnore: true, txHashesToEvict: [], - reason: 'test reason', + reason: testReason, }); preAddRule2.check.mockResolvedValue({ shouldIgnore: false, @@ -243,7 +249,7 @@ describe('EvictionManager', () => { const result = await evictionManager.runPreAddRules(incomingMeta, poolAccess); expect(result.shouldIgnore).toBe(true); - expect(result.reason).toBe('test reason'); + expect(result.reason).toEqual(testReason); expect(preAddRule.check).toHaveBeenCalledTimes(1); // Second rule should not be called since first rule ignored expect(preAddRule2.check).not.toHaveBeenCalled(); @@ -351,7 +357,9 @@ describe('EvictionManager', () => { const result = await evictionManager.runPreAddRules(incomingMeta, poolAccess); expect(result.shouldIgnore).toBe(true); - expect(result.reason).toContain('failingRule'); + expect(result.reason).toBeDefined(); + 
expect(result.reason!.code).toBe(TxPoolRejectionCode.INTERNAL_ERROR); + expect(result.reason!.message).toContain('failingRule'); expect(result.txHashesToEvict).toHaveLength(0); // Second rule should not be called since first rule threw expect(preAddRule2.check).not.toHaveBeenCalled(); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts index b2a8ad122011..f8245cf4614e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts @@ -14,6 +14,7 @@ import { type PreAddResult, type PreAddRule, type TaggedEviction, + TxPoolRejectionCode, } from './interfaces.js'; /** @@ -78,7 +79,10 @@ export class EvictionManager { return { shouldIgnore: true, txHashesToEvict: [], - reason: `pre-add rule ${rule.name} error: ${err}`, + reason: { + code: TxPoolRejectionCode.INTERNAL_ERROR, + message: `Pre-add rule ${rule.name} error: ${err}`, + }, }; } } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts index 1c7898a3491e..b103c0ef404f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts @@ -1,6 +1,6 @@ import { type TxMetaData, stubTxMetaValidationData } from '../tx_metadata.js'; import { FeePayerBalancePreAddRule } from './fee_payer_balance_pre_add_rule.js'; -import type { PreAddPoolAccess } from './interfaces.js'; +import { type PreAddPoolAccess, TxPoolRejectionCode } from './interfaces.js'; describe('FeePayerBalancePreAddRule', () => { let rule: FeePayerBalancePreAddRule; @@ -63,7 +63,12 @@ describe('FeePayerBalancePreAddRule', () => { expect(result.shouldIgnore).toBe(true); 
expect(result.txHashesToEvict).toHaveLength(0); - expect(result.reason).toContain('insufficient balance'); + expect(result.reason).toBeDefined(); + expect(result.reason!.code).toBe(TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE); + if (result.reason!.code === TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE) { + expect(result.reason!.currentBalance).toBe(50n); + expect(result.reason!.feeLimit).toBe(100n); + } }); it('accepts tx when balance exactly equals fee limit', async () => { @@ -108,7 +113,8 @@ describe('FeePayerBalancePreAddRule', () => { const result = await rule.check(incomingMeta, poolAccess); expect(result.shouldIgnore).toBe(true); - expect(result.reason).toContain('insufficient balance'); + expect(result.reason).toBeDefined(); + expect(result.reason!.code).toBe(TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE); }); it('evicts lower-priority existing tx when high-priority tx is added', async () => { @@ -263,7 +269,8 @@ describe('FeePayerBalancePreAddRule', () => { expect(result.shouldIgnore).toBe(true); expect(result.reason).toBeDefined(); - expect(result.reason).toContain('insufficient balance'); + expect(result.reason!.code).toBe(TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE); + expect(result.reason!.message).toContain('insufficient balance'); }); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts index daa5ce665cfc..0cdebb94948d 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts @@ -1,7 +1,13 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, comparePriority } from '../tx_metadata.js'; -import type { PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import { + type PreAddContext, + type 
PreAddPoolAccess, + type PreAddResult, + type PreAddRule, + TxPoolRejectionCode, +} from './interfaces.js'; /** * Pre-add rule that checks if a fee payer has sufficient balance to cover the incoming transaction. @@ -78,7 +84,13 @@ export class FeePayerBalancePreAddRule implements PreAddRule { return { shouldIgnore: true, txHashesToEvict: [], - reason: `fee payer ${incomingMeta.feePayer} has insufficient balance`, + reason: { + code: TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE, + message: `Fee payer ${incomingMeta.feePayer} has insufficient balance. Balance at transaction: ${available}, required: ${incomingMeta.feeLimit}`, + currentBalance: initialBalance, + availableBalance: available, + feeLimit: incomingMeta.feeLimit, + }, }; } else { // Existing tx cannot be covered after adding incoming - mark for eviction @@ -93,7 +105,6 @@ export class FeePayerBalancePreAddRule implements PreAddRule { return { shouldIgnore: true, txHashesToEvict: [], - reason: 'internal error: tx coverage not determined', }; } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts index e084e02039d8..79abcdc12812 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts @@ -11,6 +11,8 @@ export { type PreAddResult, type PreAddRule, type TaggedEviction, + TxPoolRejectionCode, + type TxPoolRejectionError, } from './interfaces.js'; // Pre-add rules diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index ab5af10718cf..32135758973d 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -73,6 +73,36 @@ export interface TaggedEviction { readonly reason: string; } +/** + * Machine-readable rejection codes for pre-add rule rejections. 
+ */ +export const TxPoolRejectionCode = { + LOW_PRIORITY_FEE: 'LOW_PRIORITY_FEE', + INSUFFICIENT_FEE_PAYER_BALANCE: 'INSUFFICIENT_FEE_PAYER_BALANCE', + NULLIFIER_CONFLICT: 'NULLIFIER_CONFLICT', + INTERNAL_ERROR: 'INTERNAL_ERROR', +} as const; + +export type TxPoolRejectionCode = (typeof TxPoolRejectionCode)[keyof typeof TxPoolRejectionCode]; + +/** Structured rejection reason returned by pre-add rules. */ +export type TxPoolRejectionError = + | { + code: typeof TxPoolRejectionCode.LOW_PRIORITY_FEE; + message: string; + minimumPriorityFee: bigint; + txPriorityFee: bigint; + } + | { + code: typeof TxPoolRejectionCode.INSUFFICIENT_FEE_PAYER_BALANCE; + message: string; + currentBalance: bigint; + availableBalance: bigint; + feeLimit: bigint; + } + | { code: typeof TxPoolRejectionCode.NULLIFIER_CONFLICT; message: string; conflictingTxHash: string } + | { code: typeof TxPoolRejectionCode.INTERNAL_ERROR; message: string }; + /** * Result of a pre-add check for a single transaction. */ @@ -84,7 +114,7 @@ export interface PreAddResult { /** Evictions tagged with the rule name that produced them. Populated by EvictionManager. */ readonly evictions?: TaggedEviction[]; /** Optional reason for ignoring */ - readonly reason?: string; + readonly reason?: TxPoolRejectionError; } /** Context passed to pre-add rules from addPendingTxs. 
*/ diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index e2fc58850110..2e61088b3b5b 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -1,5 +1,5 @@ import { type TxMetaData, comparePriority, stubTxMetaValidationData } from '../tx_metadata.js'; -import type { PreAddContext, PreAddPoolAccess } from './interfaces.js'; +import { type PreAddContext, type PreAddPoolAccess, TxPoolRejectionCode } from './interfaces.js'; import { LowPriorityPreAddRule } from './low_priority_pre_add_rule.js'; describe('LowPriorityPreAddRule', () => { @@ -101,7 +101,12 @@ describe('LowPriorityPreAddRule', () => { expect(result.shouldIgnore).toBe(true); expect(result.txHashesToEvict).toHaveLength(0); - expect(result.reason).toContain('lower priority'); + expect(result.reason).toBeDefined(); + expect(result.reason!.code).toBe(TxPoolRejectionCode.LOW_PRIORITY_FEE); + if (result.reason!.code === TxPoolRejectionCode.LOW_PRIORITY_FEE) { + expect(result.reason!.minimumPriorityFee).toBe(101n); + expect(result.reason!.txPriorityFee).toBe(50n); + } }); it('ignores tx when incoming has equal priority to lowest', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts index fa5cc0360dcd..b4d5ef8382db 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts @@ -1,7 +1,14 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, comparePriority } from '../tx_metadata.js'; -import type { EvictionConfig, PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule 
} from './interfaces.js'; +import { + type EvictionConfig, + type PreAddContext, + type PreAddPoolAccess, + type PreAddResult, + type PreAddRule, + TxPoolRejectionCode, +} from './interfaces.js'; /** * Pre-add rule that checks if the pool is at capacity and handles low-priority eviction. @@ -66,7 +73,12 @@ export class LowPriorityPreAddRule implements PreAddRule { return Promise.resolve({ shouldIgnore: true, txHashesToEvict: [], - reason: `pool at capacity and tx has lower priority than existing transactions`, + reason: { + code: TxPoolRejectionCode.LOW_PRIORITY_FEE, + message: `Tx does not meet minimum priority fee. Required: ${lowestPriorityMeta.priorityFee + 1n}, got: ${incomingMeta.priorityFee}`, + minimumPriorityFee: lowestPriorityMeta.priorityFee + 1n, + txPriorityFee: incomingMeta.priorityFee, + }, }); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts index 05378999f704..9b638e13e83d 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts @@ -23,7 +23,7 @@ export class NullifierConflictRule implements PreAddRule { ); if (result.shouldIgnore) { - this.log.debug(`Ignoring tx ${incomingMeta.txHash}: ${result.reason}`); + this.log.debug(`Ignoring tx ${incomingMeta.txHash}: ${result.reason?.message}`); } return Promise.resolve(result); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 908e42a1eee7..ca23965e117e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -4,6 +4,7 @@ import type { L2Block, L2BlockId, L2BlockSource } from '@aztec/stdlib/block'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import type { BlockHeader, Tx, 
TxHash, TxValidator } from '@aztec/stdlib/tx'; +import type { TxPoolRejectionError } from './eviction/interfaces.js'; import type { TxMetaData, TxState } from './tx_metadata.js'; /** @@ -17,6 +18,8 @@ export type AddTxsResult = { ignored: TxHash[]; /** Transactions rejected because they failed validation (e.g., invalid proof, expired timestamp) */ rejected: TxHash[]; + /** Optional rejection errors, only present when there are rejections with structured errors. */ + errors?: Map; }; /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts index 54123c98a798..9f3e076f9f29 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts @@ -1,5 +1,6 @@ import { mockTx } from '@aztec/stdlib/testing'; +import { TxPoolRejectionCode } from './eviction/interfaces.js'; import { type TxMetaData, buildTxMetaData, @@ -131,7 +132,11 @@ describe('TxMetaData', () => { expect(result.shouldIgnore).toBe(true); expect(result.txHashesToEvict).toEqual([]); - expect(result.reason).toContain(existing.txHash); + expect(result.reason).toBeDefined(); + expect(result.reason!.code).toBe(TxPoolRejectionCode.NULLIFIER_CONFLICT); + if (result.reason!.code === TxPoolRejectionCode.NULLIFIER_CONFLICT) { + expect(result.reason!.conflictingTxHash).toBe(existing.txHash); + } }); it('ignores incoming tx when existing has equal priority (tie goes to existing)', () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts index 64b35b374401..61b12a5debf8 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts @@ -6,7 +6,7 @@ import type { Tx } from '@aztec/stdlib/tx'; import { getFeePayerBalanceDelta } from '../../msg_validators/tx_validator/fee_payer_balance.js'; import { getTxPriorityFee } 
from '../tx_pool/priority.js'; -import type { PreAddResult } from './eviction/interfaces.js'; +import { type PreAddResult, TxPoolRejectionCode } from './eviction/interfaces.js'; /** Validator-compatible data interface, mirroring the subset of PrivateKernelTailCircuitPublicInputs used by validators. */ export type TxMetaValidationData = { @@ -215,7 +215,11 @@ export function checkNullifierConflict( return { shouldIgnore: true, txHashesToEvict: [], - reason: `nullifier conflict with ${conflictingHashStr}`, + reason: { + code: TxPoolRejectionCode.NULLIFIER_CONFLICT, + message: `Nullifier conflict with existing tx ${conflictingHashStr}`, + conflictingTxHash: conflictingHashStr, + }, }; } } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 73a16d061228..4b149185e63b 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -25,6 +25,8 @@ import { type PoolOperations, type PreAddContext, type PreAddPoolAccess, + TxPoolRejectionCode, + type TxPoolRejectionError, } from './eviction/index.js'; import { TxPoolV2Instrumentation } from './instrumentation.js'; import { @@ -182,6 +184,7 @@ export class TxPoolV2Impl { const accepted: TxHash[] = []; const ignored: TxHash[] = []; const rejected: TxHash[] = []; + const errors = new Map(); const acceptedPending = new Set(); const poolAccess = this.#createPreAddPoolAccess(); @@ -219,6 +222,7 @@ export class TxPoolV2Impl { poolAccess, acceptedPending, ignored, + errors, preAddContext, ); if (result.status === 'accepted') { @@ -252,7 +256,7 @@ export class TxPoolV2Impl { await this.#evictionManager.evictAfterNewTxs(Array.from(acceptedPending), [...uniqueFeePayers]); } - return { accepted, ignored, rejected }; + return { accepted, ignored, rejected, ...(errors.size > 0 ? { errors } : {}) }; } /** Validates and adds a regular pending tx. Returns status. 
*/ @@ -262,6 +266,7 @@ export class TxPoolV2Impl { poolAccess: PreAddPoolAccess, acceptedPending: Set, ignored: TxHash[], + errors: Map, preAddContext?: PreAddContext, ): Promise<{ status: 'accepted' | 'ignored' | 'rejected' }> { const txHash = tx.getTxHash(); @@ -277,7 +282,10 @@ export class TxPoolV2Impl { const preAddResult = await this.#evictionManager.runPreAddRules(meta, poolAccess, preAddContext); if (preAddResult.shouldIgnore) { - this.#log.debug(`Ignoring tx ${txHashStr}: ${preAddResult.reason}`); + this.#log.debug(`Ignoring tx ${txHashStr}: ${preAddResult.reason?.message ?? 'unknown reason'}`); + if (preAddResult.reason && preAddResult.reason.code !== TxPoolRejectionCode.INTERNAL_ERROR) { + errors.set(txHashStr, preAddResult.reason); + } return { status: 'ignored' }; } @@ -965,7 +973,9 @@ export class TxPoolV2Impl { if (preAddResult.shouldIgnore) { // Transaction rejected - mark for deletion from DB rejected.push(meta.txHash); - this.#log.debug(`Rejected tx ${meta.txHash} during rebuild: ${preAddResult.reason}`); + this.#log.debug( + `Rejected tx ${meta.txHash} during rebuild: ${preAddResult.reason?.message ?? 'unknown reason'}`, + ); continue; } From a0018057bd585b0e4a52796e4347913c09e6f63b Mon Sep 17 00:00:00 2001 From: danielntmd <162406516+danielntmd@users.noreply.github.com> Date: Tue, 17 Feb 2026 16:50:19 +0000 Subject: [PATCH 54/62] feat: (A-302) add reloadKeystore admin RPC endpoint - Allow node operators to update coinbase, fee recipient, and validator attester keys without restarting by re-reading keystore files from disk. 
What is updated on reload: - Validator attester keys (add, remove, or replace) - Coinbase and fee recipient per validator - Publisher-to-validator mapping in the sequencer publisher factory - Slasher "don't-slash-self" list - HA signer preserved (PostgreSQL connection pool reused) What is NOT updated (requires node restart): - L1 publisher signers (funded accounts that send L1 transactions) - Prover keys - HA signer connections Validation is performed before any state mutation: - Keystore files are loaded, merged, and validated - New validators must use a publisher key already initialized at startup; reload is rejected with a clear error if a publisher key is unknown - If any validation fails, the old keystore remains in effect Tests: - unit tests, error paths, e2e, etc --- .../aztec-node/src/aztec-node/server.test.ts | 294 +++++++++++++++++- .../aztec-node/src/aztec-node/server.ts | 94 +++++- .../src/e2e_sequencer/reload_keystore.test.ts | 216 +++++++++++++ .../src/client/sequencer-client.ts | 5 + .../sequencer-publisher-factory.test.ts | 55 ++++ .../publisher/sequencer-publisher-factory.ts | 16 +- .../src/sequencer/sequencer.ts | 7 +- .../src/interfaces/aztec-node-admin.test.ts | 7 + .../stdlib/src/interfaces/aztec-node-admin.ts | 21 ++ .../validator-client/src/validator.test.ts | 162 +++++++++- .../validator-client/src/validator.ts | 33 +- 11 files changed, 883 insertions(+), 27 deletions(-) create mode 100644 yarn-project/end-to-end/src/e2e_sequencer/reload_keystore.test.ts diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 0bc16d38c3f4..17d6f3f51928 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -4,13 +4,17 @@ import type { RollupContract } from '@aztec/ethereum/contracts'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { 
EthAddress } from '@aztec/foundation/eth-address'; +import { BadRequestError } from '@aztec/foundation/json-rpc'; +import type { Hex } from '@aztec/foundation/string'; import { DateProvider } from '@aztec/foundation/timer'; import { unfreeze } from '@aztec/foundation/types'; +import { type KeyStore, KeystoreManager, RemoteSigner, type ValidatorKeyStore } from '@aztec/node-keystore'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import type { P2P } from '@aztec/p2p'; import { protocolContractsHash } from '@aztec/protocol-contracts'; import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice'; -import type { GlobalVariableBuilder } from '@aztec/sequencer-client'; +import type { GlobalVariableBuilder, SequencerClient } from '@aztec/sequencer-client'; +import type { SlasherClientInterface } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block, type L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; @@ -33,11 +37,15 @@ import { Tx, } from '@aztec/stdlib/tx'; import { getPackageVersion } from '@aztec/stdlib/update-checker'; +import type { ValidatorClient } from '@aztec/validator-client'; -import { readFileSync } from 'fs'; +import { jest } from '@jest/globals'; +import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'fs'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { dirname, resolve } from 'path'; +import { tmpdir } from 'os'; +import { dirname, join, resolve } from 'path'; import { fileURLToPath } from 'url'; +import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; import { type AztecNodeConfig, getConfigEnvVars } from './config.js'; import { AztecNodeService } from './server.js'; @@ -391,4 +399,284 @@ describe('aztec node', () => { await expect(node.simulatePublicCalls(tx)).rejects.toThrow(/gas/i); }); }); + + describe('reloadKeystore', () => { + it('throws 
BadRequestError if no file-based keystore directory is configured', async () => { + // Default node has no keyStoreDirectory set + await expect(node.reloadKeystore()).rejects.toThrow(BadRequestError); + }); + + it('throws BadRequestError if keystore directory is set but validator client is not configured', async () => { + // Satisfies the first check (directory exists) but validatorClient is undefined + nodeConfig.keyStoreDirectory = '/tmp/fake-keystore-dir'; + await expect(node.reloadKeystore()).rejects.toThrow(BadRequestError); + }); + + describe('with file-based keystore', () => { + let keyStoreDir: string; + let validatorClient: MockProxy; + let slasherClient: MockProxy; + let validatorPrivateKey: string; + let nodeWithValidator: AztecNodeService; + + // Helper to build a KeyStore with default coinbase/feeRecipient/remoteSigner. + // Each entry needs only `attester` (required) and optionally `publisher`. + const makeKeyStore = ( + ...validators: Array & Pick, 'publisher'>> + ): KeyStore => ({ + schemaVersion: 1, + validators: validators.map(v => ({ + attester: v.attester, + coinbase: undefined, + feeRecipient: AztecAddress.ZERO, + remoteSigner: undefined, + ...(v.publisher !== undefined ? 
{ publisher: v.publisher } : {}), + })), + }); + + beforeEach(() => { + // Create a temp directory with a keystore file + keyStoreDir = mkdtempSync(join(tmpdir(), 'keystore-test-')); + validatorPrivateKey = generatePrivateKey(); + const keyStore = makeKeyStore({ attester: [validatorPrivateKey as Hex<32>] }); + writeFileSync(join(keyStoreDir, 'keystore.json'), JSON.stringify(keyStore)); + + validatorClient = mock(); + slasherClient = mock(); + + const validatorNodeConfig = { ...nodeConfig, keyStoreDirectory: keyStoreDir }; + + nodeWithValidator = new AztecNodeService( + validatorNodeConfig, + p2p, + l2BlockSource, + mock(), + mock(), + mock(), + mock({ getCommitted: () => merkleTreeOps }), + undefined, + undefined, + slasherClient, + undefined, + undefined, + 12345, + rollupVersion.toNumber(), + globalVariablesBuilder, + epochCache, + getPackageVersion() ?? '', + new TestCircuitVerifier(), + undefined, + undefined, + undefined, + validatorClient as unknown as ValidatorClient, + new KeystoreManager(keyStore), + ); + }); + + afterEach(() => { + rmSync(keyStoreDir, { recursive: true, force: true }); + }); + + it('reloads keystore from disk and calls validatorClient.reloadKeystore', async () => { + await nodeWithValidator.reloadKeystore(); + expect(validatorClient.reloadKeystore).toHaveBeenCalledTimes(1); + }); + + it('adds new validators to slasher dont-slash-self list on reload', async () => { + // Write a new keystore file with an additional validator + const newPrivateKey = generatePrivateKey(); + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify(makeKeyStore({ attester: [validatorPrivateKey as Hex<32>, newPrivateKey as Hex<32>] })), + ); + + await nodeWithValidator.reloadKeystore(); + + const updateArg = slasherClient.updateConfig.mock.calls[0][0]; + const neverSlashList = updateArg.slashValidatorsNever!; + + const originalAddress = EthAddress.fromString( + privateKeyToAccount(validatorPrivateKey as `0x${string}`).address, + ); + const 
newAddress = EthAddress.fromString(privateKeyToAccount(newPrivateKey as `0x${string}`).address); + + expect(neverSlashList.some(a => a.equals(originalAddress))).toBe(true); + expect(neverSlashList.some(a => a.equals(newAddress))).toBe(true); + }); + + it('removes validators from slasher dont-slash-self list when removed from keystore', async () => { + // First add two validators + const secondPrivateKey = generatePrivateKey(); + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify(makeKeyStore({ attester: [validatorPrivateKey as Hex<32>, secondPrivateKey as Hex<32>] })), + ); + await nodeWithValidator.reloadKeystore(); + + // Now remove the second validator, keeping only the original + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify(makeKeyStore({ attester: [validatorPrivateKey as Hex<32>] })), + ); + await nodeWithValidator.reloadKeystore(); + + // The second call to updateConfig should only contain the remaining validator + const updateArg = slasherClient.updateConfig.mock.calls[1][0]; + const neverSlashList = updateArg.slashValidatorsNever!; + + const originalAddress = EthAddress.fromString( + privateKeyToAccount(validatorPrivateKey as `0x${string}`).address, + ); + const removedAddress = EthAddress.fromString(privateKeyToAccount(secondPrivateKey as `0x${string}`).address); + + expect(neverSlashList.some(a => a.equals(originalAddress))).toBe(true); + expect(neverSlashList.some(a => a.equals(removedAddress))).toBe(false); + }); + + it('does not update slasher if slashSelfAllowed is true', async () => { + (nodeWithValidator as any).config.slashSelfAllowed = true; + await nodeWithValidator.reloadKeystore(); + + expect(validatorClient.reloadKeystore).toHaveBeenCalledTimes(1); + expect(slasherClient.updateConfig).not.toHaveBeenCalled(); + }); + + it('reloads keystore with remote signer validators from disk', async () => { + // Update keystore file to add a remote signer validator alongside the local key validator. 
+ // This verifies the full reload path supports mixed local + remote signer keystores: + // file-on-disk -> loadKeystores -> KeystoreManager -> validateSigners (mocked) -> + // ValidatorClient.reloadKeystore -> NodeKeystoreAdapter (creates RemoteSigner instances) + const remoteSignerUrl = 'https://web3signer.example.com:9000'; + const remoteAttesterAddress = EthAddress.random(); + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify( + makeKeyStore( + { attester: [validatorPrivateKey as Hex<32>] }, + { attester: { address: remoteAttesterAddress, remoteSignerUrl } }, + ), + ), + ); + + // Mock RemoteSigner.validateAccess to avoid a real HTTP call to web3signer. + // validateSigners() calls this to verify each remote signer URL is reachable + // and that the requested addresses are available. + const validateSpy = jest.spyOn(RemoteSigner, 'validateAccess').mockImplementation(() => Promise.resolve()); + + try { + await nodeWithValidator.reloadKeystore(); + + // Verify RemoteSigner.validateAccess was called with the correct URL and address + expect(validateSpy).toHaveBeenCalledTimes(1); + expect(validateSpy).toHaveBeenCalledWith( + remoteSignerUrl, + expect.arrayContaining([remoteAttesterAddress.toString().toLowerCase()]), + ); + + // Verify validatorClient.reloadKeystore was called (reload succeeded) + expect(validatorClient.reloadKeystore).toHaveBeenCalledTimes(1); + + // Verify the new KeystoreManager was passed through with both validators + const passedManager = validatorClient.reloadKeystore.mock.calls[0][0] as KeystoreManager; + expect(passedManager.getValidatorCount()).toBe(2); + + // Verify slasher list includes both the local and remote validator addresses + const updateArg = slasherClient.updateConfig.mock.calls[0][0]; + const neverSlashList = updateArg.slashValidatorsNever!; + expect(neverSlashList.some(a => a.equals(remoteAttesterAddress))).toBe(true); + } finally { + validateSpy.mockRestore(); + } + }); + + it('rejects reload when remote 
signer validation fails', async () => { + // If RemoteSigner.validateAccess fails (e.g. web3signer unreachable or address not found), + // the reload should be rejected and the old keystore should remain intact. + const remoteSignerUrl = 'https://web3signer.example.com:9000'; + const remoteAttesterAddress = EthAddress.random(); + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify( + makeKeyStore( + { attester: [validatorPrivateKey as Hex<32>] }, + // EthAddress has toJSON() so JSON.stringify serializes it as a hex string. + { attester: { address: remoteAttesterAddress, remoteSignerUrl } }, + ), + ), + ); + + // Mock RemoteSigner.validateAccess to reject — simulates unreachable web3signer + const validateSpy = jest + .spyOn(RemoteSigner, 'validateAccess') + .mockRejectedValue(new Error('Unable to connect to web3signer')); + + try { + await expect(nodeWithValidator.reloadKeystore()).rejects.toThrow(/Unable to connect to web3signer/); + + // Validator client should NOT have been called (reload rejected before mutation) + expect(validatorClient.reloadKeystore).not.toHaveBeenCalled(); + } finally { + validateSpy.mockRestore(); + } + }); + + it('rejects reload when new validator has a publisher key not in the L1 signers', async () => { + // Initial keystore has validator with publisherKeyA + const publisherKeyA = generatePrivateKey(); + const publisherKeyB = generatePrivateKey(); // different, not in L1 signers + + const initialKeyStore = makeKeyStore({ + attester: [validatorPrivateKey as Hex<32>], + publisher: [publisherKeyA as Hex<32>], + }); + + // Recreate node with a truthy sequencer so the publisher validation path runs. + // Only truthiness matters: the code checks `if (this.keyStoreManager && this.sequencer)` + // and the validation logic uses keyStoreManager, not sequencer methods. + // The test expects rejection before sequencer.updatePublisherNodeKeyStore() is reached. 
+ const nodeWithSequencer = new AztecNodeService( + { ...nodeConfig, keyStoreDirectory: keyStoreDir }, + p2p, + l2BlockSource, + mock(), + mock(), + mock(), + mock({ getCommitted: () => merkleTreeOps }), + {} as SequencerClient, + undefined, + slasherClient, + undefined, + undefined, + 12345, + rollupVersion.toNumber(), + globalVariablesBuilder, + epochCache, + getPackageVersion() ?? '', + new TestCircuitVerifier(), + undefined, + undefined, + undefined, + validatorClient as unknown as ValidatorClient, + new KeystoreManager(initialKeyStore), + ); + + // Write new keystore: new validator uses publisherKeyB (not in the L1 signers) + const newValidatorKey = generatePrivateKey(); + writeFileSync( + join(keyStoreDir, 'keystore.json'), + JSON.stringify( + makeKeyStore( + { attester: [validatorPrivateKey as Hex<32>], publisher: [publisherKeyA as Hex<32>] }, + { attester: [newValidatorKey as Hex<32>], publisher: [publisherKeyB as Hex<32>] }, + ), + ), + ); + + await expect(nodeWithSequencer.reloadKeystore()).rejects.toThrow(BadRequestError); + + // reload rejected before mutation + expect(validatorClient.reloadKeystore).not.toHaveBeenCalled(); + }); + }); + }); }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 51376406b6e2..0706015744ca 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -9,7 +9,7 @@ import { getPublicClient } from '@aztec/ethereum/client'; import { RegistryContract, RollupContract } from '@aztec/ethereum/contracts'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { compactArray, pick } from '@aztec/foundation/collection'; +import { compactArray, pick, unique } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } 
from '@aztec/foundation/eth-address'; import { BadRequestError } from '@aztec/foundation/json-rpc'; @@ -149,6 +149,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { private telemetry: TelemetryClient = getTelemetryClient(), private log = createLogger('node'), private blobClient?: BlobClientInterface, + private validatorClient?: ValidatorClient, + private keyStoreManager?: KeystoreManager, ) { this.metrics = new NodeMetrics(telemetry, 'AztecNodeService'); this.tracer = telemetry.getTracer('AztecNodeService'); @@ -534,6 +536,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { telemetry, log, blobClient, + validatorClient, + keyStoreManager, ); return node; @@ -1441,6 +1445,94 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { } } + public async reloadKeystore(): Promise { + if (!this.config.keyStoreDirectory?.length) { + throw new BadRequestError( + 'Cannot reload keystore: node is not using a file-based keystore. ' + + 'Set KEY_STORE_DIRECTORY to use file-based keystores.', + ); + } + if (!this.validatorClient) { + throw new BadRequestError('Cannot reload keystore: validator is not enabled.'); + } + + this.log.info('Reloading keystore from disk'); + + // Re-read and validate keystore files + const keyStores = loadKeystores(this.config.keyStoreDirectory); + const newManager = new KeystoreManager(mergeKeystores(keyStores)); + await newManager.validateSigners(); + ValidatorClient.validateKeyStoreConfiguration(newManager, this.log); + + // Validate that every validator's publisher keys overlap with the L1 signers + // that were initialized at startup. Publishers cannot be hot-reloaded, so a + // validator with a publisher key that doesn't match any existing L1 signer + // would silently fail on every proposer slot. 
+ if (this.keyStoreManager && this.sequencer) { + const oldAdapter = NodeKeystoreAdapter.fromKeyStoreManager(this.keyStoreManager); + const availablePublishers = new Set( + oldAdapter + .getAttesterAddresses() + .flatMap(a => oldAdapter.getPublisherAddresses(a).map(p => p.toString().toLowerCase())), + ); + + const newAdapter = NodeKeystoreAdapter.fromKeyStoreManager(newManager); + for (const attester of newAdapter.getAttesterAddresses()) { + const pubs = newAdapter.getPublisherAddresses(attester); + if (pubs.length > 0 && !pubs.some(p => availablePublishers.has(p.toString().toLowerCase()))) { + throw new BadRequestError( + `Cannot reload keystore: validator ${attester} has publisher keys ` + + `[${pubs.map(p => p.toString()).join(', ')}] but none match the L1 signers initialized at startup ` + + `[${[...availablePublishers].join(', ')}]. Publishers cannot be hot-reloaded — ` + + `use an existing publisher key or restart the node.`, + ); + } + } + } + + // Build adapters for old and new keystores to compute diff + const newAdapter = NodeKeystoreAdapter.fromKeyStoreManager(newManager); + const newAddresses = newAdapter.getAttesterAddresses(); + const oldAddresses = this.keyStoreManager + ? 
NodeKeystoreAdapter.fromKeyStoreManager(this.keyStoreManager).getAttesterAddresses() + : []; + + const oldSet = new Set(oldAddresses.map(a => a.toString())); + const newSet = new Set(newAddresses.map(a => a.toString())); + const added = newAddresses.filter(a => !oldSet.has(a.toString())); + const removed = oldAddresses.filter(a => !newSet.has(a.toString())); + + if (added.length > 0) { + this.log.info(`Keystore reload: adding attester keys: ${added.map(a => a.toString()).join(', ')}`); + } + if (removed.length > 0) { + this.log.info(`Keystore reload: removing attester keys: ${removed.map(a => a.toString()).join(', ')}`); + } + if (added.length === 0 && removed.length === 0) { + this.log.info('Keystore reload: attester keys unchanged'); + } + + // Update the validator client (coinbase, feeRecipient, attester keys) + this.validatorClient.reloadKeystore(newManager); + + // Update the publisher factory's keystore so newly-added validators + // can be matched to existing publisher keys when proposing blocks. + if (this.sequencer) { + this.sequencer.updatePublisherNodeKeyStore(newAdapter); + } + + // Update slasher's "don't-slash-self" list with new validator addresses + if (this.slasherClient && !this.config.slashSelfAllowed) { + const slashValidatorsNever = unique( + [...(this.config.slashValidatorsNever ?? 
[]), ...newAddresses].map(a => a.toString()), + ).map(EthAddress.fromString); + this.slasherClient.updateConfig({ slashValidatorsNever }); + } + + this.keyStoreManager = newManager; + this.log.info('Keystore reloaded: coinbase, feeRecipient, and attester keys updated'); + } + #getInitialHeaderHash(): Promise { if (!this.initialHeaderHashPromise) { this.initialHeaderHashPromise = this.worldStateSynchronizer.getCommitted().getInitialHeader().hash(); diff --git a/yarn-project/end-to-end/src/e2e_sequencer/reload_keystore.test.ts b/yarn-project/end-to-end/src/e2e_sequencer/reload_keystore.test.ts new file mode 100644 index 000000000000..09deed9db69d --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_sequencer/reload_keystore.test.ts @@ -0,0 +1,216 @@ +import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses'; +import { NO_WAIT } from '@aztec/aztec.js/contracts'; +import { ContractDeployer } from '@aztec/aztec.js/deployment'; +import { Fr } from '@aztec/aztec.js/fields'; +import { type AztecNode, waitForTx } from '@aztec/aztec.js/node'; +import type { Wallet } from '@aztec/aztec.js/wallet'; +import { BlockNumber } from '@aztec/foundation/branded-types'; +import { SecretValue } from '@aztec/foundation/config'; +import type { EthPrivateKey } from '@aztec/node-keystore'; +import { StatefulTestContractArtifact } from '@aztec/noir-test-contracts.js/StatefulTest'; +import type { SequencerClient } from '@aztec/sequencer-client'; +import type { TestSequencer, TestSequencerClient } from '@aztec/sequencer-client/test'; +import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; +import type { ValidatorClient } from '@aztec/validator-client'; + +import { jest } from '@jest/globals'; +import { mkdtemp, rm, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { getPrivateKeyFromIndex, setup } from '../fixtures/utils.js'; + +const VALIDATOR_KEY_INDICES = [0, 
2, 4, 5]; +const PUBLISHER_KEY_INDEX = 3; + +// 4 validators staked on L1, committee size 4 → quorum = floor(4*2/3)+1 = 3. +// Only 3 validators are in the initial keystore (enough for quorum). +// After reload, the 4th validator is added. +const VALIDATOR_COUNT = 4; +const COMMITTEE_SIZE = VALIDATOR_COUNT; +const INITIAL_KEYSTORE_COUNT = 3; + +describe('e2e_reload_keystore', () => { + jest.setTimeout(300_000); + + let teardown: () => Promise; + let aztecNode: AztecNode; + let aztecNodeAdmin: AztecNodeAdmin | undefined; + let wallet: Wallet; + let ownerAddress: AztecAddress; + let keyStoreDirectory: string; + let sequencerClient: SequencerClient | undefined; + + const validatorKeys: EthPrivateKey[] = []; + const validatorAddresses: string[] = []; + let publisherKey: EthPrivateKey; + + const initialCoinbase = EthAddress.fromNumber(42); + const initialFeeRecipient = AztecAddress.fromNumber(42); + + const artifact = StatefulTestContractArtifact; + + beforeAll(async () => { + // Derive keys from the test mnemonic (these accounts are funded in Anvil) + for (const idx of VALIDATOR_KEY_INDICES) { + const key = `0x${getPrivateKeyFromIndex(idx)!.toString('hex')}` as EthPrivateKey; + validatorKeys.push(key); + validatorAddresses.push(privateKeyToAccount(key).address); + } + publisherKey = `0x${getPrivateKeyFromIndex(PUBLISHER_KEY_INDEX)!.toString('hex')}` as EthPrivateKey; + + // Create temp directory for keystore files + keyStoreDirectory = await mkdtemp(join(tmpdir(), 'reload-keystore-')); + + // Write initial keystore: first 3 validators only (validator 4 is deliberately excluded). + // All share the same coinbase X so we can detect a change after reload. 
+ const initialKeystore = { + schemaVersion: 1, + validators: validatorKeys.slice(0, INITIAL_KEYSTORE_COUNT).map(key => ({ + attester: key, + coinbase: initialCoinbase.toChecksumString(), + publisher: [publisherKey], + feeRecipient: initialFeeRecipient.toString(), + })), + }; + await writeFile(join(keyStoreDirectory, 'keystore.json'), JSON.stringify(initialKeystore, null, 2)); + + // Stake ALL 4 validators on L1 so they are part of the committee + const initialValidators = validatorKeys.map((key, i) => ({ + attester: EthAddress.fromString(validatorAddresses[i]), + withdrawer: EthAddress.fromString(validatorAddresses[i]), + privateKey: key, + bn254SecretKey: new SecretValue(Fr.random().toBigInt()), + })); + + ({ + teardown, + aztecNode, + aztecNodeAdmin, + wallet, + accounts: [ownerAddress], + sequencer: sequencerClient, + } = await setup(1, { + initialValidators, + aztecTargetCommitteeSize: COMMITTEE_SIZE, + keyStoreDirectory, + minTxsPerBlock: 1, + maxTxsPerBlock: 1, + })); + + if (!aztecNodeAdmin) { + throw new Error('Aztec node admin API must be available for this test'); + } + }); + + afterAll(async () => { + await teardown(); + await rm(keyStoreDirectory, { recursive: true, force: true }); + }); + + it('should reload keystore, add a new validator, and use updated coinbase in blocks', async () => { + // Access the sequencer's validator client to inspect keystore state + const sequencer = (sequencerClient! 
as TestSequencerClient).getSequencer(); + const validatorClient: ValidatorClient = (sequencer as TestSequencer).validatorClient; + + // Verify initial keystore state and block production + // Only the first 3 validators should be loaded + const initialAddrs = validatorClient.getValidatorAddresses(); + expect(initialAddrs).toHaveLength(INITIAL_KEYSTORE_COUNT); + for (let i = 0; i < INITIAL_KEYSTORE_COUNT; i++) { + const attestor = EthAddress.fromString(validatorAddresses[i]); + expect(validatorClient.getCoinbaseForAttestor(attestor)).toEqual(initialCoinbase); + expect(validatorClient.getFeeRecipientForAttestor(attestor)).toEqual(initialFeeRecipient); + } + + // Validator 4 should NOT be in the keystore yet + const addr4Lower = validatorAddresses[3].toLowerCase(); + expect(initialAddrs.map(a => a.toString().toLowerCase())).not.toContain(addr4Lower); + + // Send a tx and verify the block uses the initial coinbase + const deployer = new ContractDeployer(artifact, wallet); + const sentTx1 = await deployer.deploy(ownerAddress, ownerAddress, 1).send({ + from: ownerAddress, + contractAddressSalt: new Fr(1), + skipClassPublication: true, + skipInstancePublication: true, + wait: NO_WAIT, + }); + const receipt1 = await waitForTx(aztecNode, sentTx1); + + const block1 = await aztecNode.getBlock(BlockNumber(receipt1.blockNumber!)); + expect(block1).toBeDefined(); + expect(block1!.header.globalVariables.coinbase.toString().toLowerCase()).toEqual( + initialCoinbase.toString().toLowerCase(), + ); + + // Write updated keystore and reload + // Each validator gets its own new coinbase so we can verify per-validator updates. 
+ const newCoinbases = VALIDATOR_KEY_INDICES.map((_, i) => EthAddress.fromNumber(100 + i)); + const newFeeRecipients = VALIDATOR_KEY_INDICES.map((_, i) => AztecAddress.fromNumber(100 + i)); + + // Build updated keystore: all 4 validators (including the previously-excluded validator 4) + const updatedKeystore = { + schemaVersion: 1, + validators: validatorKeys.map((key, i) => ({ + attester: key, + coinbase: newCoinbases[i].toChecksumString(), + publisher: [publisherKey], + feeRecipient: newFeeRecipients[i].toString(), + })), + }; + await writeFile(join(keyStoreDirectory, 'keystore.json'), JSON.stringify(updatedKeystore, null, 2)); + + // Reload keystore via the admin API + await aztecNodeAdmin!.reloadKeystore(); + + // Verify the reload took effect + // All 4 validators should now be loaded + const updatedAddrs = validatorClient.getValidatorAddresses(); + expect(updatedAddrs).toHaveLength(VALIDATOR_COUNT); + + for (let i = 0; i < VALIDATOR_COUNT; i++) { + const attestor = EthAddress.fromString(validatorAddresses[i]); + expect(validatorClient.getCoinbaseForAttestor(attestor)).toEqual(newCoinbases[i]); + expect(validatorClient.getFeeRecipientForAttestor(attestor)).toEqual(newFeeRecipients[i]); + } + + // Specifically confirm validator 4 is now present + expect(updatedAddrs.map(a => a.toString().toLowerCase())).toContain(addr4Lower); + + // Deterministically prove validator 4 CAN publish blocks + // Directly ask the publisher factory to create a publisher for validator 4. + // This exercises the full chain: keystore lookup → publisher filter → L1 signer match. + // If the publisher key weren't in the L1TxUtils pool, this would throw. 
+ const publisherFactory = (sequencer as TestSequencer).publisherFactory; + const validator4Attestor = EthAddress.fromString(validatorAddresses[3]); + const { attestorAddress: returnedAttestor, publisher: validator4Publisher } = + await publisherFactory.create(validator4Attestor); + + expect(returnedAttestor.equals(validator4Attestor)).toBe(true); + expect(validator4Publisher).toBeDefined(); + expect(validator4Publisher.getSenderAddress()).toBeDefined(); + + // Verify block production uses new coinbases (not old) + // Send a tx and confirm the block uses one of the new per-validator coinbases. + // Whichever validator is the proposer, its coinbase must be from the reloaded keystore. + const allNewCoinbasesLower = newCoinbases.map(c => c.toString().toLowerCase()); + + const sentTx2 = await deployer.deploy(ownerAddress, ownerAddress, 2).send({ + from: ownerAddress, + contractAddressSalt: new Fr(2), + skipClassPublication: true, + skipInstancePublication: true, + wait: NO_WAIT, + }); + const receipt2 = await waitForTx(aztecNode, sentTx2); + + const block2 = await aztecNode.getBlock(BlockNumber(receipt2.blockNumber!)); + expect(block2).toBeDefined(); + + const actualCoinbase = block2!.header.globalVariables.coinbase.toString().toLowerCase(); + expect(allNewCoinbasesLower).toContain(actualCoinbase); + expect(actualCoinbase).not.toEqual(initialCoinbase.toString().toLowerCase()); + }); +}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index fa0488a1ac5d..c55521d7b233 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -216,6 +216,11 @@ export class SequencerClient { return this.sequencer; } + /** Updates the publisher factory's node keystore adapter after a keystore reload. 
*/ + public updatePublisherNodeKeyStore(adapter: NodeKeystoreAdapter): void { + this.sequencer.updatePublisherNodeKeyStore(adapter); + } + /** Returns the shared tx delayer for sequencer L1 txs, if enabled. Test-only. */ getDelayer(): Delayer | undefined { return this.delayer_; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts index 178c90f24127..a06a0b62e2e7 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.test.ts @@ -140,6 +140,61 @@ describe('SequencerPublisherFactory', () => { expect(result.attestorAddress).toBe(validatorAddress); }); + it('should reject validator added via updateNodeKeyStore with a different publisher key', async () => { + // Initial keystore knows validator → publisherAddress + mockNodeKeyStore.getPublisherAddresses.mockReturnValue([publisherAddress]); + + // After updateNodeKeyStore, a new validator maps to a DIFFERENT publisher key + const newValidatorAddress = EthAddress.random(); + const differentPublisherAddress = EthAddress.random(); + const updatedKeyStore = mock(); + updatedKeyStore.getPublisherAddresses.mockImplementation((addr: EthAddress) => { + if (addr.equals(newValidatorAddress)) { + return [differentPublisherAddress]; // not in L1TxUtils pool + } + return [publisherAddress]; + }); + + factory.updateNodeKeyStore(updatedKeyStore); + + // The L1TxUtils pool only has publisherAddress, not differentPublisherAddress + mockL1TxUtils.getSenderAddress.mockReturnValue(publisherAddress); + mockPublisherManager.getAvailablePublisher.mockRejectedValueOnce( + new Error('Failed to find an available publisher.'), + ); + + await expect(factory.create(newValidatorAddress)).rejects.toThrow('Failed to find an available publisher.'); + + // Verify the filter rejects the available publisher (wrong key) + 
const filterFn = mockPublisherManager.getAvailablePublisher.mock.calls[0][0]!; + expect(filterFn(mockL1TxUtils)).toBe(false); + }); + + it('should allow validator added via updateNodeKeyStore with an existing publisher key', async () => { + // A new validator maps to the SAME publisher key that's already in the L1TxUtils pool + const newValidatorAddress = EthAddress.random(); + const updatedKeyStore = mock(); + updatedKeyStore.getPublisherAddresses.mockImplementation((addr: EthAddress) => { + if (addr.equals(newValidatorAddress)) { + return [publisherAddress]; // same key as L1TxUtils + } + return []; + }); + + factory.updateNodeKeyStore(updatedKeyStore); + + mockL1TxUtils.getSenderAddress.mockReturnValue(publisherAddress); + + const result = await factory.create(newValidatorAddress); + + // Verify the filter accepts the publisher (same key) + const filterFn = mockPublisherManager.getAvailablePublisher.mock.calls[0][0]!; + expect(filterFn(mockL1TxUtils)).toBe(true); + + expect(result.attestorAddress).toBe(newValidatorAddress); + expect(result.publisher).toBeDefined(); + }); + it('should create SequencerPublisher with correct configuration', async () => { mockNodeKeyStore.getAttestorForPublisher.mockReturnValue(attestorAddress); const mockSlashingProposer = { address: EthAddress.random() }; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts index 3d79eea915b7..c58bc4d40afd 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher-factory.ts @@ -26,6 +26,8 @@ export class SequencerPublisherFactory { /** Stores the last slot in which every action was carried out by a publisher */ private lastActions: Partial> = {}; + private nodeKeyStore: NodeKeystoreAdapter; + private logger: Logger; constructor( @@ -45,7 +47,17 @@ export class SequencerPublisherFactory { ) 
{ this.publisherMetrics = new SequencerPublisherMetrics(deps.telemetry, 'SequencerPublisher'); this.logger = deps.logger ?? createLogger('sequencer'); + this.nodeKeyStore = this.deps.nodeKeyStore; + } + + /** + * Updates the node keystore adapter used for publisher lookups. + * Called when the keystore is reloaded at runtime to reflect new validator-publisher mappings. + */ + public updateNodeKeyStore(adapter: NodeKeystoreAdapter): void { + this.nodeKeyStore = adapter; } + /** * Creates a new SequencerPublisher instance. * @param _validatorAddress - The address of the validator that will be using the publisher. @@ -54,7 +66,7 @@ export class SequencerPublisherFactory { public async create(validatorAddress?: EthAddress): Promise { // If we have been given an attestor address we must only allow publishers permitted for that attestor - const allowedPublishers = !validatorAddress ? [] : this.deps.nodeKeyStore.getPublisherAddresses(validatorAddress); + const allowedPublishers = !validatorAddress ? [] : this.nodeKeyStore.getPublisherAddresses(validatorAddress); const filter: PublisherFilter = !validatorAddress ? () => true : (utils: L1TxUtils) => { @@ -64,7 +76,7 @@ export class SequencerPublisherFactory { const l1Publisher = await this.deps.publisherManager.getAvailablePublisher(filter); const attestorAddress = - validatorAddress ?? this.deps.nodeKeyStore.getAttestorForPublisher(l1Publisher.getSenderAddress()); + validatorAddress ?? 
this.nodeKeyStore.getAttestorForPublisher(l1Publisher.getSenderAddress()); const rollup = this.deps.rollupContract; const slashingProposerContract = await rollup.getSlashingProposer(); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 2422150bab4d..071613e1c491 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -25,7 +25,7 @@ import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { pickFromSchema } from '@aztec/stdlib/schemas'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { Attributes, type TelemetryClient, type Tracer, getTelemetryClient, trackSpan } from '@aztec/telemetry-client'; -import { FullNodeCheckpointsBuilder, type ValidatorClient } from '@aztec/validator-client'; +import { FullNodeCheckpointsBuilder, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client'; import EventEmitter from 'node:events'; @@ -856,6 +856,11 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter { epochOrSlot: expect.any(BigInt), }); }); + + it('reloadKeystore', async () => { + await context.client.reloadKeystore(); + }); }); class MockAztecNodeAdmin implements AztecNodeAdmin { @@ -189,4 +193,7 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { resumeSync(): Promise { return Promise.resolve(); } + reloadKeystore(): Promise { + return Promise.resolve(); + } } diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts index 1003734261f8..500e2451af8a 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts @@ -50,6 +50,26 @@ export interface AztecNodeAdmin { /** Returns all offenses applicable for the given round. 
*/ getSlashOffenses(round: bigint | 'all' | 'current'): Promise; + + /** + * Reloads keystore configuration from disk. + * + * What is updated: + * - Validator attester keys + * - Coinbase address per validator + * - Fee recipient address per validator + * + * What is NOT updated (requires node restart): + * - L1 publisher signers (the funded accounts that send L1 transactions) + * - Prover keys + * - HA signer PostgreSQL connections + * + * Notes: + * - New validators must use a publisher key that was already configured at node + * startup (or omit the publisher field to fall back to the attester key). + * A validator with an unknown publisher key will cause the reload to be rejected. + */ + reloadKeystore(): Promise; } // L1 contracts are not mutable via admin updates. @@ -88,6 +108,7 @@ export const AztecNodeAdminApiSchema: ApiSchemaFor = { .function() .args(z.union([z.bigint(), z.literal('all'), z.literal('current')])) .returns(z.array(OffenseSchema)), + reloadKeystore: z.function().returns(z.void()), }; export function createAztecNodeAdminClient( diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index df6d16208a16..639ec6611cf5 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -51,9 +51,32 @@ import type { FullNodeCheckpointsBuilder, } from './checkpoint_builder.js'; import { type ValidatorClientConfig, validatorClientConfigMappings } from './config.js'; -import type { HAKeyStore } from './key_store/ha_key_store.js'; +import { HAKeyStore } from './key_store/ha_key_store.js'; import { ValidatorClient } from './validator.js'; +function makeKeyStore(validator: { + attester: Hex<32>[] | Hex<32>; + coinbase?: EthAddress; + feeRecipient?: AztecAddress; + publisher?: Hex<32>[]; +}): KeyStore { + return { + schemaVersion: 1, + slasher: undefined, + prover: undefined, + remoteSigner: undefined, + validators: [ + { + attester: 
Array.isArray(validator.attester) ? validator.attester : [validator.attester], + feeRecipient: validator.feeRecipient ?? AztecAddress.ZERO, + coinbase: validator.coinbase, + remoteSigner: undefined, + publisher: validator.publisher ?? [], + }, + ], + }; +} + describe('ValidatorClient', () => { let config: ValidatorClientConfig & Pick< @@ -133,22 +156,7 @@ describe('ValidatorClient', () => { maxStuckDutiesAgeMs: 72000, }; - const keyStore: KeyStore = { - schemaVersion: 1, - slasher: undefined, - prover: undefined, - remoteSigner: undefined, - validators: [ - { - attester: validatorPrivateKeys.map(key => key as Hex<32>), - feeRecipient: AztecAddress.ZERO, - coinbase: undefined, - remoteSigner: undefined, - publisher: [], - }, - ], - }; - keyStoreManager = new KeystoreManager(keyStore); + keyStoreManager = new KeystoreManager(makeKeyStore({ attester: validatorPrivateKeys.map(key => key as Hex<32>) })); validatorClient = (await ValidatorClient.new( config, @@ -934,6 +942,126 @@ describe('ValidatorClient', () => { expect(haKeyStore.stop).toHaveBeenCalledTimes(1); }); }); + + describe('reloadKeystore', () => { + // build a KeystoreManager from a single-validator KeyStore and reload. 
+ const reloadWith = (overrides: Parameters[0]) => { + const manager = new KeystoreManager(makeKeyStore(overrides)); + validatorClient.reloadKeystore(manager); + return manager; + }; + + const allKeys = () => config.validatorPrivateKeys!.getValue().map(k => k as Hex<32>); + + it('should update coinbase after reload', () => { + const newCoinbase = EthAddress.random(); + reloadWith({ attester: allKeys(), coinbase: newCoinbase }); + + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(newCoinbase); + }); + + it('should update fee recipient after reload', async () => { + const newFeeRecipient = await AztecAddress.random(); + reloadWith({ attester: allKeys(), feeRecipient: newFeeRecipient }); + + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + expect(validatorClient.getFeeRecipientForAttestor(attestorAddress)).toEqual(newFeeRecipient); + }); + + it('should add new validator after reload', () => { + const newPrivateKey = generatePrivateKey(); + const newAccount = privateKeyToAccount(newPrivateKey); + reloadWith({ attester: [...allKeys(), newPrivateKey as Hex<32>] }); + + const addresses = validatorClient.getValidatorAddresses(); + expect(addresses).toHaveLength(3); + expect(addresses.some(a => a.equals(EthAddress.fromString(newAccount.address)))).toBe(true); + }); + + it('should update attester key after reload', () => { + const newPrivateKey = generatePrivateKey(); + const newAccount = privateKeyToAccount(newPrivateKey); + reloadWith({ attester: newPrivateKey as Hex<32> }); + + const addresses = validatorClient.getValidatorAddresses(); + expect(addresses).toHaveLength(1); + expect(addresses[0]).toEqual(EthAddress.fromString(newAccount.address)); + }); + + it('should remove a validator after reload', () => { + const remainingKey = config.validatorPrivateKeys!.getValue()[0] as Hex<32>; + const removedAccount = validatorAccounts[1]; + 
reloadWith({ attester: remainingKey }); + + const addresses = validatorClient.getValidatorAddresses(); + expect(addresses).toHaveLength(1); + expect(addresses.some(a => a.equals(EthAddress.fromString(removedAccount.address)))).toBe(false); + + // Accessing the removed validator's coinbase should throw + expect(() => validatorClient.getCoinbaseForAttestor(EthAddress.fromString(removedAccount.address))).toThrow( + /not found in any validator configuration/, + ); + }); + + it('should change coinbase and no longer return the old one', () => { + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + + const oldCoinbase = EthAddress.random(); + reloadWith({ attester: allKeys(), coinbase: oldCoinbase }); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(oldCoinbase); + + const newCoinbase = EthAddress.random(); + reloadWith({ attester: allKeys(), coinbase: newCoinbase }); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(newCoinbase); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).not.toEqual(oldCoinbase); + }); + + it('should reset coinbase to attester fallback when removed', () => { + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + + const explicitCoinbase = EthAddress.random(); + reloadWith({ attester: allKeys(), coinbase: explicitCoinbase }); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(explicitCoinbase); + + // Reload without coinbase — falls back to the attester address itself + reloadWith({ attester: allKeys() }); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(attestorAddress); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).not.toEqual(explicitCoinbase); + }); + + it('should change fee recipient and no longer return the old one', async () => { + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + + const oldFeeRecipient = await 
AztecAddress.random(); + reloadWith({ attester: allKeys(), feeRecipient: oldFeeRecipient }); + expect(validatorClient.getFeeRecipientForAttestor(attestorAddress)).toEqual(oldFeeRecipient); + + const newFeeRecipient = await AztecAddress.random(); + reloadWith({ attester: allKeys(), feeRecipient: newFeeRecipient }); + expect(validatorClient.getFeeRecipientForAttestor(attestorAddress)).toEqual(newFeeRecipient); + expect(validatorClient.getFeeRecipientForAttestor(attestorAddress)).not.toEqual(oldFeeRecipient); + }); + + it('should preserve HA signer and wrap new adapter in HAKeyStore after reload', () => { + // Simulate HA mode by setting the haSigner and wrapping in HAKeyStore + const mockHASigner = { nodeId: 'test-ha-node' }; + (validatorClient as any).haSigner = mockHASigner; + (validatorClient as any).keyStore = haKeyStore; + + const newCoinbase = EthAddress.random(); + reloadWith({ attester: allKeys(), coinbase: newCoinbase }); + + // Verify the keyStore is an HAKeyStore wrapping the same haSigner + const keyStoreAfterReload = (validatorClient as any).keyStore; + expect(keyStoreAfterReload).toBeInstanceOf(HAKeyStore); + expect((keyStoreAfterReload as any).haSigner).toBe(mockHASigner); + + // Verify the new coinbase is accessible through the HAKeyStore + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + expect(validatorClient.getCoinbaseForAttestor(attestorAddress)).toEqual(newCoinbase); + }); + }); }); /** Exposes protected methods for direct testing */ diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 8bb3c3f773f2..a1a8e112b264 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -47,6 +47,7 @@ import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { type TelemetryClient, type Tracer, getTelemetryClient } from '@aztec/telemetry-client'; import { createHASigner } from 
'@aztec/validator-ha-signer/factory'; import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; +import type { ValidatorHASigner } from '@aztec/validator-ha-signer/validator-ha-signer'; import { EventEmitter } from 'events'; import type { TypedDataDefinition } from 'viem'; @@ -77,7 +78,6 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) private validationService: ValidationService; private metrics: ValidatorMetrics; private log: Logger; - // Whether it has already registered handlers on the p2p client private hasRegisteredHandlers = false; @@ -106,6 +106,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) private l1ToL2MessageSource: L1ToL2MessageSource, private config: ValidatorClientFullConfig, private blobClient: BlobClientInterface, + private haSigner: ValidatorHASigner | undefined, private dateProvider: DateProvider = new DateProvider(), telemetry: TelemetryClient = getTelemetryClient(), log = createLogger('validator'), @@ -211,7 +212,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) telemetry, ); - let validatorKeyStore: ExtendedValidatorKeyStore = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); + const nodeKeystoreAdapter = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); + let validatorKeyStore: ExtendedValidatorKeyStore = nodeKeystoreAdapter; + let haSigner: ValidatorHASigner | undefined; if (config.haSigningEnabled) { // If maxStuckDutiesAgeMs is not explicitly set, compute it from Aztec slot duration const haConfig = { @@ -219,7 +222,8 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) maxStuckDutiesAgeMs: config.maxStuckDutiesAgeMs ?? 
epochCache.getL1Constants().slotDuration * 2 * 1000, }; const { signer } = await createHASigner(haConfig); - validatorKeyStore = new HAKeyStore(validatorKeyStore, signer); + haSigner = signer; + validatorKeyStore = new HAKeyStore(nodeKeystoreAdapter, signer); } const validator = new ValidatorClient( @@ -233,6 +237,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) l1ToL2MessageSource, config, blobClient, + haSigner, dateProvider, telemetry, ); @@ -270,6 +275,28 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) this.config = { ...this.config, ...config }; } + public reloadKeystore(newManager: KeystoreManager): void { + if (this.config.haSigningEnabled && !this.haSigner) { + this.log.warn( + 'HA signing is enabled in config but was not initialized at startup. ' + + 'Restart the node to enable HA signing.', + ); + } else if (!this.config.haSigningEnabled && this.haSigner) { + this.log.warn( + 'HA signing was disabled via config update but the HA signer is still active. 
' + + 'Restart the node to fully disable HA signing.', + ); + } + + const newAdapter = NodeKeystoreAdapter.fromKeyStoreManager(newManager); + if (this.haSigner) { + this.keyStore = new HAKeyStore(newAdapter, this.haSigner); + } else { + this.keyStore = newAdapter; + } + this.validationService = new ValidationService(this.keyStore, this.log.createChild('validation-service')); + } + public async start() { if (this.epochCacheUpdateLoop.isRunning()) { this.log.warn(`Validator client already started`); From 6b2cade3d6862926d0e22537363750a3d5915b70 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 17:26:36 +0000 Subject: [PATCH 55/62] fix: increase waitForTx timeout in epochs_invalidate_block test (#20603) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Increases the `waitForTx` timeout in the `epochs_invalidate_block` test from `L2_SLOT_DURATION_IN_S * 4` (144s) to `L2_SLOT_DURATION_IN_S * 8` (288s) - The sentTx must survive a checkpoint invalidation cycle, and with `maxTxsPerBlock: 1` and 8 txs in the mempool, it could be picked up anywhere from 1st to 8th - With the previous timeout, node-0 could only sync ~2 checkpoints (4 txs) before timing out, causing intermittent failures when the sentTx was ordered 5th or later ## Test plan - This only increases a timeout value, no logic changes - The fix aligns with the timeout already used later in the same test for `waitUntilCheckpointNumber` (line 205) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- .../src/e2e_epochs/epochs_invalidate_block.parallel.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts index 4442138cc91a..fa09091559b8 100644 --- 
a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.parallel.test.ts @@ -184,7 +184,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => { ); // Verify the transaction was eventually included - const receipt = await waitForTx(context.aztecNode, sentTx, { timeout: test.L2_SLOT_DURATION_IN_S * 4 }); + const receipt = await waitForTx(context.aztecNode, sentTx, { timeout: test.L2_SLOT_DURATION_IN_S * 8 }); expect(receipt.isMined()).toBeTrue(); logger.warn(`Transaction included in block ${receipt.blockNumber}`); From fae88c7f006a667b1b284a695eab0a42e8dbb024 Mon Sep 17 00:00:00 2001 From: danielntmd Date: Tue, 17 Feb 2026 17:36:33 +0000 Subject: [PATCH 56/62] chore: update skill to correct path and add changelog docs for new endpoints and error codes - update skill to point to correct path for node operators - add changelog docs for the reloadKeystore, admin API key, and error codes for RPC calls --- .claude/skills/updating-changelog/SKILL.md | 4 +- .../operators/reference/changelog/v4.md | 49 +++++++++++++++++++ 2 files changed, 51 insertions(+), 2 deletions(-) diff --git a/.claude/skills/updating-changelog/SKILL.md b/.claude/skills/updating-changelog/SKILL.md index 018859f8833e..1af52b765da4 100644 --- a/.claude/skills/updating-changelog/SKILL.md +++ b/.claude/skills/updating-changelog/SKILL.md @@ -14,7 +14,7 @@ Read `.release-please-manifest.json` to get the version (e.g., `{"." : "4.0.0"}` **Target files:** - Aztec contract developers: `docs/docs-developers/docs/resources/migration_notes.md` -- Node operators and Ethereum contract developers: `docs/docs-network/reference/changelog/v{major}.md` +- Node operators and Ethereum contract developers: `docs/docs-operate/operators/reference/changelog/v{major}.md` ### 2. Analyze Branch Changes @@ -60,7 +60,7 @@ Explanation of what changed. 
## Node Operator Changelog Format -**File:** `docs/docs-network/reference/changelog/v{major}.md` +**File:** `docs/docs-operate/operators/reference/changelog/v{major}.md` **Breaking changes:** ````markdown diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 10bfef79cc4d..369e3fec643a 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -88,6 +88,55 @@ A new environment variable `AZTEC_INITIAL_ETH_PER_FEE_ASSET` has been added to c This replaces the previous hardcoded default and allows network operators to set the starting price point for the fee asset. +### `reloadKeystore` admin RPC endpoint + +Node operators can now update validator attester keys, coinbase, and fee recipient without restarting the node by calling the new `reloadKeystore` admin RPC endpoint. + +What is updated on reload: +- Validator attester keys (add, remove, or replace) +- Coinbase and fee recipient per validator +- Publisher-to-validator mapping + +What is NOT updated (requires restart): +- L1 publisher signers +- Prover keys +- HA signer connections + +New validators must use a publisher key already initialized at startup. Reload is rejected with a clear error if validation fails. + +### Admin API key authentication + +The admin JSON-RPC endpoint now supports auto-generated API key authentication. 
+ +**Behavior:** +- A cryptographically secure API key is auto-generated at first startup and displayed once via stdout +- Only the SHA-256 hash is persisted to `/admin/api_key_hash` +- The key is reused across restarts when `--data-directory` is set +- Supports both `x-api-key` and `Authorization: Bearer ` headers +- Health check endpoint (`GET /status`) is excluded from auth (for k8s probes) + +**Configuration:** + +```bash +--admin-api-key-hash ($AZTEC_ADMIN_API_KEY_HASH) # Use a pre-generated SHA-256 key hash +--no-admin-api-key ($AZTEC_NO_ADMIN_API_KEY) # Disable auth entirely +--reset-admin-api-key ($AZTEC_RESET_ADMIN_API_KEY) # Force key regeneration +``` + +**Helm charts**: Admin API key auth is disabled by default (`noAdminApiKey: true`). Set to `false` in production values to enable. + +**Migration**: No action required — auth is opt-out. To enable, ensure `--no-admin-api-key` is not set and note the key printed at startup. + +### Transaction pool error codes for RPC callers + +Transaction submission via RPC now returns structured rejection codes when a transaction is rejected by the mempool: + +- `LOW_PRIORITY_FEE` — tx priority fee is too low +- `INSUFFICIENT_FEE_PAYER_BALANCE` — fee payer doesn't have enough balance +- `NULLIFIER_CONFLICT` — conflicting nullifier already in pool + +**Impact**: Improved developer experience — callers can now programmatically handle specific rejection reasons. 
+ ## Changed defaults ## Troubleshooting From 1e180e5eab962311f0fa1644894e5857aa75245a Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 17:55:26 +0000 Subject: [PATCH 57/62] chore: update block proposal/checkpoint proposal panel (#20584) Fix A-511 --- spartan/metrics/grafana/dashboards/aztec_network.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spartan/metrics/grafana/dashboards/aztec_network.json b/spartan/metrics/grafana/dashboards/aztec_network.json index fd9041a559a4..7b037701c3a8 100644 --- a/spartan/metrics/grafana/dashboards/aztec_network.json +++ b/spartan/metrics/grafana/dashboards/aztec_network.json @@ -372,14 +372,14 @@ "uid": "${data_source}" }, "editorMode": "code", - "expr": "avg_over_time((clamp(increase(aztec_archiver_block_height{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_status=\"\"}[$__range]) / on(service_name) (increase(aztec_p2p_gossip_message_validation_count{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_gossip_topic_name=\"block_proposal\"}[$__range])), 0, 1))[15m:1m])", + "expr": "(avg_over_time((clamp(increase(aztec_archiver_checkpoint_height{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_status=\"\"}[$__range]) / on(service_name) (increase(aztec_p2p_gossip_message_validation_count{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_gossip_topic_name=\"checkpoint_proposal\", aztec_ok=\"true\"}[$__range])), 0, 1))[15m:1m]))\nor (avg_over_time((clamp(increase(aztec_archiver_block_height{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_status=\"\"}[$__range]) / on(service_name) (increase(aztec_p2p_gossip_message_validation_count{k8s_namespace_name=\"$namespace\", service_name=\"prover-node\", aztec_gossip_topic_name=\"block_proposal\", aztec_ok=\"true\"}[$__range])), 0, 1))[15m:1m]))", "hide": false, - "legendFormat": "Mined block proposals", + "legendFormat": "Mined 
proposals", "range": true, "refId": "A" } ], - "title": "Mined block proposals", + "title": "Mined proposals", "type": "gauge" }, { From 101ef01fd918e1fb73efbb732ede48eba9437dbe Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 17:56:57 +0000 Subject: [PATCH 58/62] fix: async dispose() lint rule (#20587) `Symbol.dispose` was being incorrectly used in a couple of places for async disposal. This should be using `Symbol.asyncDispose` coupled with `await using foo = ...`. --- .../eslint-rules/no-async-dispose.js | 62 +++++++++++++++++++ yarn-project/foundation/eslint.config.js | 3 + .../sequencer/checkpoint_proposal_job.test.ts | 4 +- .../checkpoint_proposal_job.timing.test.ts | 4 +- .../src/sequencer/checkpoint_proposal_job.ts | 2 +- .../src/public/hinting_db_sources.ts | 2 +- .../public_processor/guarded_merkle_tree.ts | 2 +- .../src/interfaces/merkle_tree_operations.ts | 2 +- .../src/block_proposal_handler.ts | 2 +- .../src/validator.integration.test.ts | 2 +- .../validator-client/src/validator.test.ts | 2 +- .../src/native/merkle_trees_facade.ts | 2 +- 12 files changed, 79 insertions(+), 10 deletions(-) create mode 100644 yarn-project/foundation/eslint-rules/no-async-dispose.js diff --git a/yarn-project/foundation/eslint-rules/no-async-dispose.js b/yarn-project/foundation/eslint-rules/no-async-dispose.js new file mode 100644 index 000000000000..f8d79ae45ee3 --- /dev/null +++ b/yarn-project/foundation/eslint-rules/no-async-dispose.js @@ -0,0 +1,62 @@ +// @ts-check + +/** + * @fileoverview Rule to disallow async [Symbol.dispose]() methods. + * Use [Symbol.asyncDispose]() with AsyncDisposable instead. + */ + +/** @type {import('eslint').Rule.RuleModule} */ +export default { + meta: { + type: 'problem', + docs: { + description: 'Disallow async [Symbol.dispose]() methods', + category: 'Best Practices', + recommended: true, + }, + messages: { + asyncDispose: + '[Symbol.dispose]() should not be async. 
Use [Symbol.asyncDispose]() with AsyncDisposable instead.', + }, + schema: [], + }, + + create(context) { + return { + MethodDefinition(node) { + // Match computed property keys like [Symbol.dispose] + if (!node.computed || !node.key || node.key.type !== 'MemberExpression') { + return; + } + + const key = node.key; + if ( + key.object.type !== 'Identifier' || + key.object.name !== 'Symbol' || + key.property.type !== 'Identifier' || + key.property.name !== 'dispose' + ) { + return; + } + + // Check if the method is async + if (node.value.async) { + context.report({ node, messageId: 'asyncDispose' }); + return; + } + + // Check if the return type annotation contains Promise + // @ts-expect-error returnType is a typescript-eslint AST extension + const returnType = node.value.returnType?.typeAnnotation; + if ( + returnType && + returnType.type === 'TSTypeReference' && + returnType.typeName?.type === 'Identifier' && + returnType.typeName.name === 'Promise' + ) { + context.report({ node, messageId: 'asyncDispose' }); + } + }, + }; + }, +}; diff --git a/yarn-project/foundation/eslint.config.js b/yarn-project/foundation/eslint.config.js index a1c756de969d..51b7beb16235 100644 --- a/yarn-project/foundation/eslint.config.js +++ b/yarn-project/foundation/eslint.config.js @@ -8,6 +8,7 @@ import { globalIgnores } from 'eslint/config'; import globals from 'globals'; import tseslint from 'typescript-eslint'; +import noAsyncDispose from './eslint-rules/no-async-dispose.js'; import noNonPrimitiveInCollections from './eslint-rules/no-non-primitive-in-collections.js'; import noUnsafeBrandedTypeConversion from './eslint-rules/no-unsafe-branded-type-conversion.js'; @@ -51,6 +52,7 @@ export default [ importPlugin, 'aztec-custom': { rules: { + 'no-async-dispose': noAsyncDispose, 'no-non-primitive-in-collections': noNonPrimitiveInCollections, 'no-unsafe-branded-type-conversion': noUnsafeBrandedTypeConversion, }, @@ -116,6 +118,7 @@ export default [ 'import-x/no-extraneous-dependencies': 
'error', // this unfortunately doesn't block `fit` and `fdescribe` 'no-only-tests/no-only-tests': ['error'], + 'aztec-custom/no-async-dispose': 'error', 'aztec-custom/no-non-primitive-in-collections': 'error', 'aztec-custom/no-unsafe-branded-type-conversion': 'error', }, diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts index c3523b9ce36c..8ef5a19129ba 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts @@ -196,7 +196,9 @@ describe('CheckpointProposalJob', () => { p2p.broadcastProposal.mockResolvedValue(undefined); worldState = mockDeep(); - const mockFork = mock({ [Symbol.dispose]: jest.fn() }); + const mockFork = mock({ + [Symbol.asyncDispose]: jest.fn().mockReturnValue(Promise.resolve()) as () => Promise, + }); worldState.fork.mockResolvedValue(mockFork); // Create fake CheckpointsBuilder and CheckpointBuilder diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index 6b373ad5a3dd..2e9ebb18219e 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -415,7 +415,9 @@ describe('CheckpointProposalJob Timing Tests', () => { p2p.getPendingTxCount.mockResolvedValue(100); // Always have enough txs worldState = mockDeep(); - const mockFork = mock({ [Symbol.dispose]: jest.fn() }); + const mockFork = mock({ + [Symbol.asyncDispose]: jest.fn().mockReturnValue(Promise.resolve()) as () => Promise, + }); worldState.fork.mockResolvedValue(mockFork); l1ToL2MessageSource = mock(); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts 
b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 032804d9cd04..44abe045ba91 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -194,7 +194,7 @@ export class CheckpointProposalJob implements Traceable { const feeAssetPriceModifier = await this.publisher.getFeeAssetPriceModifier(); // Create a long-lived forked world state for the checkpoint builder - using fork = await this.worldState.fork(this.syncedToBlockNumber, { closeDelayMs: 12_000 }); + await using fork = await this.worldState.fork(this.syncedToBlockNumber, { closeDelayMs: 12_000 }); // Create checkpoint builder for the entire slot const checkpointBuilder = await this.checkpointsBuilder.startCheckpoint( diff --git a/yarn-project/simulator/src/public/hinting_db_sources.ts b/yarn-project/simulator/src/public/hinting_db_sources.ts index 538fee0ed8df..79044c631e64 100644 --- a/yarn-project/simulator/src/public/hinting_db_sources.ts +++ b/yarn-project/simulator/src/public/hinting_db_sources.ts @@ -572,7 +572,7 @@ export class HintingMerkleWriteOperations implements MerkleTreeWriteOperations { return await this.db.close(); } - async [Symbol.dispose](): Promise { + async [Symbol.asyncDispose](): Promise { await this.close(); } diff --git a/yarn-project/simulator/src/public/public_processor/guarded_merkle_tree.ts b/yarn-project/simulator/src/public/public_processor/guarded_merkle_tree.ts index 1a7f644e08f5..bcbd818a03f0 100644 --- a/yarn-project/simulator/src/public/public_processor/guarded_merkle_tree.ts +++ b/yarn-project/simulator/src/public/public_processor/guarded_merkle_tree.ts @@ -82,7 +82,7 @@ export class GuardedMerkleTreeOperations implements MerkleTreeWriteOperations { return this.guardAndPush(() => this.target.close()); } - async [Symbol.dispose](): Promise { + async [Symbol.asyncDispose](): Promise { await this.close(); } getTreeInfo(treeId: MerkleTreeId): Promise 
{ diff --git a/yarn-project/stdlib/src/interfaces/merkle_tree_operations.ts b/yarn-project/stdlib/src/interfaces/merkle_tree_operations.ts index 0da6e8b70080..63ee8e82f9b1 100644 --- a/yarn-project/stdlib/src/interfaces/merkle_tree_operations.ts +++ b/yarn-project/stdlib/src/interfaces/merkle_tree_operations.ts @@ -254,7 +254,7 @@ export interface MerkleTreeCheckpointOperations { export interface MerkleTreeWriteOperations extends MerkleTreeReadOperations, MerkleTreeCheckpointOperations, - Disposable { + AsyncDisposable { /** * Appends leaves to a given tree. * @param treeId - The tree to be updated. diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index 776b48ac266c..0c8812aee9a8 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -453,7 +453,7 @@ export class BlockProposalHandler { // Fork before the block to be built const parentBlockNumber = BlockNumber(blockNumber - 1); await this.worldState.syncImmediate(parentBlockNumber); - using fork = await this.worldState.fork(parentBlockNumber); + await using fork = await this.worldState.fork(parentBlockNumber); // Build checkpoint constants from proposal (excludes blockNumber which is per-block) const constants: CheckpointGlobalVariables = { diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 539c0df9b233..dd30f91bd4be 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -282,7 +282,7 @@ describe('ValidatorClient Integration', () => { timestamp: BigInt(Date.now()), }; - using fork = await proposer.worldStateDb.fork(); + await using fork = await proposer.worldStateDb.fork(); const builder = await proposer.checkpointsBuilder.startCheckpoint( checkpointNumber, 
globalVariables, diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 639ec6611cf5..7485939da4fc 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -310,7 +310,7 @@ describe('ValidatorClient', () => { checkpointsBuilder.openCheckpoint.mockResolvedValue(mockCheckpointBuilder); worldState.fork.mockResolvedValue({ close: () => Promise.resolve(), - [Symbol.dispose]: () => {}, + [Symbol.asyncDispose]: () => Promise.resolve(), } as never); }; diff --git a/yarn-project/world-state/src/native/merkle_trees_facade.ts b/yarn-project/world-state/src/native/merkle_trees_facade.ts index d0eed23ccbdb..b7a107a8eb80 100644 --- a/yarn-project/world-state/src/native/merkle_trees_facade.ts +++ b/yarn-project/world-state/src/native/merkle_trees_facade.ts @@ -304,7 +304,7 @@ export class MerkleTreesForkFacade extends MerkleTreesFacade implements MerkleTr } } - async [Symbol.dispose](): Promise { + async [Symbol.asyncDispose](): Promise { if (this.opts.closeDelayMs) { void sleep(this.opts.closeDelayMs) .then(() => this.close()) From 7ea6a37bad49db41bdde2e6f9d68298c43c08a70 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 18:06:18 +0000 Subject: [PATCH 59/62] fix(p2p): fix compress option in file store and enable for tx uploads (#20605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - **S3 file store**: `save()` now sets `ContentEncoding: gzip` so `read()` can detect and decompress. Uses async `gzip`/`gunzip` instead of sync. - **Local file store**: `save()` now respects the `compress` option. `read()` auto-detects gzip magic bytes and decompresses transparently. - **Tx uploads**: Flipped `compress: false` → `compress: true` in `TxFileStore.uploadTx()`. 
- **Tests**: Updated assertions and added a compression round-trip test (upload via `TxFileStore`, read back via `FileStoreTxSource`). ## Test plan - [x] `yarn build` passes - [x] All 14 tests pass in `tx_file_store.test.ts` including new round-trip test - [x] `yarn format` and `yarn lint` pass for p2p and stdlib 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- .../tx_file_store/tx_file_store.test.ts | 25 +++++++++++++++++-- .../services/tx_file_store/tx_file_store.ts | 2 +- yarn-project/stdlib/src/file-store/local.ts | 20 +++++++++++---- yarn-project/stdlib/src/file-store/s3.ts | 15 ++++++++--- 4 files changed, 51 insertions(+), 11 deletions(-) diff --git a/yarn-project/p2p/src/services/tx_file_store/tx_file_store.test.ts b/yarn-project/p2p/src/services/tx_file_store/tx_file_store.test.ts index e14f47a2b6dd..1a0ab6b19fde 100644 --- a/yarn-project/p2p/src/services/tx_file_store/tx_file_store.test.ts +++ b/yarn-project/p2p/src/services/tx_file_store/tx_file_store.test.ts @@ -9,6 +9,7 @@ import { tmpdir } from 'os'; import { join } from 'path'; import { InMemoryTxPool } from '../../test-helpers/testbench-utils.js'; +import { FileStoreTxSource } from '../tx_collection/file_store_tx_source.js'; import type { TxFileStoreConfig } from './config.js'; import { TxFileStore } from './tx_file_store.js'; @@ -103,7 +104,7 @@ describe('TxFileStore', () => { await txFileStore!.flush(); expect(spy).toHaveBeenCalledWith(`${basePath}/txs/${tx.getTxHash().toString()}.bin`, tx.toBuffer(), { - compress: false, + compress: true, }); spy.mockRestore(); @@ -148,7 +149,7 @@ describe('TxFileStore', () => { await txFileStore!.flush(); expect(spy).toHaveBeenCalledWith(`${basePath}/txs/${tx.getTxHash().toString()}.bin`, tx.toBuffer(), { - compress: false, + compress: true, }); spy.mockRestore(); @@ -328,4 +329,24 @@ describe('TxFileStore', () => { expect(txFileStore!.getPendingUploadCount()).toBe(0); }); }); + + describe('compression 
round-trip', () => { + it('uploads compressed tx and reads it back via FileStoreTxSource', async () => { + txFileStore = await TxFileStore.create(txPool, config, basePath, log, undefined, fileStore); + txFileStore!.start(); + + const tx = await makeTx(); + await txPool.addPendingTxs([tx]); + await txFileStore!.flush(); + + // Read back via FileStoreTxSource using the same local file store + const txSource = await FileStoreTxSource.create(`file://${tmpDir}`, basePath, log); + expect(txSource).toBeDefined(); + + const results = await txSource!.getTxsByHash([tx.getTxHash()]); + expect(results).toHaveLength(1); + expect(results[0]).toBeDefined(); + expect(results[0]!.toBuffer()).toEqual(tx.toBuffer()); + }); + }); }); diff --git a/yarn-project/p2p/src/services/tx_file_store/tx_file_store.ts b/yarn-project/p2p/src/services/tx_file_store/tx_file_store.ts index 672bdd6d40cc..063c6256680f 100644 --- a/yarn-project/p2p/src/services/tx_file_store/tx_file_store.ts +++ b/yarn-project/p2p/src/services/tx_file_store/tx_file_store.ts @@ -146,7 +146,7 @@ export class TxFileStore { } await retry( - () => this.fileStore.save(path, tx.toBuffer(), { compress: false }), + () => this.fileStore.save(path, tx.toBuffer(), { compress: true }), `Uploading tx ${txHash}`, makeBackoff([0.1, 0.5, 2]), this.log, diff --git a/yarn-project/stdlib/src/file-store/local.ts b/yarn-project/stdlib/src/file-store/local.ts index e2c3409666ba..a01ade236dff 100644 --- a/yarn-project/stdlib/src/file-store/local.ts +++ b/yarn-project/stdlib/src/file-store/local.ts @@ -1,15 +1,21 @@ import { access, mkdir, readFile, writeFile } from 'fs/promises'; import { dirname, resolve } from 'path'; +import { promisify } from 'util'; +import { gunzip as gunzipCb, gzip as gzipCb } from 'zlib'; -import type { FileStore } from './interface.js'; +import type { FileStore, FileStoreSaveOptions } from './interface.js'; + +const gzip = promisify(gzipCb); +const gunzip = promisify(gunzipCb); export class LocalFileStore implements 
FileStore { constructor(private readonly basePath: string) {} - public async save(path: string, data: Buffer): Promise { + public async save(path: string, data: Buffer, opts?: FileStoreSaveOptions): Promise { const fullPath = this.getFullPath(path); await mkdir(dirname(fullPath), { recursive: true }); - await writeFile(fullPath, data); + const toWrite = opts?.compress ? await gzip(data) : data; + await writeFile(fullPath, toWrite); return `file://${fullPath}`; } @@ -18,9 +24,13 @@ export class LocalFileStore implements FileStore { return this.save(destPath, data); } - public read(pathOrUrlStr: string): Promise { + public async read(pathOrUrlStr: string): Promise { const fullPath = this.getFullPath(pathOrUrlStr); - return readFile(fullPath); + const data = await readFile(fullPath); + if (data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b) { + return await gunzip(data); + } + return data; } public async download(pathOrUrlStr: string, destPath: string): Promise { diff --git a/yarn-project/stdlib/src/file-store/s3.ts b/yarn-project/stdlib/src/file-store/s3.ts index 2033a1e86d9d..22d41347b4bc 100644 --- a/yarn-project/stdlib/src/file-store/s3.ts +++ b/yarn-project/stdlib/src/file-store/s3.ts @@ -13,10 +13,14 @@ import { tmpdir } from 'os'; import { basename, dirname, join } from 'path'; import { Readable } from 'stream'; import { pipeline } from 'stream/promises'; -import { createGzip } from 'zlib'; +import { promisify } from 'util'; +import { createGzip, gunzip as gunzipCb, gzip as gzipCb } from 'zlib'; import type { FileStore, FileStoreSaveOptions } from './interface.js'; +const gzip = promisify(gzipCb); +const gunzip = promisify(gunzipCb); + function normalizeBasePath(path: string): string { return path?.replace(/^\/+|\/+$/g, '') ?? ''; } @@ -52,7 +56,7 @@ export class S3FileStore implements FileStore { const key = this.getFullPath(path); const shouldCompress = !!opts.compress; - const body = shouldCompress ? 
(await import('zlib')).gzipSync(data) : data; + const body = shouldCompress ? await gzip(data) : data; const contentLength = body.length; const contentType = this.detectContentType(key, shouldCompress); const put = new PutObjectCommand({ @@ -60,6 +64,7 @@ export class S3FileStore implements FileStore { Key: key, Body: body, ContentType: contentType, + ContentEncoding: shouldCompress ? 'gzip' : undefined, CacheControl: opts.metadata?.['Cache-control'], Metadata: this.extractUserMetadata(opts.metadata), ContentLength: contentLength, @@ -134,7 +139,11 @@ export class S3FileStore implements FileStore { for await (const chunk of stream) { chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); } - return Buffer.concat(chunks); + const buffer = Buffer.concat(chunks); + if (out.ContentEncoding === 'gzip') { + return await gunzip(buffer); + } + return buffer; } public async download(pathOrUrlStr: string, destPath: string): Promise { From 2f31af494fe3e10a507e8d699ce8c7a9c422b3e8 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 18:06:54 +0000 Subject: [PATCH 60/62] chore: fixes for chaos mesh experiments (#20606) Update tps-scenario so that chaos mesh partition works over p2p too (`P2P_PUBLIC_IP=false`) and make sure the RPC nodes are selected correctly. 
Fix A-561 --- spartan/environments/tps-scenario.env | 2 ++ spartan/scripts/deploy_network.sh | 2 +- spartan/terraform/deploy-aztec-infra/main.tf | 9 ++++----- .../deploy-aztec-infra/values/full-node.yaml | 11 +++++++++++ .../values/prover-resources-hi-tps.yaml | 1 - spartan/terraform/deploy-aztec-infra/values/rpc.yaml | 2 +- 6 files changed, 19 insertions(+), 8 deletions(-) diff --git a/spartan/environments/tps-scenario.env b/spartan/environments/tps-scenario.env index b7548574fd39..abdaf40948b0 100644 --- a/spartan/environments/tps-scenario.env +++ b/spartan/environments/tps-scenario.env @@ -54,6 +54,8 @@ PROVER_RESOURCE_PROFILE="hi-tps" PROVER_AGENT_POLL_INTERVAL_MS=10000 WAIT_FOR_PROVER_DEPLOY=false +P2P_PUBLIC_IP=false + RUN_TESTS=false P2P_MAX_TX_POOL_SIZE=1000000000 diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index 992653a701c6..ad40060dee94 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -467,7 +467,7 @@ if [[ "${CLUSTER}" == "kind" ]]; then P2P_PUBLIC_IP=false else P2P_NODEPORT_ENABLED=false - P2P_PUBLIC_IP=true + P2P_PUBLIC_IP=${P2P_PUBLIC_IP:-true} fi cat > "${DEPLOY_AZTEC_INFRA_DIR}/terraform.tfvars" << EOF diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index 85c891329229..338cef6dffeb 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -306,7 +306,7 @@ locals { p2p = { publicIP = var.P2P_PUBLIC_IP } } } - })], local.is_kind ? [yamlencode({ + })], local.is_kind ? 
[yamlencode({ agent = { nodeSelector = null affinity = null @@ -335,9 +335,9 @@ locals { "broker.node.logLevel" = var.LOG_LEVEL "broker.node.env.BOOTSTRAP_NODES" = "asdf" "broker.node.env.PROVER_BROKER_DEBUG_REPLAY_ENABLED" = var.PROVER_BROKER_DEBUG_REPLAY_ENABLED - "agent.node.image.repository" = local.prover_agent_image.repository - "agent.node.image.tag" = local.prover_agent_image.tag - "agent.node.env.CRS_PATH" = "/usr/src/crs" + "agent.node.image.repository" = local.prover_agent_image.repository + "agent.node.image.tag" = local.prover_agent_image.tag + "agent.node.env.CRS_PATH" = "/usr/src/crs" "agent.node.proverRealProofs" = var.PROVER_REAL_PROOFS "agent.node.env.PROVER_AGENT_POLL_INTERVAL_MS" = var.PROVER_AGENT_POLL_INTERVAL_MS "agent.replicaCount" = var.PROVER_REPLICAS @@ -416,7 +416,6 @@ locals { })] custom_settings = merge({ - "nodeType" = "rpc" "replicaCount" = var.RPC_REPLICAS "service.p2p.nodePortEnabled" = var.P2P_NODEPORT_ENABLED "service.p2p.announcePort" = local.p2p_port_rpc diff --git a/spartan/terraform/deploy-aztec-infra/values/full-node.yaml b/spartan/terraform/deploy-aztec-infra/values/full-node.yaml index ac81a957591f..44db0c6f9e87 100644 --- a/spartan/terraform/deploy-aztec-infra/values/full-node.yaml +++ b/spartan/terraform/deploy-aztec-infra/values/full-node.yaml @@ -3,6 +3,17 @@ node: env: OTEL_SERVICE_NAME: "full-node" + preStartScript: | + if [ -n "${BOOT_NODE_HOST:-}" ]; then + until curl --silent --head --fail "${BOOT_NODE_HOST}/status" > /dev/null; do + echo "Waiting for boot node..." + sleep 1 + done + echo "Boot node is ready!" 
+ + export BOOTSTRAP_NODES=$(curl -X POST -H "content-type: application/json" --data '{"method": "bootstrap_getEncodedEnr"}' $BOOT_NODE_HOST | jq -r .result) + fi + startCmd: - --node - --archiver diff --git a/spartan/terraform/deploy-aztec-infra/values/prover-resources-hi-tps.yaml b/spartan/terraform/deploy-aztec-infra/values/prover-resources-hi-tps.yaml index bdaee2f34f3c..586e22a37d7b 100644 --- a/spartan/terraform/deploy-aztec-infra/values/prover-resources-hi-tps.yaml +++ b/spartan/terraform/deploy-aztec-infra/values/prover-resources-hi-tps.yaml @@ -1,5 +1,4 @@ node: - hostNetwork: true node: enableInspector: true nodeJsOptions: diff --git a/spartan/terraform/deploy-aztec-infra/values/rpc.yaml b/spartan/terraform/deploy-aztec-infra/values/rpc.yaml index 55cab255e6b8..a79515b9cd5a 100644 --- a/spartan/terraform/deploy-aztec-infra/values/rpc.yaml +++ b/spartan/terraform/deploy-aztec-infra/values/rpc.yaml @@ -1,5 +1,5 @@ +nodeType: "rpc-node" node: - nodeType: "rpc-node" env: OTEL_SERVICE_NAME: "node" AWS_ACCESS_KEY_ID: "" From 76923c1ca5c66337e51501f2d4124e469088b805 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 18:08:30 +0000 Subject: [PATCH 61/62] feat(p2p): add download metrics to file store tx source (#20601) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Add success/failure counters and duration histogram for tx downloads through `FileStoreTxSource`, mirroring the existing upload metrics - Wire `TelemetryClient` through `FileStoreTxSource.create()` and `createFileStoreTxSources()` factory - Add `'file-store'` to pre-initialized collection method attributes in `TxCollectionInstrumentation` so dashboards show a zero baseline ## Test plan - Build passes (`yarn build`) - Existing p2p tests pass (`yarn workspace @aztec/p2p test`) - Format and lint pass 🤖 Generated with [Claude Code](https://claude.com/claude-code) Fix A-506 Co-authored-by: Claude Opus 4.6 --- 
yarn-project/p2p/src/client/factory.ts | 1 + .../tx_collection/file_store_tx_source.ts | 37 +++++++++++++++++-- .../services/tx_collection/instrumentation.ts | 8 +++- yarn-project/telemetry-client/src/metrics.ts | 22 +++++++++++ 4 files changed, 64 insertions(+), 4 deletions(-) diff --git a/yarn-project/p2p/src/client/factory.ts b/yarn-project/p2p/src/client/factory.ts index 1ec728155f98..08d1dd209343 100644 --- a/yarn-project/p2p/src/client/factory.ts +++ b/yarn-project/p2p/src/client/factory.ts @@ -161,6 +161,7 @@ export async function createP2PClient( config.txCollectionFileStoreUrls, txFileStoreBasePath, logger.createChild('file-store-tx-source'), + telemetry, ); if (fileStoreSources.length > 0) { logger.info(`Using ${fileStoreSources.length} file store sources for tx collection.`, { diff --git a/yarn-project/p2p/src/services/tx_collection/file_store_tx_source.ts b/yarn-project/p2p/src/services/tx_collection/file_store_tx_source.ts index ec8381d2d6cf..d9762a955d43 100644 --- a/yarn-project/p2p/src/services/tx_collection/file_store_tx_source.ts +++ b/yarn-project/p2p/src/services/tx_collection/file_store_tx_source.ts @@ -1,28 +1,51 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; +import { Timer } from '@aztec/foundation/timer'; import { type ReadOnlyFileStore, createReadOnlyFileStore } from '@aztec/stdlib/file-store'; import { Tx, type TxHash } from '@aztec/stdlib/tx'; +import { + type Histogram, + Metrics, + type TelemetryClient, + type UpDownCounter, + getTelemetryClient, +} from '@aztec/telemetry-client'; import type { TxSource } from './tx_source.js'; /** TxSource implementation that downloads txs from a file store. 
*/ export class FileStoreTxSource implements TxSource { + private downloadsSuccess: UpDownCounter; + private downloadsFailed: UpDownCounter; + private downloadDuration: Histogram; + private downloadSize: Histogram; + private constructor( private readonly fileStore: ReadOnlyFileStore, private readonly baseUrl: string, private readonly basePath: string, private readonly log: Logger, - ) {} + telemetry: TelemetryClient, + ) { + const meter = telemetry.getMeter('file-store-tx-source'); + this.downloadsSuccess = meter.createUpDownCounter(Metrics.TX_FILE_STORE_DOWNLOADS_SUCCESS); + this.downloadsFailed = meter.createUpDownCounter(Metrics.TX_FILE_STORE_DOWNLOADS_FAILED); + this.downloadDuration = meter.createHistogram(Metrics.TX_FILE_STORE_DOWNLOAD_DURATION); + this.downloadSize = meter.createHistogram(Metrics.TX_FILE_STORE_DOWNLOAD_SIZE); + } /** * Creates a FileStoreTxSource from a URL. * @param url - The file store URL (s3://, gs://, file://, http://, https://). + * @param basePath - Base path for tx files within the store. * @param log - Optional logger. + * @param telemetry - Optional telemetry client. * @returns The FileStoreTxSource instance, or undefined if creation fails. 
*/ public static async create( url: string, basePath: string, log: Logger = createLogger('p2p:file_store_tx_source'), + telemetry: TelemetryClient = getTelemetryClient(), ): Promise { try { const fileStore = await createReadOnlyFileStore(url, log); @@ -30,7 +53,7 @@ export class FileStoreTxSource implements TxSource { log.warn(`Failed to create file store for URL: ${url}`); return undefined; } - return new FileStoreTxSource(fileStore, url, basePath, log); + return new FileStoreTxSource(fileStore, url, basePath, log, telemetry); } catch (err) { log.warn(`Error creating file store for URL: ${url}`, { error: err }); return undefined; @@ -45,10 +68,15 @@ export class FileStoreTxSource implements TxSource { return Promise.all( txHashes.map(async txHash => { const path = `${this.basePath}/txs/${txHash.toString()}.bin`; + const timer = new Timer(); try { const buffer = await this.fileStore.read(path); + this.downloadsSuccess.add(1); + this.downloadDuration.record(Math.ceil(timer.ms())); + this.downloadSize.record(buffer.length); return Tx.fromBuffer(buffer); } catch { + this.downloadsFailed.add(1); // Tx not found or error reading - return undefined return undefined; } @@ -60,14 +88,17 @@ export class FileStoreTxSource implements TxSource { /** * Creates FileStoreTxSource instances from URLs. * @param urls - Array of file store URLs. + * @param basePath - Base path for tx files within each store. * @param log - Optional logger. + * @param telemetry - Optional telemetry client. * @returns Array of successfully created FileStoreTxSource instances. 
*/ export async function createFileStoreTxSources( urls: string[], basePath: string, log: Logger = createLogger('p2p:file_store_tx_source'), + telemetry: TelemetryClient = getTelemetryClient(), ): Promise { - const sources = await Promise.all(urls.map(url => FileStoreTxSource.create(url, basePath, log))); + const sources = await Promise.all(urls.map(url => FileStoreTxSource.create(url, basePath, log, telemetry))); return sources.filter((s): s is FileStoreTxSource => s !== undefined); } diff --git a/yarn-project/p2p/src/services/tx_collection/instrumentation.ts b/yarn-project/p2p/src/services/tx_collection/instrumentation.ts index 780ee1b043f4..16068bb7045f 100644 --- a/yarn-project/p2p/src/services/tx_collection/instrumentation.ts +++ b/yarn-project/p2p/src/services/tx_collection/instrumentation.ts @@ -18,7 +18,13 @@ export class TxCollectionInstrumentation { const meter = client.getMeter(name); this.txsCollected = createUpDownCounterWithDefault(meter, Metrics.TX_COLLECTOR_COUNT, { - [Attributes.TX_COLLECTION_METHOD]: ['fast-req-resp', 'fast-node-rpc', 'slow-req-resp', 'slow-node-rpc'], + [Attributes.TX_COLLECTION_METHOD]: [ + 'fast-req-resp', + 'fast-node-rpc', + 'slow-req-resp', + 'slow-node-rpc', + 'file-store', + ], }); this.collectionDurationPerTx = meter.createHistogram(Metrics.TX_COLLECTOR_DURATION_PER_TX); diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 0c8f61d07077..0187114e0d5d 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -1402,6 +1402,28 @@ export const TX_FILE_STORE_QUEUE_SIZE: MetricDefinition = { description: 'Number of txs pending upload', valueType: ValueType.INT, }; +export const TX_FILE_STORE_DOWNLOADS_SUCCESS: MetricDefinition = { + name: 'aztec.p2p.tx_file_store.downloads_success', + description: 'Number of successful tx downloads from file storage', + valueType: ValueType.INT, +}; +export const 
TX_FILE_STORE_DOWNLOADS_FAILED: MetricDefinition = { + name: 'aztec.p2p.tx_file_store.downloads_failed', + description: 'Number of failed tx downloads from file storage', + valueType: ValueType.INT, +}; +export const TX_FILE_STORE_DOWNLOAD_DURATION: MetricDefinition = { + name: 'aztec.p2p.tx_file_store.download_duration', + description: 'Duration to download a tx from file storage', + unit: 'ms', + valueType: ValueType.INT, +}; +export const TX_FILE_STORE_DOWNLOAD_SIZE: MetricDefinition = { + name: 'aztec.p2p.tx_file_store.download_size', + description: 'Size of a downloaded tx from file storage', + unit: 'By', + valueType: ValueType.INT, +}; export const IVC_VERIFIER_TIME: MetricDefinition = { name: 'aztec.ivc_verifier.time', From e08a9e51d3721da9a5142ae192b146a500cd69fd Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 17 Feb 2026 19:02:29 +0000 Subject: [PATCH 62/62] chore: deflake epoch_mbps (#20609) . --- .../src/e2e_epochs/epochs_mbps.parallel.test.ts | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts index 1cbdd2bf2b15..1917f419e9f4 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts @@ -281,9 +281,18 @@ describe('e2e_epochs/epochs_mbps', () => { // Wait until all txs are mined const timeout = test.L2_SLOT_DURATION_IN_S * 5; - await Promise.all(txHashes.map(txHash => waitForTx(context.aztecNode, txHash, { timeout }))); + const receipts = await Promise.all(txHashes.map(txHash => waitForTx(context.aztecNode, txHash, { timeout }))); logger.warn(`All L2→L1 message txs have been mined`); + // wait for the other node to synch + const maxBlockNumber = Math.max(...receipts.map(r => r.blockNumber!)); + await retryUntil( + async () => ((await archiver.getCheckpointedL2BlockNumber()) >= 
maxBlockNumber ? true : undefined), + `archiver to checkpoint block ${maxBlockNumber}`, + test.L2_SLOT_DURATION_IN_S * 3, + 0.1, + ); + const multiBlockCheckpoint = await assertMultipleBlocksPerSlot(EXPECTED_BLOCKS_PER_CHECKPOINT, logger); // Verify L2→L1 messages are in the blocks