diff --git a/spartan/aztec-chaos-scenarios/values/network-requirements.yaml b/spartan/aztec-chaos-scenarios/values/network-requirements.yaml index a56dbe192d68..f300e19f1880 100644 --- a/spartan/aztec-chaos-scenarios/values/network-requirements.yaml +++ b/spartan/aztec-chaos-scenarios/values/network-requirements.yaml @@ -9,7 +9,7 @@ networkShaping: correlation: "75" bandwidth: enabled: true - rate: 25mbps + rate: 200mbps packetLoss: enabled: true loss: diff --git a/spartan/environments/five-tps-long-epoch.env b/spartan/environments/five-tps-long-epoch.env index 84bd8ee6e591..ff87b7ef63a2 100644 --- a/spartan/environments/five-tps-long-epoch.env +++ b/spartan/environments/five-tps-long-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=32 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=64 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/five-tps-short-epoch.env b/spartan/environments/five-tps-short-epoch.env index 56141ee724c4..85f36344fc19 100644 --- a/spartan/environments/five-tps-short-epoch.env +++ b/spartan/environments/five-tps-short-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=10 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/ten-tps-long-epoch.env b/spartan/environments/ten-tps-long-epoch.env index 39ea3d75e197..e3fefc644364 100644 --- a/spartan/environments/ten-tps-long-epoch.env +++ b/spartan/environments/ten-tps-long-epoch.env @@ -6,7 +6,7 @@ 
DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=32 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=64 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/ten-tps-short-epoch.env b/spartan/environments/ten-tps-short-epoch.env index 35868695e0f6..90f16277c385 100644 --- a/spartan/environments/ten-tps-short-epoch.env +++ b/spartan/environments/ten-tps-short-epoch.env @@ -6,7 +6,7 @@ DESTROY_ETH_DEVNET=true CREATE_ETH_DEVNET=${CREATE_ETH_DEVNET:-true} AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=36 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 ETHEREUM_CHAIN_ID=1337 LABS_INFRA_MNEMONIC="test test test test test test test test test test test junk" FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" diff --git a/spartan/environments/tps-scenario.env b/spartan/environments/tps-scenario.env index 18ecd87bc070..b7548574fd39 100644 --- a/spartan/environments/tps-scenario.env +++ b/spartan/environments/tps-scenario.env @@ -4,7 +4,7 @@ GCP_REGION=us-west1-a AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=72 -AZTEC_PROOF_SUBMISSION_WINDOW=16 +AZTEC_PROOF_SUBMISSION_EPOCHS=2 AZTEC_LAG_IN_EPOCHS=1 CREATE_ETH_DEVNET=false diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index abbdaf060174..9d8103c0794b 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -97,6 +97,7 @@ SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-0} SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-8} SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-} SEQ_BUILD_CHECKPOINT_IF_EMPTY=${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-} +SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT=${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT:-0} PROVER_REPLICAS=${PROVER_REPLICAS:-4}
PROVER_AGENTS_PER_PROVER=${PROVER_AGENTS_PER_PROVER:-1} R2_ACCESS_KEY_ID=${R2_ACCESS_KEY_ID:-} @@ -506,6 +507,7 @@ SEQ_MIN_TX_PER_BLOCK = ${SEQ_MIN_TX_PER_BLOCK} SEQ_MAX_TX_PER_BLOCK = ${SEQ_MAX_TX_PER_BLOCK} SEQ_BLOCK_DURATION_MS = ${SEQ_BLOCK_DURATION_MS:-null} SEQ_BUILD_CHECKPOINT_IF_EMPTY = ${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-null} +SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT = ${SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT} PROVER_MNEMONIC = "${LABS_INFRA_MNEMONIC}" PROVER_PUBLISHER_MNEMONIC_START_INDEX = ${PROVER_PUBLISHER_MNEMONIC_START_INDEX} PROVER_PUBLISHERS_PER_PROVER = ${PUBLISHERS_PER_PROVER} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index dccea9a87427..347939595c5b 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -220,6 +220,7 @@ locals { "validator.node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE "validator.node.env.WS_NUM_HISTORIC_BLOCKS" = var.WS_NUM_HISTORIC_BLOCKS "validator.node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS + "validator.node.env.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT" = var.SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT } # Note: nonsensitive() is required here because helm_releases is used in for_each, diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index f3f27dc0cde8..9947d2379f43 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -343,6 +343,12 @@ variable "SEQ_MAX_TX_PER_BLOCK" { default = "8" } +variable "SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT" { + description = "Percentage probability of skipping checkpoint publishing" + type = string + default = "0" +} + variable "SEQ_BLOCK_DURATION_MS" { description = "Duration per block in milliseconds when building multiple blocks per slot" type = string diff --git a/yarn-project/archiver/src/modules/instrumentation.ts 
b/yarn-project/archiver/src/modules/instrumentation.ts index 57f7c6413f75..f0a18a2d17d7 100644 --- a/yarn-project/archiver/src/modules/instrumentation.ts +++ b/yarn-project/archiver/src/modules/instrumentation.ts @@ -13,10 +13,13 @@ import { createUpDownCounterWithDefault, } from '@aztec/telemetry-client'; +import type { CheckpointData } from '../store/block_store.js'; + export class ArchiverInstrumentation { public readonly tracer: Tracer; private blockHeight: Gauge; + private checkpointHeight: Gauge; private txCount: UpDownCounter; private l1BlockHeight: Gauge; private proofsSubmittedDelay: Histogram; @@ -47,6 +50,8 @@ export class ArchiverInstrumentation { this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT); + this.checkpointHeight = meter.createGauge(Metrics.ARCHIVER_CHECKPOINT_HEIGHT); + this.l1BlockHeight = meter.createGauge(Metrics.ARCHIVER_L1_BLOCK_HEIGHT); this.txCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_TOTAL_TXS); @@ -105,6 +110,7 @@ export class ArchiverInstrumentation { public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) { this.syncDurationPerBlock.record(Math.ceil(syncTimePerBlock)); this.blockHeight.record(Math.max(...blocks.map(b => b.number))); + this.checkpointHeight.record(Math.max(...blocks.map(b => b.checkpointNumber))); this.syncBlockCount.add(blocks.length); for (const block of blocks) { @@ -127,8 +133,10 @@ export class ArchiverInstrumentation { this.pruneDuration.record(Math.ceil(duration)); } - public updateLastProvenBlock(blockNumber: number) { - this.blockHeight.record(blockNumber, { [Attributes.STATUS]: 'proven' }); + public updateLastProvenCheckpoint(checkpoint: CheckpointData) { + const lastBlockNumberInCheckpoint = checkpoint.startBlock + checkpoint.numBlocks - 1; + this.blockHeight.record(lastBlockNumberInCheckpoint, { [Attributes.STATUS]: 'proven' }); + this.checkpointHeight.record(checkpoint.checkpointNumber, { [Attributes.STATUS]: 'proven' }); } public 
processProofsVerified(logs: { proverId: string; l2BlockNumber: bigint; delay: bigint }[]) { diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index c2dbca60d559..bc73f3bb35c2 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -599,7 +599,7 @@ export class ArchiverL1Synchronizer implements Traceable { slotNumber: provenSlotNumber, epochNumber: provenEpochNumber, }); - this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint); + this.instrumentation.updateLastProvenCheckpoint(localCheckpointForDestinationProvenCheckpointNumber); } else { this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`); } diff --git a/yarn-project/end-to-end/src/spartan/n_tps.test.ts b/yarn-project/end-to-end/src/spartan/n_tps.test.ts index 8e8e022171de..95405e1b9e84 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps.test.ts @@ -345,7 +345,9 @@ describe('sustained N TPS test', () => { let lowValueTxs = 0; const lowValueSendTx = async (wallet: TestWallet) => { lowValueTxs++; - const feeAmount = Number(randomBigInt(10n)) + 1; + //const feeAmount = Number(randomBigInt(100n)) + 1; + //const feeAmount = 1; + const feeAmount = Math.floor(lowValueTxs / 1000) + 1; const fee = new GasFees(0, feeAmount); logger.info('Sending low value tx ' + lowValueTxs + ' with fee ' + feeAmount); @@ -358,7 +360,7 @@ describe('sustained N TPS test', () => { let highValueTxs = 0; const highValueSendTx = async (wallet: TestWallet) => { highValueTxs++; - const feeAmount = Number(randomBigInt(10n)) + 11; + const feeAmount = Number(randomBigInt(10n)) + 1000; const fee = new GasFees(0, feeAmount); logger.info('Sending high value tx ' + highValueTxs + ' with fee ' + feeAmount); diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts 
index 3ed05d7dbb28..014ff224754e 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -215,6 +215,7 @@ export type EnvVar = | 'SEQ_BUILD_CHECKPOINT_IF_EMPTY' | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_COMMITTEE_MEMBER' | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_NON_COMMITTEE_MEMBER' + | 'SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT' | 'SLASH_MIN_PENALTY_PERCENTAGE' | 'SLASH_MAX_PENALTY_PERCENTAGE' | 'SLASH_VALIDATORS_ALWAYS' diff --git a/yarn-project/p2p/src/client/interface.ts b/yarn-project/p2p/src/client/interface.ts index f70316f88715..1aa02f01a1c7 100644 --- a/yarn-project/p2p/src/client/interface.ts +++ b/yarn-project/p2p/src/client/interface.ts @@ -107,13 +107,6 @@ export type P2P = P2PApiFull & **/ sendTx(tx: Tx): Promise; - /** - * Adds transactions to the pool. Does not send to peers or validate the tx. - * @param txs - The transactions. - * @returns The number of txs added to the pool. Note if the transaction already exists, it will not be added again. - **/ - addTxsToPool(txs: Tx[]): Promise; - /** * Handles failed transaction execution by removing txs from the pool. * @param txHashes - Hashes of the transactions that failed execution. diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 588eccbc2269..4bcfa04539a3 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -582,7 +582,7 @@ export class P2PClient **/ public async sendTx(tx: Tx): Promise { this.#assertIsReady(); - const result = await this.txPool.addPendingTxs([tx]); + const result = await this.txPool.addPendingTxs([tx], { feeComparisonOnly: true }); if (result.accepted.length === 1) { await this.p2pService.propagate(tx); } else { @@ -592,15 +592,6 @@ export class P2PClient } } - /** - * Adds transactions to the pool. Does not send to peers or validate the txs. - * @param txs - The transactions. 
- **/ - public async addTxsToPool(txs: Tx[]): Promise { - this.#assertIsReady(); - return (await this.txPool.addPendingTxs(txs)).accepted.length; - } - /** * Returns whether the given tx hash is flagged as pending or mined. * @param txHash - Hash of the tx to query. diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts index 9227e0b32809..b1aca0208f12 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/deleted_pool.ts @@ -301,6 +301,17 @@ export class DeletedPool { return this.#state.size; } + /** Gets the count of soft-deleted transactions (both prune-based and slot-based). */ + getSoftDeletedCount(): number { + let count = this.#slotDeletedTxs.size; + for (const state of this.#state.values()) { + if (state.softDeleted) { + count++; + } + } + return count; + } + /** * Gets all transaction hashes from pruned blocks. */ diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts index bb134e70df2a..f7fd0d3252de 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.test.ts @@ -183,6 +183,7 @@ describe('EvictionManager', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -204,7 +205,22 @@ describe('EvictionManager', () => { expect(result.shouldIgnore).toBe(false); expect(result.txHashesToEvict).toContain('0x2222'); - expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess); + expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess, undefined); + }); + + it('forwards PreAddContext to rules', async () => { + preAddRule.check.mockResolvedValue({ + shouldIgnore: false, + 
txHashesToEvict: [], + }); + + evictionManager.registerPreAddRule(preAddRule); + const incomingMeta = createMeta('0x1111', 100n); + const context = { feeComparisonOnly: true }; + + await evictionManager.runPreAddRules(incomingMeta, poolAccess, context); + + expect(preAddRule.check).toHaveBeenCalledWith(incomingMeta, poolAccess, context); }); it('returns ignore result immediately when a rule says to ignore', async () => { @@ -318,6 +334,7 @@ describe('EvictionManager', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts index 476f40eb0be7..b2a8ad122011 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts @@ -9,9 +9,11 @@ import { EvictionEvent, type EvictionRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, type PreAddResult, type PreAddRule, + type TaggedEviction, } from './interfaces.js'; /** @@ -47,21 +49,27 @@ export class EvictionManager { * Runs all pre-add rules for an incoming transaction. * Returns combined result of all rules. 
*/ - async runPreAddRules(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { - const allTxHashesToEvict: string[] = []; + async runPreAddRules( + incomingMeta: TxMetaData, + poolAccess: PreAddPoolAccess, + context?: PreAddContext, + ): Promise { + const evictions: TaggedEviction[] = []; + const seen = new Set(); for (const rule of this.preAddRules) { try { - const result = await rule.check(incomingMeta, poolAccess); + const result = await rule.check(incomingMeta, poolAccess, context); if (result.shouldIgnore) { return result; } - // Collect txs to evict from all rules + // Collect txs to evict from all rules, tagged with the rule name for (const txHash of result.txHashesToEvict) { - if (!allTxHashesToEvict.includes(txHash)) { - allTxHashesToEvict.push(txHash); + if (!seen.has(txHash)) { + seen.add(txHash); + evictions.push({ txHash, reason: rule.name }); } } } catch (err) { @@ -77,7 +85,8 @@ export class EvictionManager { return { shouldIgnore: false, - txHashesToEvict: allTxHashesToEvict, + txHashesToEvict: evictions.map(e => e.txHash), + evictions, }; } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts index de7c1d2bf76f..b36f4f506795 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.test.ts @@ -43,6 +43,7 @@ describe('FeePayerBalanceEvictionRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -144,7 +145,7 @@ describe('FeePayerBalanceEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // Low priority evicted - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + 
expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'FeePayerBalanceEviction'); }); it('evicts multiple low-priority txs when balance is insufficient', async () => { @@ -193,7 +194,7 @@ describe('FeePayerBalanceEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0xaaaa']); // Only lowest priority evicted - expect(deleteTxsMock).toHaveBeenCalledWith(['0xaaaa']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0xaaaa'], 'FeePayerBalanceEviction'); }); it('considers claim amount when calculating available balance', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts index 32a5db700677..969fd127b1d1 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts @@ -67,8 +67,8 @@ export class FeePayerBalanceEvictionRule implements EvictionRule { ).flat(); if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); - this.log.verbose(`Evicted ${txsToEvict.length} txs due to insufficient fee payer balance`, { + await pool.deleteTxs(txsToEvict, this.name); + this.log.debug(`Evicted ${txsToEvict.length} txs due to insufficient fee payer balance`, { txHashes: txsToEvict, }); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts index 2a9ca2ea552e..1c7898a3491e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.test.ts @@ -24,6 +24,7 @@ describe('FeePayerBalancePreAddRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + 
estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts index ae2ba0006058..daa5ce665cfc 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts @@ -1,7 +1,7 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, comparePriority } from '../tx_metadata.js'; -import type { PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import type { PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks if a fee payer has sufficient balance to cover the incoming transaction. @@ -19,7 +19,7 @@ export class FeePayerBalancePreAddRule implements PreAddRule { private log = createLogger('p2p:tx_pool_v2:fee_payer_balance_pre_add_rule'); - async check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + async check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, _context?: PreAddContext): Promise { // Get fee payer's on-chain balance const initialBalance = await poolAccess.getFeePayerBalance(incomingMeta.feePayer); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts index faf1cc5b9615..e084e02039d8 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/index.ts @@ -6,9 +6,11 @@ export { type EvictionResult, type EvictionRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, type PreAddResult, type PreAddRule, + type TaggedEviction, } from './interfaces.js'; // Pre-add rules diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts 
b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index 1d0ba416013c..ab5af10718cf 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -67,6 +67,12 @@ export interface PreAddPoolAccess { getLowestPriorityPendingTx(): TxMetaData | undefined; } +/** A single eviction tagged with the rule that caused it. */ +export interface TaggedEviction { + readonly txHash: string; + readonly reason: string; +} + /** * Result of a pre-add check for a single transaction. */ @@ -75,10 +81,18 @@ export interface PreAddResult { readonly shouldIgnore: boolean; /** Tx hashes (as strings) that should be evicted if this tx is added */ readonly txHashesToEvict: string[]; + /** Evictions tagged with the rule name that produced them. Populated by EvictionManager. */ + readonly evictions?: TaggedEviction[]; /** Optional reason for ignoring */ readonly reason?: string; } +/** Context passed to pre-add rules from addPendingTxs. */ +export interface PreAddContext { + /** If true, compare priority fee only (no tx hash tiebreaker). Used for RPC submissions. */ + feeComparisonOnly?: boolean; +} + /** * Pre-add rule interface. Rules check incoming txs before they're added to the pool. * All methods work with TxMetaData for efficiency. @@ -90,9 +104,10 @@ export interface PreAddRule { * Check if incoming tx should be added and which existing txs to evict. * @param incomingMeta - Metadata for the incoming transaction * @param poolAccess - Read-only access to current pool state + * @param context - Optional context from addPendingTxs caller * @returns Result indicating whether to ignore and what to evict */ - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise; + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, context?: PreAddContext): Promise; /** * Updates the configuration for this rule. 
@@ -120,8 +135,8 @@ export interface PoolOperations { /** Get the N lowest priority pending tx hashes */ getLowestPriorityPending(limit: number): string[]; - /** Delete transactions by hash */ - deleteTxs(txHashes: string[]): Promise; + /** Delete transactions by hash, with an optional reason for metrics */ + deleteTxs(txHashes: string[], reason?: string): Promise; } /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts index 51fa1fd3b3cb..fc5acf684f1f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.test.ts @@ -37,6 +37,7 @@ describe('InvalidTxsAfterMiningRule', () => { nullifiers, includeByTimestamp, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData({ includeByTimestamp }), }; }; @@ -122,7 +123,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // Only tx1 has duplicate nullifier - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('evicts transactions with expired timestamps', async () => { @@ -142,7 +143,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // Only tx1 is expired - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('evicts transactions with timestamp equal to block timestamp', async () => { @@ -162,7 +163,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); // tx1 has timestamp <= block timestamp - 
expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('handles transactions with both duplicate nullifiers and expired timestamps', async () => { @@ -182,7 +183,7 @@ describe('InvalidTxsAfterMiningRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x1111']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x1111'], 'InvalidTxsAfterMining'); }); it('handles empty pending transactions list', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts index 8c39393f48af..366eb8e02b21 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts @@ -35,23 +35,20 @@ export class InvalidTxsAfterMiningRule implements EvictionRule { for (const meta of pendingTxs) { // Evict pending txs that share nullifiers with mined txs if (meta.nullifiers.some(nullifier => minedNullifiers.has(nullifier))) { - this.log.verbose(`Evicting tx ${meta.txHash} from pool due to a duplicate nullifier with a mined tx`); txsToEvict.push(meta.txHash); continue; } // Evict pending txs with an expiration timestamp less than or equal to the mined block timestamp if (meta.includeByTimestamp <= timestamp) { - this.log.verbose( - `Evicting tx ${meta.txHash} from pool due to the tx being expired (includeByTimestamp: ${meta.includeByTimestamp}, mined block timestamp: ${timestamp})`, - ); txsToEvict.push(meta.txHash); continue; } } if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); + this.log.info(`Evicted ${txsToEvict.length} invalid txs after block mined`); + await pool.deleteTxs(txsToEvict, this.name); } this.log.debug(`Evicted ${txsToEvict.length} 
invalid txs after block mined`, { txHashes: txsToEvict }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts index b07974d893b0..378431d2d7c2 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.test.ts @@ -31,6 +31,7 @@ describe('InvalidTxsAfterReorgRule', () => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -160,7 +161,7 @@ describe('InvalidTxsAfterReorgRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted.length).toBe(pendingTxs.length); - expect(deleteTxsMock).toHaveBeenCalledWith(result.txsEvicted); + expect(deleteTxsMock).toHaveBeenCalledWith(result.txsEvicted, 'InvalidTxsAfterReorg'); }); it('handles error from deleteTxs operation', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts index f76c23eb7b1e..72462a8a687f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts @@ -72,8 +72,8 @@ export class InvalidTxsAfterReorgRule implements EvictionRule { } if (txsToEvict.length > 0) { - this.log.verbose(`Evicting ${txsToEvict.length} txs from pool due to referencing pruned blocks`); - await pool.deleteTxs(txsToEvict); + this.log.info(`Evicting ${txsToEvict.length} txs from pool due to referencing pruned blocks`); + await pool.deleteTxs(txsToEvict, this.name); } const keptCount = pendingTxs.length - txsToEvict.length; @@ -81,7 +81,7 @@ export class InvalidTxsAfterReorgRule implements EvictionRule { 
this.log.verbose(`Kept ${keptCount} txs that did not reference pruned blocks`); } - this.log.info(`Evicted ${txsToEvict.length} invalid txs after reorg`, { txHashes: txsToEvict }); + this.log.debug(`Evicted ${txsToEvict.length} invalid txs after reorg`, { txHashes: txsToEvict }); return { reason: 'reorg_invalid_txs', diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts index 75aabcd10439..93744abc603b 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.test.ts @@ -132,7 +132,7 @@ describe('LowPriorityEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x3333', '0x4444']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x4444']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x4444'], 'LowPriorityEviction'); }); it('tracks newly added transactions that were evicted', async () => { @@ -148,7 +148,7 @@ describe('LowPriorityEvictionRule', () => { expect(result.success).toBe(true); expect(result.txsEvicted).toEqual(['0x3333', '0x1111']); - expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x1111']); + expect(deleteTxsMock).toHaveBeenCalledWith(['0x3333', '0x1111'], 'LowPriorityEviction'); }); it('handles all transactions being non-evictable', async () => { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts index c9854aeb6a17..047695aaf681 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts @@ -48,19 +48,18 @@ export class LowPriorityEvictionRule implements EvictionRule { }; } - 
this.log.verbose( - `Evicting low priority txs. Pending tx count above limit: ${currentTxCount} > ${this.maxPoolSize}`, - ); + this.log.info(`Evicting low priority txs. Pending tx count above limit: ${currentTxCount} > ${this.maxPoolSize}`); const numberToEvict = currentTxCount - this.maxPoolSize; const txsToEvict = pool.getLowestPriorityPending(numberToEvict); + const toEvictSet = new Set(txsToEvict); + const numNewTxsEvicted = context.newTxHashes.filter(newTxHash => toEvictSet.has(newTxHash)).length; if (txsToEvict.length > 0) { - await pool.deleteTxs(txsToEvict); + this.log.info(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`); + await pool.deleteTxs(txsToEvict, this.name); } - const numNewTxsEvicted = context.newTxHashes.filter(newTxHash => txsToEvict.includes(newTxHash)).length; - - this.log.verbose(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`, { + this.log.debug(`Evicted ${txsToEvict.length} low priority txs, including ${numNewTxsEvicted} newly added txs`, { txHashes: txsToEvict, }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index c4ff83c31aab..e2fc58850110 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -1,5 +1,5 @@ -import { type TxMetaData, stubTxMetaValidationData } from '../tx_metadata.js'; -import type { PreAddPoolAccess } from './interfaces.js'; +import { type TxMetaData, comparePriority, stubTxMetaValidationData } from '../tx_metadata.js'; +import type { PreAddContext, PreAddPoolAccess } from './interfaces.js'; import { LowPriorityPreAddRule } from './low_priority_pre_add_rule.js'; describe('LowPriorityPreAddRule', () => { @@ -16,6 +16,7 @@ describe('LowPriorityPreAddRule', 
() => { nullifiers: [`0x${txHash.slice(2)}null1`], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -147,5 +148,73 @@ describe('LowPriorityPreAddRule', () => { expect(result.txHashesToEvict).toHaveLength(0); }); }); + + describe('feeOnly context', () => { + it('uses comparePriority (default): same fee, higher-priority hash evicts existing', async () => { + // Pick two hashes with the same fee, where incoming has higher priority by hash tiebreaker + const existing = createMeta('0x1111', 100n); + const incoming = createMeta('0x2222', 100n); + + // Determine which direction the tiebreaker goes and swap if needed + const cmp = comparePriority(incoming, existing); + const [incomingMeta, lowestPriorityMeta] = cmp > 0 ? [incoming, existing] : [existing, incoming]; + + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + + // Default context (no feeOnly) — uses full comparePriority + const result = await rule.check(incomingMeta, poolAccess); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); + }); + + it('uses feeComparisonOnly: same fee, incoming is ignored even if it wins hash tiebreaker', async () => { + const existing = createMeta('0x1111', 100n); + const incoming = createMeta('0x2222', 100n); + + // Determine which has higher hash priority and use that as incoming + const cmp = comparePriority(incoming, existing); + const [incomingMeta, lowestPriorityMeta] = cmp > 0 ? 
[incoming, existing] : [existing, incoming]; + + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const context: PreAddContext = { feeComparisonOnly: true }; + + // feeOnly mode: same fee means ignored (no hash tiebreaker) + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toHaveLength(0); + }); + + it('higher fee evicts regardless of feeOnly flag', async () => { + const lowestPriorityMeta = createMeta('0x2222', 50n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 100n); + + // Without feeOnly + const result1 = await rule.check(incomingMeta, poolAccess); + expect(result1.shouldIgnore).toBe(false); + expect(result1.txHashesToEvict).toContain('0x2222'); + + // With feeOnly + const result2 = await rule.check(incomingMeta, poolAccess, { feeComparisonOnly: true }); + expect(result2.shouldIgnore).toBe(false); + expect(result2.txHashesToEvict).toContain('0x2222'); + }); + + it('lower fee is always ignored regardless of feeOnly flag', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 50n); + + // Without feeOnly + const result1 = await rule.check(incomingMeta, poolAccess); + expect(result1.shouldIgnore).toBe(true); + + // With feeOnly + const result2 = await rule.check(incomingMeta, poolAccess, { feeComparisonOnly: true }); + expect(result2.shouldIgnore).toBe(true); + }); + }); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts index a086cd64fc85..fa5cc0360dcd 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts @@ -1,7 +1,7 
@@ import { createLogger } from '@aztec/foundation/log'; -import type { TxMetaData } from '../tx_metadata.js'; -import type { EvictionConfig, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import { type TxMetaData, comparePriority } from '../tx_metadata.js'; +import type { EvictionConfig, PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks if the pool is at capacity and handles low-priority eviction. @@ -20,7 +20,7 @@ export class LowPriorityPreAddRule implements PreAddRule { this.maxPoolSize = config.maxPoolSize; } - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, context?: PreAddContext): Promise { // Skip if max pool size is disabled (0 = unlimited) if (this.maxPoolSize === 0) { return Promise.resolve({ shouldIgnore: false, txHashesToEvict: [] }); @@ -40,8 +40,14 @@ export class LowPriorityPreAddRule implements PreAddRule { return Promise.resolve({ shouldIgnore: false, txHashesToEvict: [] }); } - // If incoming tx has strictly higher priority, evict the lowest priority tx - if (incomingMeta.priorityFee > lowestPriorityMeta.priorityFee) { + // Compare incoming tx against lowest priority tx. + // feeOnly mode (RPC): use strict fee comparison only — avoids churn from hash ordering + // Default (gossip): use full comparePriority (fee + tx hash tiebreaker) for determinism + const isHigherPriority = context?.feeComparisonOnly + ? 
incomingMeta.priorityFee > lowestPriorityMeta.priorityFee + : comparePriority(incomingMeta, lowestPriorityMeta) > 0; + + if (isHigherPriority) { this.log.debug( `Pool at capacity (${currentCount}/${this.maxPoolSize}), evicting ${lowestPriorityMeta.txHash} ` + `(priority ${lowestPriorityMeta.priorityFee}) for ${incomingMeta.txHash} (priority ${incomingMeta.priorityFee})`, diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts index 507f2718c678..4a3b0d6297b7 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts @@ -21,6 +21,7 @@ describe('NullifierConflictRule', () => { nullifiers, includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts index 6eecac930709..05378999f704 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts @@ -1,7 +1,7 @@ import { createLogger } from '@aztec/foundation/log'; import { type TxMetaData, checkNullifierConflict } from '../tx_metadata.js'; -import type { PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; +import type { PreAddContext, PreAddPoolAccess, PreAddResult, PreAddRule } from './interfaces.js'; /** * Pre-add rule that checks for nullifier conflicts between incoming and existing transactions. 
@@ -15,7 +15,7 @@ export class NullifierConflictRule implements PreAddRule { private log = createLogger('p2p:tx_pool_v2:nullifier_conflict_rule'); - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess): Promise { + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, _context?: PreAddContext): Promise { const result = checkNullifierConflict( incomingMeta, nullifier => poolAccess.getTxHashByNullifier(nullifier), diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts new file mode 100644 index 000000000000..6ec711bb826c --- /dev/null +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/instrumentation.ts @@ -0,0 +1,69 @@ +import { + Attributes, + type Meter, + Metrics, + type ObservableGauge, + type ObservableResult, + type TelemetryClient, + type UpDownCounter, + createUpDownCounterWithDefault, +} from '@aztec/telemetry-client'; + +/** Callback that returns the current estimated metadata memory in bytes. */ +export type MetadataMemoryCallback = () => number; + +/** Instrumentation for TxPoolV2Impl internal operations. 
*/ +export class TxPoolV2Instrumentation { + #evictedCounter: UpDownCounter; + #ignoredCounter: UpDownCounter; + #rejectedCounter: UpDownCounter; + #softDeletedHitsCounter: UpDownCounter; + #missingOnProtectCounter: UpDownCounter; + #missingPreviouslyEvictedCounter: UpDownCounter; + #metadataMemoryGauge: ObservableGauge; + + constructor(telemetry: TelemetryClient, metadataMemoryCallback: MetadataMemoryCallback) { + const meter: Meter = telemetry.getMeter('TxPoolV2Impl'); + + this.#evictedCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_EVICTED_COUNT); + this.#ignoredCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_IGNORED_COUNT); + this.#rejectedCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_REJECTED_COUNT); + this.#softDeletedHitsCounter = createUpDownCounterWithDefault(meter, Metrics.MEMPOOL_TX_POOL_V2_SOFT_DELETED_HITS); + this.#missingOnProtectCounter = createUpDownCounterWithDefault( + meter, + Metrics.MEMPOOL_TX_POOL_V2_MISSING_ON_PROTECT, + ); + this.#missingPreviouslyEvictedCounter = createUpDownCounterWithDefault( + meter, + Metrics.MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED, + ); + this.#metadataMemoryGauge = meter.createObservableGauge(Metrics.MEMPOOL_TX_POOL_V2_METADATA_MEMORY); + this.#metadataMemoryGauge.addCallback((result: ObservableResult) => { + result.observe(metadataMemoryCallback()); + }); + } + + recordEvictions(count: number, reason: string) { + this.#evictedCounter.add(count, { [Attributes.TX_POOL_EVICTION_REASON]: reason }); + } + + recordIgnored(count: number) { + this.#ignoredCounter.add(count); + } + + recordRejected(count: number) { + this.#rejectedCounter.add(count); + } + + recordSoftDeletedHits(count: number) { + this.#softDeletedHitsCounter.add(count); + } + + recordMissingOnProtect(count: number) { + this.#missingOnProtectCounter.add(count); + } + + recordMissingPreviouslyEvicted(count: number) { + this.#missingPreviouslyEvictedCounter.add(count); + 
} +} diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index 1057f57e3954..908e42a1eee7 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -39,6 +39,8 @@ export type TxPoolV2Config = { archivedTxLimit: number; /** Minimum age (ms) a transaction must have been in the pool before it's eligible for block building */ minTxPoolAgeMs: number; + /** Maximum number of evicted tx hashes to remember for metrics tracking */ + evictedTxCacheSize: number; }; /** @@ -48,6 +50,7 @@ export const DEFAULT_TX_POOL_V2_CONFIG: TxPoolV2Config = { maxPendingTxCount: 0, // 0 = disabled archivedTxLimit: 0, // 0 = disabled minTxPoolAgeMs: 2_000, + evictedTxCacheSize: 10_000, }; /** @@ -98,7 +101,7 @@ export interface TxPoolV2 extends TypedEventEmitter { * @param opts - Optional metadata (e.g., source for logging) * @returns Result categorizing each transaction as accepted, rejected, or ignored */ - addPendingTxs(txs: Tx[], opts?: { source?: string }): Promise; + addPendingTxs(txs: Tx[], opts?: { source?: string; feeComparisonOnly?: boolean }): Promise; /** * Checks if a transaction can be added without modifying the pool. 
diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts index 287883580a1b..54123c98a798 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts @@ -48,6 +48,7 @@ describe('TxMetaData', () => { nullifiers: [], includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); @@ -81,6 +82,7 @@ describe('TxMetaData', () => { nullifiers, includeByTimestamp: 0n, receivedAt: 0, + estimatedSizeBytes: 0, data: stubTxMetaValidationData(), }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts index 529484c84b6f..64b35b374401 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts @@ -63,6 +63,9 @@ export type TxMetaData = { /** Timestamp (ms) when the tx was received into the pool. 0 for hydrated txs (always eligible). */ receivedAt: number; + + /** Estimated memory footprint of this metadata object in bytes */ + readonly estimatedSizeBytes: number; }; /** Transaction state derived from TxMetaData fields and pool protection status */ @@ -86,6 +89,8 @@ export async function buildTxMetaData(tx: Tx): Promise { const { feeLimit, claimAmount } = await getFeePayerBalanceDelta(tx, ProtocolContractAddress.FeeJuice); + const estimatedSizeBytes = estimateTxMetaDataSize(nullifiers.length); + return { txHash, anchorBlockHeaderHash, @@ -96,6 +101,7 @@ export async function buildTxMetaData(tx: Tx): Promise { nullifiers, includeByTimestamp, receivedAt: 0, + estimatedSizeBytes, data: { getNonEmptyNullifiers: () => nullifierFrs, includeByTimestamp, @@ -109,6 +115,27 @@ export async function buildTxMetaData(tx: Tx): Promise { }; } +// V8 JS object overhead (~64 bytes for a plain object with hidden class). 
+// String overhead: ~32 bytes header + 1 byte per ASCII char (V8 one-byte strings). +// Hex string (0x + 64 hex chars = 66 chars): ~98 bytes per string. +// bigint: ~32 bytes. number: 8 bytes. Fr: ~80 bytes (32 data + object overhead). +const OBJECT_OVERHEAD = 64; +const HEX_STRING_BYTES = 98; +const BIGINT_BYTES = 32; +const FR_BYTES = 80; +// Fixed cost: object shell + txHash + anchorBlockHeaderHash + feePayer (3 hex strings) +// + priorityFee + claimAmount + feeLimit + includeByTimestamp (4 bigints) +// + receivedAt (number, 8 bytes) + estimatedSizeBytes (number, 8 bytes) +// + data closure object (~OBJECT_OVERHEAD + anchorBlockHeaderHashFr Fr + anchorBlockNumber number) +const FIXED_METADATA_BYTES = + OBJECT_OVERHEAD + 3 * HEX_STRING_BYTES + 4 * BIGINT_BYTES + 8 + 8 + OBJECT_OVERHEAD + FR_BYTES + 8; + +/** Estimates the in-memory size of a TxMetaData object based on the number of nullifiers. */ +function estimateTxMetaDataSize(nullifierCount: number): number { + // Per nullifier: one hex string in nullifiers[] + one Fr in the captured nullifierFrs[] + return FIXED_METADATA_BYTES + nullifierCount * (HEX_STRING_BYTES + FR_BYTES); +} + /** Minimal fields required for priority comparison. 
*/ type PriorityComparable = Pick; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts index cf8291a17bad..a9a368dce37c 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts @@ -348,13 +348,15 @@ export class TxPoolIndices { // METRICS // ============================================================================ - /** Counts transactions by state */ - countTxs(): { pending: number; protected: number; mined: number } { + /** Counts transactions by state and estimates total metadata memory usage */ + countTxs(): { pending: number; protected: number; mined: number; totalMetadataBytes: number } { let pending = 0; let protected_ = 0; let mined = 0; + let totalMetadataBytes = 0; for (const meta of this.#metadata.values()) { + totalMetadataBytes += meta.estimatedSizeBytes; const state = this.getTxState(meta); if (state === 'pending') { pending++; @@ -365,7 +367,16 @@ export class TxPoolIndices { } } - return { pending, protected: protected_, mined }; + return { pending, protected: protected_, mined, totalMetadataBytes }; + } + + /** Returns the estimated total memory consumed by all metadata objects */ + getTotalMetadataBytes(): number { + let total = 0; + for (const meta of this.#metadata.values()) { + total += meta.estimatedSizeBytes; + } + return total; } /** Gets all mined transactions with their block IDs */ diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts index 278cc846f162..200d8e16709c 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts @@ -1158,6 +1158,144 @@ describe('TxPoolV2', () => { }); }); }); + + describe('soft-deleted tx resurrection', () => { + let mockValidator: MockProxy>; + let poolWithValidator: 
AztecKVTxPoolV2; + let validatorStore: Awaited>; + let validatorArchiveStore: Awaited>; + + beforeEach(async () => { + mockValidator = mock>(); + mockValidator.validateTx.mockResolvedValue({ result: 'valid' }); + + validatorStore = await openTmpStore('p2p-protect-soft-delete'); + validatorArchiveStore = await openTmpStore('archive-protect-soft-delete'); + poolWithValidator = new AztecKVTxPoolV2(validatorStore, validatorArchiveStore, { + l2BlockSource: mockL2BlockSource, + worldStateSynchronizer: mockWorldState, + createTxValidator: () => Promise.resolve(mockValidator), + }); + await poolWithValidator.start(); + }); + + afterEach(async () => { + await poolWithValidator.stop(); + await validatorStore.delete(); + await validatorArchiveStore.delete(); + }); + + /** Helper: add tx, mine it, prune it, fail validation -> soft-deleted */ + const softDeleteTx = async (tx: Tx) => { + await poolWithValidator.addPendingTxs([tx]); + await poolWithValidator.handleMinedBlock(makeBlock([tx], slot1Header)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('mined'); + + // Make validator reject so tx is soft-deleted on prune + mockValidator.validateTx.mockResolvedValue({ + result: 'invalid', + reason: ['timestamp expired'], + }); + await poolWithValidator.handlePrunedBlocks(block0Id); + + // Verify soft-deleted + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('deleted'); + expect(await poolWithValidator.getTxByHash(tx.getTxHash())).toBeDefined(); + + // Restore validator for subsequent operations + mockValidator.validateTx.mockResolvedValue({ result: 'valid' }); + }; + + it('resurrects a soft-deleted tx as protected instead of reporting it missing', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + // protectTxs should find the soft-deleted tx and resurrect it + const missing = await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + + expect(missing).toHaveLength(0); + expect(await 
poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + }); + + it('resurrected soft-deleted tx is retrievable and in indices', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + + // Should be retrievable + const retrieved = await poolWithValidator.getTxByHash(tx.getTxHash()); + expect(retrieved).toBeDefined(); + expect(retrieved!.getTxHash().toString()).toEqual(tx.getTxHash().toString()); + + // hasTxs should return true (in indices, not just soft-deleted) + const [hasTx] = await poolWithValidator.hasTxs([tx.getTxHash()]); + expect(hasTx).toBe(true); + }); + + it('resurrected tx is unprotected on the next slot', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + await poolWithValidator.protectTxs([tx.getTxHash()], slot1Header); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Advance to slot 2 — protection from slot 1 expires + await poolWithValidator.prepareForSlot(SlotNumber(2)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('pending'); + }); + + it('mix of existing, soft-deleted, and truly missing txs', async () => { + const txExisting = await mockTx(1); + const txSoftDeleted = await mockTx(2); + const txMissing = await mockTx(3); + + // Add txExisting as a regular pending tx + await poolWithValidator.addPendingTxs([txExisting]); + expect(await poolWithValidator.getTxStatus(txExisting.getTxHash())).toBe('pending'); + + // Soft-delete txSoftDeleted + await softDeleteTx(txSoftDeleted); + + // Protect all three + const missing = await poolWithValidator.protectTxs( + [txExisting.getTxHash(), txSoftDeleted.getTxHash(), txMissing.getTxHash()], + slot2Header, + ); + + // Only txMissing should be reported as missing + expect(toStrings(missing)).toEqual([hashOf(txMissing)]); + + // txExisting: protected (was pending, now protected) + expect(await 
poolWithValidator.getTxStatus(txExisting.getTxHash())).toBe('protected'); + // txSoftDeleted: protected (resurrected from soft-deleted) + expect(await poolWithValidator.getTxStatus(txSoftDeleted.getTxHash())).toBe('protected'); + // txMissing: pre-recorded protection, not in pool yet + expect(await poolWithValidator.getTxStatus(txMissing.getTxHash())).toBeUndefined(); + }); + + it('resurrected tx survives a second protectTxs call', async () => { + const tx = await mockTx(1); + await softDeleteTx(tx); + + // Resurrect via protectTxs at slot 1 + await poolWithValidator.protectTxs([tx.getTxHash()], slot1Header); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Re-protect at slot 2 — should update slot, not report missing + const missing = await poolWithValidator.protectTxs([tx.getTxHash()], slot2Header); + expect(missing).toHaveLength(0); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Should survive prepareForSlot(2) + await poolWithValidator.prepareForSlot(SlotNumber(2)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('protected'); + + // Should unprotect at slot 3 + await poolWithValidator.prepareForSlot(SlotNumber(3)); + expect(await poolWithValidator.getTxStatus(tx.getTxHash())).toBe('pending'); + }); + }); }); describe('handleMinedBlock', () => { @@ -3326,6 +3464,111 @@ describe('TxPoolV2', () => { }); }); + describe('feeOnly priority comparison', () => { + it('default (gossip): same-fee tx can evict via hash tiebreaker at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + // Create a tx with the same fee as the lowest (tx1, fee=10). + // Without feeOnly, comparePriority uses hash tiebreaker and may evict. 
+ const tx3 = await mockTxWithFee(3, 10); + + // Determine tiebreaker direction + const tx3HashFr = Fr.fromHexString(tx3.getTxHash().toString()); + const tx1HashFr = Fr.fromHexString(tx1.getTxHash().toString()); + const tx3WinsTiebreaker = tx3HashFr.cmp(tx1HashFr) > 0; + + // Default: no feeOnly flag (gossip path) + const result = await pool.addPendingTxs([tx3]); + + if (tx3WinsTiebreaker) { + expect(toStrings(result.accepted)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('deleted'); + expect(await pool.getTxStatus(tx3.getTxHash())).toBe('pending'); + } else { + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('pending'); + } + }); + + it('feeOnly (RPC): same-fee tx is ignored at capacity regardless of hash', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + // Same fee as the lowest — with feeOnly, no hash tiebreaker, always ignored + const tx3 = await mockTxWithFee(3, 10); + const result = await pool.addPendingTxs([tx3], { feeComparisonOnly: true }); + + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(result.accepted).toHaveLength(0); + expect(await pool.getPendingTxCount()).toBe(2); + expectNoCallbacks(); + }); + + it('feeOnly (RPC): higher-fee tx still evicts at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + const tx3 = await mockTxWithFee(3, 15); + const result = await pool.addPendingTxs([tx3], 
{ feeComparisonOnly: true }); + + expect(toStrings(result.accepted)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expect(await pool.getTxStatus(tx1.getTxHash())).toBe('deleted'); // fee=10 evicted + expect(await pool.getTxStatus(tx3.getTxHash())).toBe('pending'); + }); + + it('feeOnly (RPC): lower-fee tx is ignored at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 2 }); + + const tx1 = await mockTxWithFee(1, 10); + const tx2 = await mockTxWithFee(2, 20); + await pool.addPendingTxs([tx1, tx2]); + expect(await pool.getPendingTxCount()).toBe(2); + clearCallbackTracking(); + + const tx3 = await mockTxWithFee(3, 5); + const result = await pool.addPendingTxs([tx3], { feeComparisonOnly: true }); + + expect(toStrings(result.ignored)).toContain(hashOf(tx3)); + expect(await pool.getPendingTxCount()).toBe(2); + expectNoCallbacks(); + }); + + it('feeOnly has no effect when pool is not at capacity', async () => { + await pool.updateConfig({ maxPendingTxCount: 10 }); + + const tx1 = await mockTxWithFee(1, 10); + + // Both modes accept when below capacity + const result1 = await pool.addPendingTxs([tx1], { feeComparisonOnly: true }); + expect(result1.accepted).toHaveLength(1); + + const tx2 = await mockTxWithFee(2, 10); + const result2 = await pool.addPendingTxs([tx2]); + expect(result2.accepted).toHaveLength(1); + + expect(await pool.getPendingTxCount()).toBe(2); + }); + }); + describe('multiple nullifier conflicts', () => { it('handles tx with multiple nullifiers conflicting with different txs', async () => { const tx1 = await mockPublicTx(1, 5); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts index c37702c77a8f..42d8a39406be 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts @@ -61,7 +61,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => 
TypedEventEmitte }; // Create the implementation - this.#impl = new TxPoolV2Impl(store, archiveStore, deps, callbacks, config, dateProvider, log); + this.#impl = new TxPoolV2Impl(store, archiveStore, deps, callbacks, telemetry, config, dateProvider, log); } // ============================================================================ @@ -70,7 +70,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte // === Core Operations === - addPendingTxs(txs: Tx[], opts: { source?: string } = {}): Promise { + addPendingTxs(txs: Tx[], opts: { source?: string; feeComparisonOnly?: boolean } = {}): Promise { return this.#queue.put(() => this.#impl.addPendingTxs(txs, opts)); } @@ -83,7 +83,7 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte } protectTxs(txHashes: TxHash[], block: BlockHeader): Promise { - return this.#queue.put(() => Promise.resolve(this.#impl.protectTxs(txHashes, block))); + return this.#queue.put(() => this.#impl.protectTxs(txHashes, block)); } addMinedTxs(txs: Tx[], block: BlockHeader, opts: { source?: string } = {}): Promise { @@ -195,7 +195,12 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte this.#queue.put(() => { const counts = this.#impl.countTxs(); return Promise.resolve({ - itemCount: { pending: counts.pending, protected: counts.protected, mined: counts.mined }, + itemCount: { + pending: counts.pending, + protected: counts.protected, + mined: counts.mined, + softDeleted: counts.softDeleted, + }, }); }), () => this.#store.estimateSize(), diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 31979f3ea1a1..73a16d061228 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -9,6 +9,7 @@ import type { L2Block, L2BlockId, L2BlockSource } from '@aztec/stdlib/block'; import type { 
WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { DatabasePublicStateSource } from '@aztec/stdlib/trees'; import { BlockHeader, Tx, TxHash, type TxValidator } from '@aztec/stdlib/tx'; +import type { TelemetryClient } from '@aztec/telemetry-client'; import { TxArchive } from './archive/index.js'; import { DeletedPool } from './deleted_pool.js'; @@ -22,8 +23,10 @@ import { LowPriorityPreAddRule, NullifierConflictRule, type PoolOperations, + type PreAddContext, type PreAddPoolAccess, } from './eviction/index.js'; +import { TxPoolV2Instrumentation } from './instrumentation.js'; import { type AddTxsResult, DEFAULT_TX_POOL_V2_CONFIG, @@ -66,6 +69,8 @@ export class TxPoolV2Impl { #deletedPool: DeletedPool; #evictionManager: EvictionManager; #dateProvider: DateProvider; + #instrumentation: TxPoolV2Instrumentation; + #evictedTxHashes: Set = new Set(); #log: Logger; #callbacks: TxPoolV2Callbacks; @@ -74,6 +79,7 @@ export class TxPoolV2Impl { archiveStore: AztecAsyncKVStore, deps: TxPoolV2Dependencies, callbacks: TxPoolV2Callbacks, + telemetry: TelemetryClient, config: Partial = {}, dateProvider: DateProvider, log: Logger, @@ -89,6 +95,7 @@ export class TxPoolV2Impl { this.#archive = new TxArchive(archiveStore, this.#config.archivedTxLimit, log); this.#deletedPool = new DeletedPool(store, this.#txsDB, log); this.#dateProvider = dateProvider; + this.#instrumentation = new TxPoolV2Instrumentation(telemetry, () => this.#indices.getTotalMetadataBytes()); this.#log = log; this.#callbacks = callbacks; @@ -171,13 +178,15 @@ export class TxPoolV2Impl { this.#log.info(`Deleted ${toDelete.length} invalid/rejected transactions on startup`, { txHashes: toDelete }); } - async addPendingTxs(txs: Tx[], opts: { source?: string }): Promise { + async addPendingTxs(txs: Tx[], opts: { source?: string; feeComparisonOnly?: boolean }): Promise { const accepted: TxHash[] = []; const ignored: TxHash[] = []; const rejected: TxHash[] = []; const acceptedPending = new Set(); 
const poolAccess = this.#createPreAddPoolAccess(); + const preAddContext: PreAddContext | undefined = + opts.feeComparisonOnly !== undefined ? { feeComparisonOnly: opts.feeComparisonOnly } : undefined; await this.#store.transactionAsync(async () => { for (const tx of txs) { @@ -204,7 +213,14 @@ export class TxPoolV2Impl { accepted.push(txHash); } else { // Regular pending tx - validate and run pre-add rules - const result = await this.#tryAddRegularPendingTx(tx, opts, poolAccess, acceptedPending, ignored); + const result = await this.#tryAddRegularPendingTx( + tx, + opts, + poolAccess, + acceptedPending, + ignored, + preAddContext, + ); if (result.status === 'accepted') { acceptedPending.add(txHashStr); } else if (result.status === 'rejected') { @@ -221,6 +237,14 @@ export class TxPoolV2Impl { accepted.push(TxHash.fromString(txHashStr)); } + // Record metrics + if (ignored.length > 0) { + this.#instrumentation.recordIgnored(ignored.length); + } + if (rejected.length > 0) { + this.#instrumentation.recordRejected(rejected.length); + } + // Run post-add eviction rules for pending txs if (acceptedPending.size > 0) { const feePayers = Array.from(acceptedPending).map(txHash => this.#indices.getMetadata(txHash)!.feePayer); @@ -238,6 +262,7 @@ export class TxPoolV2Impl { poolAccess: PreAddPoolAccess, acceptedPending: Set, ignored: TxHash[], + preAddContext?: PreAddContext, ): Promise<{ status: 'accepted' | 'ignored' | 'rejected' }> { const txHash = tx.getTxHash(); const txHashStr = txHash.toString(); @@ -249,24 +274,37 @@ export class TxPoolV2Impl { } // Run pre-add rules - const preAddResult = await this.#evictionManager.runPreAddRules(meta, poolAccess); + const preAddResult = await this.#evictionManager.runPreAddRules(meta, poolAccess, preAddContext); if (preAddResult.shouldIgnore) { this.#log.debug(`Ignoring tx ${txHashStr}: ${preAddResult.reason}`); return { status: 'ignored' }; } - // Evict conflicts - for (const evictHashStr of preAddResult.txHashesToEvict) { - await 
this.#deleteTx(evictHashStr); - this.#log.debug(`Evicted tx ${evictHashStr} due to higher-fee tx ${txHashStr}`, { - evictedTxHash: evictHashStr, - replacementTxHash: txHashStr, - }); - if (acceptedPending.has(evictHashStr)) { - // Evicted tx was from this batch - mark as ignored in result - acceptedPending.delete(evictHashStr); - ignored.push(TxHash.fromString(evictHashStr)); + // Evict conflicts, grouped by rule name for metrics + if (preAddResult.evictions && preAddResult.evictions.length > 0) { + const byReason = new Map(); + for (const { txHash: evictHash, reason } of preAddResult.evictions) { + const group = byReason.get(reason); + if (group) { + group.push(evictHash); + } else { + byReason.set(reason, [evictHash]); + } + } + for (const [reason, hashes] of byReason) { + await this.#evictTxs(hashes, reason); + } + for (const evictHashStr of preAddResult.txHashesToEvict) { + this.#log.debug(`Evicted tx ${evictHashStr} due to higher-fee tx ${txHashStr}`, { + evictedTxHash: evictHashStr, + replacementTxHash: txHashStr, + }); + if (acceptedPending.has(evictHashStr)) { + // Evicted tx was from this batch - mark as ignored in result + acceptedPending.delete(evictHashStr); + ignored.push(TxHash.fromString(evictHashStr)); + } } } @@ -327,9 +365,11 @@ export class TxPoolV2Impl { }); } - protectTxs(txHashes: TxHash[], block: BlockHeader): TxHash[] { + async protectTxs(txHashes: TxHash[], block: BlockHeader): Promise { const slotNumber = block.globalVariables.slotNumber; const missing: TxHash[] = []; + let softDeletedHits = 0; + let missingPreviouslyEvicted = 0; for (const txHash of txHashes) { const txHashStr = txHash.toString(); @@ -337,13 +377,44 @@ export class TxPoolV2Impl { if (this.#indices.has(txHashStr)) { // Update protection for existing tx this.#indices.updateProtection(txHashStr, slotNumber); + } else if (this.#deletedPool.isSoftDeleted(txHashStr)) { + // Resurrect soft-deleted tx as protected + const buffer = await this.#txsDB.getAsync(txHashStr); + if 
(buffer) { + const tx = Tx.fromBuffer(buffer); + await this.#addTx(tx, { protected: slotNumber }); + softDeletedHits++; + } else { + // Data missing despite soft-delete flag — treat as truly missing + this.#indices.setProtection(txHashStr, slotNumber); + missing.push(txHash); + } } else { - // Pre-record protection for tx we don't have yet + // Truly missing — pre-record protection for tx we don't have yet this.#indices.setProtection(txHashStr, slotNumber); missing.push(txHash); + if (this.#evictedTxHashes.has(txHashStr)) { + missingPreviouslyEvicted++; + } } } + // Record metrics + if (softDeletedHits > 0) { + this.#instrumentation.recordSoftDeletedHits(softDeletedHits); + } + if (missing.length > 0) { + this.#log.debug(`protectTxs missing tx hashes: ${missing.map(h => h.toString()).join(', ')}`); + this.#instrumentation.recordMissingOnProtect(missing.length); + } + if (missingPreviouslyEvicted > 0) { + this.#instrumentation.recordMissingPreviouslyEvicted(missingPreviouslyEvicted); + } + + this.#log.info( + `Protected ${txHashes.length} txs, missing: ${missing.length}, soft-deleted hits: ${softDeletedHits}`, + ); + return missing; } @@ -412,6 +483,7 @@ export class TxPoolV2Impl { // Step 3: Filter to only txs that have metadata and are not mined const txsToRestore = this.#indices.filterRestorable(expiredProtected); if (txsToRestore.length === 0) { + this.#log.debug(`Preparing for slot ${slotNumber}, no txs to unprotect`); return; } @@ -423,8 +495,9 @@ export class TxPoolV2Impl { // Step 5: Resolve nullifier conflicts and add winners to pending indices const { added, toEvict } = this.#applyNullifierConflictResolution(valid); - // Step 6: Delete invalid and evicted txs - await this.#deleteTxsBatch([...invalid, ...toEvict]); + // Step 6: Delete invalid txs and evict conflict losers + await this.#deleteTxsBatch(invalid); + await this.#evictTxs(toEvict, 'NullifierConflict'); // Step 7: Run eviction rules (enforce pool size limit) if (added.length > 0) { @@ -471,8 
+544,9 @@ export class TxPoolV2Impl { // Step 6: Resolve nullifier conflicts and add winners to pending indices const { toEvict } = this.#applyNullifierConflictResolution(valid); - // Step 7: Delete invalid and evicted txs - await this.#deleteTxsBatch([...invalid, ...toEvict]); + // Step 7: Delete invalid txs and evict conflict losers + await this.#deleteTxsBatch(invalid); + await this.#evictTxs(toEvict, 'NullifierConflict'); this.#log.info( `Handled prune to block ${latestBlock.number}: ${valid.length} txs restored to pending, ${invalid.length} invalid, ${toEvict.length} evicted due to nullifier conflicts`, @@ -637,8 +711,17 @@ export class TxPoolV2Impl { // === Metrics === - countTxs(): { pending: number; protected: number; mined: number } { - return this.#indices.countTxs(); + countTxs(): { + pending: number; + protected: number; + mined: number; + softDeleted: number; + totalMetadataBytes: number; + } { + return { + ...this.#indices.countTxs(), + softDeleted: this.#deletedPool.getSoftDeletedCount(), + }; } // ============================================================================ @@ -672,9 +755,11 @@ export class TxPoolV2Impl { } const stateStr = typeof state === 'string' ? state : Object.keys(state)[0]; - this.#log.verbose(`Added ${stateStr} tx ${txHashStr}`, { + this.#log.debug(`Added tx ${txHashStr} as ${stateStr}`, { eventName: 'tx-added-to-pool', + txHash: txHashStr, state: stateStr, + source: opts.source, }); return meta; @@ -702,6 +787,29 @@ export class TxPoolV2Impl { } } + /** Evicts transactions: records eviction metric with reason, caches hashes, then deletes. 
*/ + async #evictTxs(txHashes: string[], reason: string): Promise { + if (txHashes.length === 0) { + return; + } + this.#instrumentation.recordEvictions(txHashes.length, reason); + for (const txHashStr of txHashes) { + this.#log.debug(`Evicting tx ${txHashStr}`, { txHash: txHashStr, reason }); + this.#addToEvictedCache(txHashStr); + } + await this.#deleteTxsBatch(txHashes); + } + + /** Adds a tx hash to the bounded evicted cache, evicting the oldest entry if at capacity. */ + #addToEvictedCache(txHashStr: string): void { + if (this.#evictedTxHashes.size >= this.#config.evictedTxCacheSize) { + // FIFO eviction: remove the first (oldest) entry + const oldest = this.#evictedTxHashes.values().next().value!; + this.#evictedTxHashes.delete(oldest); + } + this.#evictedTxHashes.add(txHashStr); + } + // ============================================================================ // PRIVATE HELPERS - Validation & Conflict Resolution // ============================================================================ @@ -893,7 +1001,7 @@ export class TxPoolV2Impl { getFeePayerPendingTxs: (feePayer: string) => this.#indices.getFeePayerPendingTxs(feePayer), getPendingTxCount: () => this.#indices.getPendingTxCount(), getLowestPriorityPending: (limit: number) => this.#indices.getLowestPriorityPending(limit), - deleteTxs: (txHashes: string[]) => this.#deleteTxsBatch(txHashes), + deleteTxs: (txHashes: string[], reason?: string) => this.#evictTxs(txHashes, reason ?? 
'unknown'), }; } diff --git a/yarn-project/p2p/src/test-helpers/testbench-utils.ts b/yarn-project/p2p/src/test-helpers/testbench-utils.ts index d0b8ce352fde..cfa6a923fcb8 100644 --- a/yarn-project/p2p/src/test-helpers/testbench-utils.ts +++ b/yarn-project/p2p/src/test-helpers/testbench-utils.ts @@ -59,7 +59,7 @@ export class InMemoryTxPool extends EventEmitter implements TxPoolV2 { // === Core Operations (TxPoolV2) === - addPendingTxs(txs: Tx[], opts?: { source?: string }): Promise { + addPendingTxs(txs: Tx[], opts?: { source?: string; feeComparisonOnly?: boolean }): Promise { const accepted: TxHash[] = []; const newTxs: Tx[] = []; for (const tx of txs) { diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index a6004e0f88c9..982616eba126 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -55,6 +55,7 @@ export const DefaultSequencerConfig: ResolvedSequencerConfig = { fishermanMode: false, shuffleAttestationOrdering: false, skipPushProposedBlocksToArchiver: false, + skipPublishingCheckpointsPercent: 0, }; /** @@ -208,6 +209,11 @@ export const sequencerConfigMappings: ConfigMappingsType = { minBlocksForCheckpoint: { description: 'Minimum number of blocks required for a checkpoint proposal (test only)', }, + skipPublishingCheckpointsPercent: { + env: 'SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT', + description: 'Percent probability (0 - 100) of sequencer skipping checkpoint publishing (testing only)', + ...numberConfigHelper(DefaultSequencerConfig.skipPublishingCheckpointsPercent), + }, ...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowList']), }; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 1b0f345a2493..fcc773ab3a5d 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ 
b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -336,6 +336,21 @@ export class CheckpointProposalJob implements Traceable { const aztecSlotDuration = this.l1Constants.slotDuration; const slotStartBuildTimestamp = this.getSlotStartBuildTimestamp(); const txTimeoutAt = new Date((slotStartBuildTimestamp + aztecSlotDuration) * 1000); + + // If we have been configured to potentially skip publishing checkpoint then roll the dice here + if ( + this.config.skipPublishingCheckpointsPercent !== undefined && + this.config.skipPublishingCheckpointsPercent > 0 + ) { + const result = Math.max(0, randomInt(100)); + if (result < this.config.skipPublishingCheckpointsPercent) { + this.log.warn( + `Skipping publishing proposal for checkpoint ${checkpoint.number}. Configured percentage: ${this.config.skipPublishingCheckpointsPercent}, generated value: ${result}`, + ); + return checkpoint; + } + } + await this.publisher.enqueueProposeCheckpoint(checkpoint, attestations, attestationsSignature, { txTimeoutAt, forcePendingCheckpointNumber: this.invalidateCheckpoint?.forcePendingCheckpointNumber, diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index d51c3b0383ea..5149006d2f65 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -71,6 +71,8 @@ export interface SequencerConfig { skipPushProposedBlocksToArchiver?: boolean; /** Minimum number of blocks required for a checkpoint proposal (test only, defaults to undefined = no minimum) */ minBlocksForCheckpoint?: number; + /** Skip publishing checkpoint proposals probability (for testing checkpoint prunes only) */ + skipPublishingCheckpointsPercent?: number; } export const SequencerConfigSchema = zodFor()( @@ -106,6 +108,7 @@ export const SequencerConfigSchema = zodFor()( buildCheckpointIfEmpty: z.boolean().optional(), skipPushProposedBlocksToArchiver: z.boolean().optional(), minBlocksForCheckpoint: 
z.number().positive().optional(), + skipPublishingCheckpointsPercent: z.number().gte(0).lte(100).optional(), }), ); diff --git a/yarn-project/telemetry-client/src/attributes.ts b/yarn-project/telemetry-client/src/attributes.ts index 4e9a843ba6e7..06bf938dbb66 100644 --- a/yarn-project/telemetry-client/src/attributes.ts +++ b/yarn-project/telemetry-client/src/attributes.ts @@ -128,6 +128,9 @@ export const NODEJS_EVENT_LOOP_STATE = 'nodejs.eventloop.state'; export const TOPIC_NAME = 'aztec.gossip.topic_name'; +/** The reason a transaction was evicted from the tx pool */ +export const TX_POOL_EVICTION_REASON = 'aztec.mempool.eviction_reason'; + export const TX_COLLECTION_METHOD = 'aztec.tx_collection.method'; /** Scope of L1 transaction (sequencer, prover, or other) */ diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 65feb2ada1c0..5f6b94abe1b3 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -167,6 +167,55 @@ export const MEMPOOL_TX_MINED_DELAY: MetricDefinition = { valueType: ValueType.INT, }; +export const MEMPOOL_TX_POOL_V2_EVICTED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.evicted_count', + description: 'The number of transactions evicted from the tx pool', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_IGNORED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.ignored_count', + description: 'The number of transactions ignored in addPendingTxs', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_REJECTED_COUNT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.rejected_count', + description: 'The number of transactions rejected in addPendingTxs', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_SOFT_DELETED_HITS: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.soft_deleted_hits', + description: 'The number of transactions found in the soft-deleted 
pool', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_MISSING_ON_PROTECT: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.missing_on_protect', + description: 'The number of truly missing transactions in protectTxs', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_MISSING_PREVIOUSLY_EVICTED: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.missing_previously_evicted', + description: 'The number of truly missing transactions in protectTxs that were previously evicted', + valueType: ValueType.INT, +}; +export const MEMPOOL_TX_POOL_V2_METADATA_MEMORY: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.metadata_memory', + description: 'Estimated total memory consumed by in-memory transaction metadata', + unit: 'By', + valueType: ValueType.INT, +}; + +export const MEMPOOL_TX_POOL_V2_DUPLICATE_ADD: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.duplicate_add', + description: 'Transactions received via addPendingTxs that were already in the pool', + valueType: ValueType.INT, +}; + +export const MEMPOOL_TX_POOL_V2_ALREADY_PROTECTED_ADD: MetricDefinition = { + name: 'aztec.mempool.tx_pool_v2.already_protected_add', + description: 'Transactions received via addPendingTxs that were already pre-protected', + valueType: ValueType.INT, +}; + export const DB_NUM_ITEMS: MetricDefinition = { name: 'aztec.db.num_items', description: 'LMDB Num Items', @@ -224,6 +273,11 @@ export const ARCHIVER_BLOCK_HEIGHT: MetricDefinition = { description: 'The height of the latest block processed by the archiver', valueType: ValueType.INT, }; +export const ARCHIVER_CHECKPOINT_HEIGHT: MetricDefinition = { + name: 'aztec.archiver.checkpoint_height', + description: 'The height of the latest checkpoint processed by the archiver', + valueType: ValueType.INT, +}; export const ARCHIVER_ROLLUP_PROOF_DELAY: MetricDefinition = { name: 'aztec.archiver.rollup_proof_delay', description: 'Time after a block is submitted until its proof is published', 
diff --git a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts index fa6615cc8b13..2dc0d77d9376 100644 --- a/yarn-project/txe/src/state_machine/dummy_p2p_client.ts +++ b/yarn-project/txe/src/state_machine/dummy_p2p_client.ts @@ -171,10 +171,6 @@ export class DummyP2P implements P2P { throw new Error('DummyP2P does not implement "hasTxsInPool"'); } - public addTxsToPool(_txs: Tx[]): Promise { - throw new Error('DummyP2P does not implement "addTxs"'); - } - public getSyncedLatestBlockNum(): Promise { throw new Error('DummyP2P does not implement "getSyncedLatestBlockNum"'); }