Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
f4800a9
refactor missing txs collection
deffrian Feb 6, 2026
bd47ea0
missing txs tracker
deffrian Feb 6, 2026
07a27f0
add test
deffrian Feb 6, 2026
e895363
track txs between requests
deffrian Feb 9, 2026
dee87b0
fromArray
deffrian Feb 9, 2026
dd81ca3
verification inside node tx source
deffrian Feb 9, 2026
c6f1471
fix file store
deffrian Feb 9, 2026
3d0e498
fix build
deffrian Feb 9, 2026
5881f21
fix tests
deffrian Feb 10, 2026
87b2cef
verification in file store tx source
deffrian Feb 10, 2026
a14c9f8
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 10, 2026
014b6b3
fix build
deffrian Feb 10, 2026
6682f5a
fix lint
deffrian Feb 10, 2026
4b44406
fix test
deffrian Feb 10, 2026
bf208ba
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 11, 2026
73f2bb1
fix build & suggestion
deffrian Feb 11, 2026
5a3074f
fix other suggestions
deffrian Feb 11, 2026
2c85faf
actual fix
deffrian Feb 11, 2026
e9f4dba
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 12, 2026
cab041b
fix merge
deffrian Feb 12, 2026
49524a2
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 16, 2026
497f35f
suggestions
deffrian Feb 16, 2026
1e81f41
fix
deffrian Feb 16, 2026
0bfa842
fix
deffrian Feb 16, 2026
603ad70
Add flag to delete all prune transactions
PhilWindle Feb 17, 2026
3e96287
Use the tips store
PhilWindle Feb 17, 2026
ab981a2
Gate on config
PhilWindle Feb 17, 2026
c43786e
fix inverted flag
deffrian Feb 17, 2026
be842f0
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 17, 2026
dcb856c
type
deffrian Feb 17, 2026
c8d1ccc
Merge branch 'merge-train/spartan' into nikita/dynamic-missing-txs
deffrian Feb 17, 2026
623d2ce
Just use checkpoint difference
PhilWindle Feb 17, 2026
b103742
fix build
deffrian Feb 17, 2026
30b9692
Set appropriate slot duration
PhilWindle Feb 17, 2026
afd4cbe
fix: Set Aztec slot duration as a multiple of the Ethereum slot durat…
PhilWindle Feb 17, 2026
41cb45a
feat: Re-instate the function optionally to delete all transactions i…
PhilWindle Feb 17, 2026
00e18c5
feat: dynamically adjust missing txs set (#20300)
mralj Feb 17, 2026
afaea64
chore: fund deployer account (#20564)
alexghr Feb 17, 2026
07ffd46
Merge branch 'next' into merge-train/spartan
Feb 17, 2026
637df1c
Merge branch 'next' into merge-train/spartan
Feb 17, 2026
3c5f51d
Merge branch 'next' into merge-train/spartan
Feb 17, 2026
e73e0de
Merge branch 'next' into merge-train/spartan
Feb 17, 2026
84d400c
Merge branch 'next' into merge-train/spartan
Feb 17, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 46 additions & 15 deletions spartan/scripts/ensure_funded_environment.sh
Original file line number Diff line number Diff line change
Expand Up @@ -128,27 +128,58 @@ echo " - Already funded: $ACCOUNTS_ALREADY_FUNDED accounts"
if [ -z "$ACCOUNTS_TO_FUND" ]; then
echo " - Need funding: 0 accounts"
echo ""
echo "All publisher and bot accounts are sufficiently funded!"
echo "All publisher and bot accounts are sufficiently funded."
echo "==================================================="
exit 0
fi
else
# Count accounts to fund
IFS=',' read -r -a accounts_to_fund_array <<< "$ACCOUNTS_TO_FUND"
ACCOUNTS_TO_FUND_COUNT=${#accounts_to_fund_array[@]}

echo " - Need funding: $ACCOUNTS_TO_FUND_COUNT accounts"
echo "==================================================="
echo ""

# Count accounts to fund
IFS=',' read -r -a accounts_to_fund_array <<< "$ACCOUNTS_TO_FUND"
ACCOUNTS_TO_FUND_COUNT=${#accounts_to_fund_array[@]}
# Use the existing ensure_eth_balances.sh script to fund the accounts
echo "Funding accounts to $HIGH_WATERMARK ETH..."
"${spartan}/scripts/ensure_eth_balances.sh" \
"$ETHEREUM_HOST" \
"$FUNDING_PRIVATE_KEY" \
"$LABS_INFRA_MNEMONIC" \
"$ACCOUNTS_TO_FUND" \
"$HIGH_WATERMARK"
fi

echo " - Need funding: $ACCOUNTS_TO_FUND_COUNT accounts"
# --- Fund L1 Contract Deployer ---
echo ""
echo "==================================================="
echo "Checking L1 Contract Deployer"
echo "==================================================="
echo ""

# Use the existing ensure_eth_balances.sh script to fund the accounts
echo "Funding accounts to $HIGH_WATERMARK ETH..."
"${spartan}/scripts/ensure_eth_balances.sh" \
"$ETHEREUM_HOST" \
"$FUNDING_PRIVATE_KEY" \
"$LABS_INFRA_MNEMONIC" \
"$ACCOUNTS_TO_FUND" \
"$HIGH_WATERMARK"
# Fund the L1 contract deployer account up to HIGH_WATERMARK ETH.
# Skipped when ROLLUP_DEPLOYMENT_PRIVATE_KEY is unset or still the
# placeholder value that precedes GCP secret substitution.
if [ -z "${ROLLUP_DEPLOYMENT_PRIVATE_KEY:-}" ] || [ "${ROLLUP_DEPLOYMENT_PRIVATE_KEY}" == "REPLACE_WITH_GCP_SECRET" ]; then
echo "ROLLUP_DEPLOYMENT_PRIVATE_KEY not set — skipping deployer funding."
else
# Derive the deployer address from its private key and read its current balance.
deployer_address=$(cast wallet address --private-key "$ROLLUP_DEPLOYMENT_PRIVATE_KEY")
deployer_balance_wei=$(cast balance --rpc-url "$ETHEREUM_HOST" "$deployer_address")
deployer_balance_eth=$(cast from-wei "$deployer_balance_wei" ether)
# Convert the ETH-denominated watermarks to wei so all comparisons are integer.
low_watermark_wei=$(cast to-wei "$LOW_WATERMARK" ether)
high_watermark_wei=$(cast to-wei "$HIGH_WATERMARK" ether)

# Use bc for the comparison: wei values exceed bash's native integer range.
if (($(echo "$deployer_balance_wei < $low_watermark_wei" | bc -l))); then
# Top up to exactly HIGH_WATERMARK, not by a fixed amount.
topup_wei=$(echo "$high_watermark_wei - $deployer_balance_wei" | bc)
topup_eth=$(cast from-wei "$topup_wei" ether)
echo "  Deployer ($deployer_address): $deployer_balance_eth ETH - NEEDS FUNDING"
echo "  Sending $topup_eth ETH to deployer..."
# Send the top-up from the funding account to the deployer.
cast send \
--rpc-url "$ETHEREUM_HOST" \
--private-key "$FUNDING_PRIVATE_KEY" \
--value "$topup_wei" \
"$deployer_address"
echo "  Deployer funded."
else
echo "  Deployer ($deployer_address): $deployer_balance_eth ETH - OK"
fi
fi

echo ""
echo "✅ Environment funding complete!"
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
// Uses multiple-blocks-per-slot timing configuration.
test = await EpochsTestContext.setup({
ethereumSlotDuration: 8,
aztecSlotDuration: 36,
aztecSlotDuration: 32,
blockDurationMs: 6000,
l1PublishingTime: 8,
enforceTimeTable: true,
Expand Down
53 changes: 46 additions & 7 deletions yarn-project/p2p/src/client/p2p_client.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -298,19 +298,58 @@ describe('P2P Client', () => {
});

describe('Chain prunes', () => {
it('calls handlePrunedBlocks when chain is pruned', async () => {
it('passes deleteAllTxs: false when prune does not cross a checkpoint boundary', async () => {
client = createClient({ txPoolDeleteTxsAfterReorg: true });
blockSource.setProvenBlockNumber(0);
// Only checkpoint up to block 90 — blocks 91-100 are proposed but not checkpointed
blockSource.setCheckpointedBlockNumber(90);
await client.start();

// Prune the chain back to block 90
// Prune 5 blocks (91-100): checkpointed tip stays at checkpoint 90
blockSource.removeBlocks(10);
await client.sync();

// Verify handlePrunedBlocks is called with the correct block ID
expect(txPool.handlePrunedBlocks).toHaveBeenCalledWith({
number: BlockNumber(90),
hash: expect.any(String),
});
expect(txPool.handlePrunedBlocks).toHaveBeenCalledWith(
{ number: BlockNumber(90), hash: expect.any(String) },
{ deleteAllTxs: false },
);
await client.stop();
});

it('passes deleteAllTxs: true when prune crosses a checkpoint boundary', async () => {
client = createClient({ txPoolDeleteTxsAfterReorg: true });
blockSource.setProvenBlockNumber(0);
// Checkpoint all 100 blocks
blockSource.setCheckpointedBlockNumber(100);
await client.start();

// Prune 5 blocks (96-100): checkpointed tip moves from checkpoint 100 to 95
blockSource.removeBlocks(5);
await client.sync();

// The checkpoint number changed across the prune and the config flag is on,
// so the client should ask the pool to delete all un-mined txs.
expect(txPool.handlePrunedBlocks).toHaveBeenCalledWith(
{ number: BlockNumber(95), hash: expect.any(String) },
{ deleteAllTxs: true },
);
await client.stop();
});

it('passes deleteAllTxs: false for cross-checkpoint prune when txPoolDeleteTxsAfterReorg is disabled', async () => {
// Default config has txPoolDeleteTxsAfterReorg: false
blockSource.setProvenBlockNumber(0);
// Checkpoint all 100 blocks
blockSource.setCheckpointedBlockNumber(100);
await client.start();

// Prune 5 blocks (96-100): checkpointed tip moves from checkpoint 100 to 95
blockSource.removeBlocks(5);
await client.sync();

// The prune crosses a checkpoint boundary (an epoch prune), but the
// txPoolDeleteTxsAfterReorg flag is off, so deleteAllTxs must stay false.
expect(txPool.handlePrunedBlocks).toHaveBeenCalledWith(
{ number: BlockNumber(95), hash: expect.any(String) },
{ deleteAllTxs: false },
);
await client.stop();
});

Expand Down
30 changes: 26 additions & 4 deletions yarn-project/p2p/src/client/p2p_client.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import { GENESIS_BLOCK_HEADER_HASH } from '@aztec/constants';
import type { EpochCacheInterface } from '@aztec/epoch-cache';
import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types';
import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types';
import { createLogger } from '@aztec/foundation/log';
import { RunningPromise } from '@aztec/foundation/promise';
import { DateProvider } from '@aztec/foundation/timer';
import type { AztecAsyncKVStore, AztecAsyncSingleton } from '@aztec/kv-store';
import { L2TipsKVStore } from '@aztec/kv-store/stores';
import {
type CheckpointId,
type EthAddress,
type L2Block,
type L2BlockId,
Expand Down Expand Up @@ -201,7 +202,7 @@ export class P2PClient<T extends P2PClientType = P2PClientType.Full>
break;
case 'chain-pruned':
this.txCollection.stopCollectingForBlocksAfter(event.block.number);
await this.handlePruneL2Blocks(event.block);
await this.handlePruneL2Blocks(event.block, event.checkpoint);
break;
case 'chain-checkpointed':
break;
Expand Down Expand Up @@ -759,10 +760,31 @@ export class P2PClient<T extends P2PClientType = P2PClientType.Full>

/**
* Updates the tx pool after a chain prune.
* Detects epoch prunes (checkpoint number changed) and deletes all txs in that case.
* @param latestBlock - The block ID the chain was pruned to.
* @param newCheckpoint - The checkpoint ID after the prune.
*/
private async handlePruneL2Blocks(latestBlock: L2BlockId): Promise<void> {
await this.txPool.handlePrunedBlocks(latestBlock);
private async handlePruneL2Blocks(latestBlock: L2BlockId, newCheckpoint: CheckpointId): Promise<void> {
const deleteAllTxs = this.config.txPoolDeleteTxsAfterReorg && (await this.isEpochPrune(newCheckpoint));
await this.txPool.handlePrunedBlocks(latestBlock, { deleteAllTxs });
}

/**
* Returns true if the prune crossed a checkpoint boundary.
* If the old and new checkpoint numbers are the same, the prune is within a single checkpoint.
* If they differ, the prune spans across checkpoints (epoch prune).
*/
private async isEpochPrune(newCheckpoint: CheckpointId): Promise<boolean> {
// Read the checkpointed tip as it was before this prune was applied.
const tips = await this.l2Tips.getL2Tips();
const oldCheckpointNumber = tips.checkpointed.checkpoint.number;
// If nothing was ever checkpointed, there is no boundary to cross
// — presumably the chain is at genesis; treat as a non-epoch prune.
if (oldCheckpointNumber <= CheckpointNumber.ZERO) {
return false;
}
// Differing checkpoint numbers mean the prune spanned a checkpoint boundary.
const isEpochPrune = oldCheckpointNumber !== newCheckpoint.number;
// Logged at info for both outcomes so prune handling is traceable in ops logs.
this.log.info(
`Detected epoch prune: ${isEpochPrune}. Old checkpoint: ${oldCheckpointNumber}, new checkpoint: ${newCheckpoint.number}`,
);
return isEpochPrune;
}

/** Checks if the slot has changed and calls prepareForSlot if so. */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import { BatchTxRequester } from '../../services/reqresp/batch-tx-requester/batc
import type { BatchTxRequesterLibP2PService } from '../../services/reqresp/batch-tx-requester/interface.js';
import type { IBatchRequestTxValidator } from '../../services/reqresp/batch-tx-requester/tx_validator.js';
import type { ConnectionSampler } from '../../services/reqresp/connection-sampler/connection_sampler.js';
import { MissingTxsTracker } from '../../services/tx_collection/missing_txs_tracker.js';
import { generatePeerIdPrivateKeys } from '../../test-helpers/generate-peer-id-private-keys.js';
import { getPorts } from '../../test-helpers/get-ports.js';
import { makeEnrs } from '../../test-helpers/make-enrs.js';
Expand Down Expand Up @@ -229,7 +230,7 @@ describe('p2p client integration batch txs', () => {
mockP2PService.reqResp = (client0 as any).p2pService.reqresp;

const requester = new BatchTxRequester(
missingTxHashes,
MissingTxsTracker.fromArray(missingTxHashes),
blockProposal,
undefined, // no pinned peer
5_000,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,11 @@ import type { PeerId } from '@libp2p/interface';
import { peerIdFromString } from '@libp2p/peer-id';

import type { P2PConfig } from '../../../config.js';
import { BatchTxRequesterCollector, SendBatchRequestCollector } from '../../../services/index.js';
import type { IBatchRequestTxValidator } from '../../../services/reqresp/batch-tx-requester/tx_validator.js';
import { RateLimitStatus } from '../../../services/reqresp/rate-limiter/rate_limiter.js';
import {
BatchTxRequesterCollector,
SendBatchRequestCollector,
} from '../../../services/tx_collection/proposal_tx_collector.js';
import { AlwaysTrueCircuitVerifier } from '../../../test-helpers/reqresp-nodes.js';
import { MissingTxsTracker } from '../../../services/tx_collection/missing_txs_tracker.js';
import { AlwaysTrueCircuitVerifier } from '../../../test-helpers/index.js';
import {
BENCHMARK_CONSTANTS,
InMemoryAttestationPool,
Expand All @@ -31,7 +29,7 @@ import {
calculateInternalTimeout,
createMockEpochCache,
createMockWorldStateSynchronizer,
} from '../../../test-helpers/testbench-utils.js';
} from '../../../test-helpers/index.js';
import { createP2PClient } from '../../index.js';
import type { P2PClient } from '../../p2p_client.js';
import {
Expand Down Expand Up @@ -214,7 +212,13 @@ async function runCollector(cmd: Extract<WorkerCommand, { type: 'RUN_COLLECTOR'
if (collectorType === 'batch-requester') {
const collector = new BatchTxRequesterCollector(p2pService, logger, new DateProvider(), noopTxValidator);
const fetched = await executeTimeout(
(_signal: AbortSignal) => collector.collectTxs(parsedTxHashes, parsedProposal, pinnedPeer, internalTimeoutMs),
(_signal: AbortSignal) =>
collector.collectTxs(
MissingTxsTracker.fromArray(parsedTxHashes),
parsedProposal,
pinnedPeer,
internalTimeoutMs,
),
timeoutMs,
() => new Error(`Collector timed out after ${timeoutMs}ms`),
);
Expand All @@ -226,7 +230,13 @@ async function runCollector(cmd: Extract<WorkerCommand, { type: 'RUN_COLLECTOR'
BENCHMARK_CONSTANTS.FIXED_MAX_RETRY_ATTEMPTS,
);
const fetched = await executeTimeout(
(_signal: AbortSignal) => collector.collectTxs(parsedTxHashes, parsedProposal, pinnedPeer, internalTimeoutMs),
(_signal: AbortSignal) =>
collector.collectTxs(
MissingTxsTracker.fromArray(parsedTxHashes),
parsedProposal,
pinnedPeer,
internalTimeoutMs,
),
timeoutMs,
() => new Error(`Collector timed out after ${timeoutMs}ms`),
);
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ export interface TxPoolV2 extends TypedEventEmitter<TxPoolV2Events> {
* and validates them before returning to pending.
* @param latestBlock - The latest valid block ID after the prune
*/
handlePrunedBlocks(latestBlock: L2BlockId): Promise<void>;
handlePrunedBlocks(latestBlock: L2BlockId, options?: { deleteAllTxs?: boolean }): Promise<void>;

/**
* Handles failed transaction execution.
Expand Down
21 changes: 21 additions & 0 deletions yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1628,6 +1628,27 @@ describe('TxPoolV2', () => {
expectNoCallbacks(); // handlePrunedBlocks restores to pending, no removal
});

it('deleteAllTxs option deletes all un-mined txs instead of restoring to pending', async () => {
const tx1 = await mockTx(1);
const tx2 = await mockTx(2);
await pool.addPendingTxs([tx1, tx2]);
expectAddedTxs(tx1, tx2);

// Mine both txs
await pool.handleMinedBlock(makeBlock([tx1, tx2], slot1Header));
expectNoCallbacks();
expect(await pool.getTxStatus(tx1.getTxHash())).toBe('mined');
expect(await pool.getTxStatus(tx2.getTxHash())).toBe('mined');

// Prune with deleteAllTxs - should delete all instead of restoring to pending
await pool.handlePrunedBlocks(block0Id, { deleteAllTxs: true });

// Both txs are gone and removal callbacks fired — the un-mined txs were
// deleted outright rather than revalidated back into the pending pool.
expect(await pool.getTxStatus(tx1.getTxHash())).toBe('deleted');
expect(await pool.getTxStatus(tx2.getTxHash())).toBe('deleted');
expect(await pool.getPendingTxCount()).toBe(0);
expectRemovedTxs(tx1, tx2);
});

it('un-mined tx with higher priority evicts conflicting pending tx', async () => {
// Ensure anchor block is valid
db.findLeafIndices.mockResolvedValue([1n]);
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,8 @@ export class AztecKVTxPoolV2 extends (EventEmitter as new () => TypedEventEmitte
return this.#queue.put(() => this.#impl.prepareForSlot(slotNumber));
}

handlePrunedBlocks(latestBlock: L2BlockId): Promise<void> {
return this.#queue.put(() => this.#impl.handlePrunedBlocks(latestBlock));
handlePrunedBlocks(latestBlock: L2BlockId, options?: { deleteAllTxs?: boolean }): Promise<void> {
return this.#queue.put(() => this.#impl.handlePrunedBlocks(latestBlock, options));
}

handleFailedExecution(txHashes: TxHash[]): Promise<void> {
Expand Down
14 changes: 12 additions & 2 deletions yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -518,7 +518,7 @@ export class TxPoolV2Impl {
}
}

async handlePrunedBlocks(latestBlock: L2BlockId): Promise<void> {
async handlePrunedBlocks(latestBlock: L2BlockId, options?: { deleteAllTxs?: boolean }): Promise<void> {
// Step 1: Find transactions mined after the prune point
const txsToUnmine = this.#indices.findTxsMinedAfter(latestBlock.number);
if (txsToUnmine.length === 0) {
Expand All @@ -543,10 +543,20 @@ export class TxPoolV2Impl {
this.#indices.markAsUnmined(meta);
}

// If deleteAllTxs is set (epoch prune), delete all un-mined txs and return early
if (options?.deleteAllTxs) {
const allTxHashes = txsToUnmine.map(m => m.txHash);
await this.#deleteTxsBatch(allTxHashes);
this.#log.info(
`Handled prune to block ${latestBlock.number} with deleteAllTxs: deleted ${allTxHashes.length} txs`,
);
return;
}

// Step 4: Filter out protected txs (they'll be handled by prepareForSlot)
const unprotectedTxs = this.#indices.filterUnprotected(txsToUnmine);

// Step 4: Validate for pending pool
// Step 5: Validate for pending pool
const { valid, invalid } = await this.#revalidateMetadata(unprotectedTxs, 'during handlePrunedBlocks');

// Step 6: Resolve nullifier conflicts and add winners to pending indices
Expand Down
Loading
Loading