diff --git a/barretenberg/cpp/CMakePresets.json b/barretenberg/cpp/CMakePresets.json index 420fb003d42e..62b4f4851719 100644 --- a/barretenberg/cpp/CMakePresets.json +++ b/barretenberg/cpp/CMakePresets.json @@ -19,7 +19,6 @@ }, "cacheVariables": { "CMAKE_BUILD_TYPE": "Release", - "TARGET_ARCH": "skylake", "ENABLE_PIC": "ON" } }, diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index a4c70bba834c..3c533971fed9 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -7,7 +7,7 @@ else export native_preset=${NATIVE_PRESET:-clang20-no-avm} fi export hash=$(hash_str $(../../avm-transpiler/bootstrap.sh hash) $(cache_content_hash .rebuild_patterns)) -export native_build_dir=$(scripts/preset-build-dir) +export native_build_dir=$(scripts/preset-build-dir $native_preset) # Injects version number into a given bb binary. # Means we don't actually need to rebuild bb to release a new version if code hasn't changed. diff --git a/barretenberg/cpp/cmake/arch.cmake b/barretenberg/cpp/cmake/arch.cmake index 71b6dbb599ff..e1e7d0978fd7 100644 --- a/barretenberg/cpp/cmake/arch.cmake +++ b/barretenberg/cpp/cmake/arch.cmake @@ -5,7 +5,16 @@ if(WASM) add_compile_options(-fno-exceptions -fno-slp-vectorize) endif() -if(NOT WASM AND NOT ARM AND TARGET_ARCH) +# Auto-detect TARGET_ARCH on x86_64 if not explicitly set (native builds only). +# On ARM, we skip -march entirely — the zig wrappers use an explicit aarch64 target +# to produce generic ARM64 code without CPU-specific extensions (e.g. SVE). +# Skip auto-detection when cross-compiling — the toolchain (e.g. Zig -mcpu) handles +# architecture targeting, and injecting -march here conflicts with it. 
+if(NOT WASM AND NOT TARGET_ARCH AND NOT ARM AND NOT CMAKE_CROSSCOMPILING) + set(TARGET_ARCH "skylake") +endif() + +if(NOT WASM AND TARGET_ARCH) message(STATUS "Target architecture: ${TARGET_ARCH}") add_compile_options(-march=${TARGET_ARCH}) endif() diff --git a/barretenberg/cpp/scripts/zig-c++.sh b/barretenberg/cpp/scripts/zig-c++.sh index 3c1a69cb9ad6..1afa0f82f8fa 100755 --- a/barretenberg/cpp/scripts/zig-c++.sh +++ b/barretenberg/cpp/scripts/zig-c++.sh @@ -1,8 +1,15 @@ #!/bin/bash # Wrapper for zig c++ that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat) # and uses native target on macOS. +# On ARM64 Linux, use an explicit aarch64 target instead of 'native' to produce +# generic ARM64 code. This prevents CPU-specific instructions (e.g. SVE on Graviton) +# from being emitted, ensuring binaries work across all ARM64 machines including +# Apple Silicon in devcontainers. if [[ "$(uname -s)" == "Linux" ]]; then - exec zig c++ -target native-linux-gnu.2.35 "$@" + case "$(uname -m)" in + aarch64|arm64) exec zig c++ -target aarch64-linux-gnu.2.35 "$@" ;; + *) exec zig c++ -target native-linux-gnu.2.35 "$@" ;; + esac else exec zig c++ "$@" fi diff --git a/barretenberg/cpp/scripts/zig-cc.sh b/barretenberg/cpp/scripts/zig-cc.sh index 6f1444434676..34dd6263c6ce 100755 --- a/barretenberg/cpp/scripts/zig-cc.sh +++ b/barretenberg/cpp/scripts/zig-cc.sh @@ -1,8 +1,15 @@ #!/bin/bash # Wrapper for zig cc that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat) # and uses native target on macOS. +# On ARM64 Linux, use an explicit aarch64 target instead of 'native' to produce +# generic ARM64 code. This prevents CPU-specific instructions (e.g. SVE on Graviton) +# from being emitted, ensuring binaries work across all ARM64 machines including +# Apple Silicon in devcontainers. 
if [[ "$(uname -s)" == "Linux" ]]; then - exec zig cc -target native-linux-gnu.2.35 "$@" + case "$(uname -m)" in + aarch64|arm64) exec zig cc -target aarch64-linux-gnu.2.35 "$@" ;; + *) exec zig cc -target native-linux-gnu.2.35 "$@" ;; + esac else exec zig cc "$@" fi diff --git a/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/node_store/cached_content_addressed_tree_store.hpp b/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/node_store/cached_content_addressed_tree_store.hpp index 2f08ef61ef2b..49a4dc1110cd 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/node_store/cached_content_addressed_tree_store.hpp +++ b/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/node_store/cached_content_addressed_tree_store.hpp @@ -867,6 +867,10 @@ void ContentAddressedCachedTreeStore::advance_finalized_block(con ReadTransactionPtr readTx = create_read_transaction(); get_meta(uncommittedMeta); get_meta(committedMeta, *readTx, false); + // do nothing if the block is already finalized + if (committedMeta.finalizedBlockHeight >= blockNumber) { + return; + } if (!dataStore_->read_block_data(blockNumber, blockPayload, *readTx)) { throw std::runtime_error(format("Unable to advance finalized block: ", blockNumber, @@ -874,10 +878,6 @@ void ContentAddressedCachedTreeStore::advance_finalized_block(con forkConstantData_.name_)); } } - // do nothing if the block is already finalized - if (committedMeta.finalizedBlockHeight >= blockNumber) { - return; - } // can currently only finalize up to the unfinalized block height if (committedMeta.finalizedBlockHeight > committedMeta.unfinalizedBlockHeight) { diff --git a/barretenberg/sol/scripts/init_honk.sh b/barretenberg/sol/scripts/init_honk.sh index cf24ef092aba..5711269ce03c 100755 --- a/barretenberg/sol/scripts/init_honk.sh +++ b/barretenberg/sol/scripts/init_honk.sh @@ -1,14 +1,22 @@ #!/usr/bin/env bash +set -eu # the verification key is the same for ultra and ultra zk SRS_PATH="$HOME/.bb-crs" 
OUTPUT_PATH="./src/honk" +KEYGEN="../cpp/build/bin/honk_solidity_key_gen" + +if [ ! -x "$KEYGEN" ]; then + echo "Error: honk_solidity_key_gen binary not found at $KEYGEN" >&2 + echo "Run barretenberg/cpp bootstrap first." >&2 + exit 1 +fi mkdir -p './src/honk/keys' -../cpp/build/bin/honk_solidity_key_gen add2 $OUTPUT_PATH $SRS_PATH -../cpp/build/bin/honk_solidity_key_gen blake $OUTPUT_PATH $SRS_PATH -../cpp/build/bin/honk_solidity_key_gen ecdsa $OUTPUT_PATH $SRS_PATH -../cpp/build/bin/honk_solidity_key_gen recursive $OUTPUT_PATH $SRS_PATH +$KEYGEN add2 $OUTPUT_PATH $SRS_PATH +$KEYGEN blake $OUTPUT_PATH $SRS_PATH +$KEYGEN ecdsa $OUTPUT_PATH $SRS_PATH +$KEYGEN recursive $OUTPUT_PATH $SRS_PATH echo "" echo "✓ VK generation complete" diff --git a/boxes/boxes/vanilla/app/main.ts b/boxes/boxes/vanilla/app/main.ts index ddce11af435c..4c35c1045b97 100644 --- a/boxes/boxes/vanilla/app/main.ts +++ b/boxes/boxes/vanilla/app/main.ts @@ -198,7 +198,7 @@ async function updateVoteTally(wallet: Wallet, from: AztecAddress) { ) ); - const batchResult = await new BatchCall(wallet, payloads).simulate({ from }); + const { result: batchResult } = await new BatchCall(wallet, payloads).simulate({ from }); batchResult.forEach(({ result: value }, i) => { results[i + 1] = value; diff --git a/noir/bootstrap.sh b/noir/bootstrap.sh index 4fd19781f16a..1fcb30d95250 100755 --- a/noir/bootstrap.sh +++ b/noir/bootstrap.sh @@ -25,7 +25,12 @@ function build_native { set -euo pipefail if ! cache_download noir-$hash.tar.gz; then - (cd noir-repo && cargo build --locked --release --target-dir target) + # Serialize cargo operations to avoid race conditions with avm-transpiler + # which may run in parallel and share the same CARGO_HOME. 
+ ( + flock -x 200 + cd noir-repo && cargo build --locked --release --target-dir target + ) 200>/tmp/rustup.lock cache_upload noir-$hash.tar.gz noir-repo/target/release/{nargo,acvm,noir-profiler} fi } diff --git a/spartan/bootstrap.sh b/spartan/bootstrap.sh index 7d10efc6a3a9..b6a2399fc90c 100755 --- a/spartan/bootstrap.sh +++ b/spartan/bootstrap.sh @@ -120,14 +120,24 @@ function run_network_tests { fi } +function slack_notify_scenario_pass { + local label="$1" + if [[ "${REF_NAME:-}" == v* ]]; then + slack_notify "Scenario ${label} tests PASSED on *${REF_NAME}*" "#alerts-next-scenario" + fi +} + function network_tests_1 { run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_1[@]}" + slack_notify_scenario_pass "set-1" } function network_tests_2 { run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_2[@]}" + slack_notify_scenario_pass "set-2" } function network_tests { run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_1[@]}" "${NETWORK_TESTS_2[@]}" + slack_notify_scenario_pass "all" } function network_bench_cmds { diff --git a/spartan/scripts/extract_proving_metrics.ts b/spartan/scripts/extract_proving_metrics.ts index 46d1d01ae966..4216145bfb17 100755 --- a/spartan/scripts/extract_proving_metrics.ts +++ b/spartan/scripts/extract_proving_metrics.ts @@ -79,6 +79,7 @@ const config = parseArgs(process.argv); interface LogEntry { timestamp: string; + trace?: string; jsonPayload?: { message?: string; [key: string]: any; @@ -88,7 +89,7 @@ interface LogEntry { function buildFilter( textFilter: string, - opts?: { module?: string; pod?: string }, + opts?: { module?: string; pod?: string; trace?: string }, ): string { const pod = opts?.pod ?? 
config.pod; let filter = @@ -101,13 +102,16 @@ function buildFilter( if (opts?.module) { filter += ` AND jsonPayload.module="${opts.module}"`; } + if (opts?.trace) { + filter += ` AND trace="${opts.trace}"`; + } return filter; } async function queryLogs( name: string, textFilter: string, - opts?: { module?: string; pod?: string }, + opts?: { module?: string; pod?: string; trace?: string }, ): Promise { const filter = buildFilter(textFilter, opts); const cmd = [ @@ -134,7 +138,7 @@ async function queryLogs( // ── Epoch auto-detection ───────────────────────────────────────────────────── -async function scanForEpoch(): Promise<{ start: string; end: string }> { +async function scanForEpoch(): Promise<{ start: string; end: string; trace?: string }> { process.stderr.write( `Scanning for epoch in ${config.start} to ${config.end}...\n\n`, ); @@ -151,6 +155,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> { epoch: number; txCount: number; timestamp: string; + trace?: string; }[] = []; for (const entry of epochStarts) { const m = msg(entry); @@ -163,6 +168,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> { epoch: parseInt(epochMatch[1]), txCount: p.epochSizeTxs ?? 0, timestamp: entry.timestamp, + trace: entry.trace, }); } } @@ -178,7 +184,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> { process.stderr.write( `Warning: epoch ${config.epoch} not found in scan window. Using full window.\n`, ); - return { start: config.start, end: config.end }; + return { start: config.start, end: config.end, trace: undefined }; } } else { target = starts.find((s) => s.txCount >= 1); @@ -186,12 +192,12 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> { process.stderr.write( `Warning: no epoch with >=1 tx found in scan window. 
Using full window.\n`, ); - return { start: config.start, end: config.end }; + return { start: config.start, end: config.end, trace: undefined }; } } process.stderr.write( - `Found epoch ${target.epoch} (${target.txCount} txs) at ${target.timestamp}\n`, + `Found epoch ${target.epoch} (${target.txCount} txs) at ${target.timestamp}${target.trace ? ` trace=${target.trace}` : ""}\n`, ); // Find matching finalized entry @@ -226,7 +232,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> { `Narrowed window: ${narrowedStart} to ${narrowedEnd}\n\n`, ); - return { start: narrowedStart, end: narrowedEnd }; + return { start: narrowedStart, end: narrowedEnd, trace: target.trace }; } // ── Pipeline order for proving job types ───────────────────────────────────── @@ -249,11 +255,15 @@ const PIPELINE_ORDER = [ // ── Query definitions ──────────────────────────────────────────────────────── -async function fetchAllData() { +async function fetchAllData(trace?: string) { process.stderr.write( `Fetching logs for ${config.pod} in ${config.namespace}\n`, ); - process.stderr.write(`Time range: ${config.start} to ${config.end}\n\n`); + process.stderr.write(`Time range: ${config.start} to ${config.end}\n`); + if (trace) { + process.stderr.write(`Trace filter: ${trace}\n`); + } + process.stderr.write("\n"); const brokerPod = `${config.namespace}-prover-broker-0`; @@ -268,15 +278,16 @@ async function fetchAllData() { brokerNewJobs, brokerCompleteJobs, ] = await Promise.all([ - queryLogs("epoch-start", "Starting epoch.*proving job"), - queryLogs("blob-fields", "Blob fields per checkpoint"), - queryLogs("blob-batching", "Final blob batching"), + queryLogs("epoch-start", "Starting epoch.*proving job", { trace }), + queryLogs("blob-fields", "Blob fields per checkpoint", { trace }), + queryLogs("blob-batching", "Final blob batching", { trace }), queryLogs("starting-block", "Starting block", { module: "prover-client:orchestrator", + trace, }), - queryLogs("processed-txs", 
"Processed.*successful txs"), - queryLogs("adding-txs", "Adding.*transactions to block"), - queryLogs("epoch-finalized", "Finalized proof for epoch"), + queryLogs("processed-txs", "Processed.*successful txs", { trace }), + queryLogs("adding-txs", "Adding.*transactions to block", { trace }), + queryLogs("epoch-finalized", "Finalized proof for epoch", { trace }), queryLogs("broker-new-jobs", "New proving job", { pod: brokerPod }), queryLogs("broker-complete-jobs", "Proving job complete", { pod: brokerPod, @@ -841,7 +852,7 @@ async function main() { config.start = scanResult.start; config.end = scanResult.end; - const data = await fetchAllData(); + const data = await fetchAllData(scanResult.trace); const output = formatOutput(data); console.log(output); } diff --git a/yarn-project/.claude/skills/unit-test-implementation/SKILL.md b/yarn-project/.claude/skills/unit-test-implementation/SKILL.md index 0bc59961fb87..da1c1eab66f7 100644 --- a/yarn-project/.claude/skills/unit-test-implementation/SKILL.md +++ b/yarn-project/.claude/skills/unit-test-implementation/SKILL.md @@ -23,6 +23,28 @@ beforeEach(() => { }); ``` +### NEVER Pass Complex Objects as mock() Props + +`jest-mock-extended`'s `mock(props)` deep-processes any objects passed as initial properties. When those objects contain class instances with internal state (like `Fr`, `EthAddress`, `AztecAddress`, `GasFees`, `Buffer`, etc.), this causes **O(2^n) exponential slowdown** across tests — each test doubles the time of the previous one. + +```typescript +// ❌ NEVER: Passing complex domain objects as mock props +// This causes exponential test slowdown (1s → 2s → 4s → 8s → ...) 
+const constants = { chainId: new Fr(1), coinbase: EthAddress.random(), gasFees: GasFees.empty() }; +beforeEach(() => { + builder = mock({ checkpointNumber, constants }); +}); + +// ✅ GOOD: Create mock without props, then set properties directly +beforeEach(() => { + builder = mock(); + Object.defineProperty(builder, 'checkpointNumber', { value: checkpointNumber }); + Object.defineProperty(builder, 'constants', { value: constants }); +}); +``` + +Simple primitives (strings, numbers, booleans) and arrow functions are safe to pass as props. The issue is specifically with class instances that have complex prototypes. + ### When to Use Real Instances vs Mocks **Mock external dependencies** that are: diff --git a/yarn-project/archiver/src/config.ts b/yarn-project/archiver/src/config.ts index e3ed77eb71dc..a2eb30464302 100644 --- a/yarn-project/archiver/src/config.ts +++ b/yarn-project/archiver/src/config.ts @@ -8,7 +8,12 @@ import { getConfigFromMappings, numberConfigHelper, } from '@aztec/foundation/config'; -import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config'; +import { + type ChainConfig, + type PipelineConfig, + chainConfigMappings, + pipelineConfigMappings, +} from '@aztec/stdlib/config'; import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server'; /** @@ -21,11 +26,13 @@ import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server'; export type ArchiverConfig = ArchiverSpecificConfig & L1ReaderConfig & L1ContractsConfig & + PipelineConfig & // required to pass through to epoch cache BlobClientConfig & ChainConfig; export const archiverConfigMappings: ConfigMappingsType = { ...blobClientConfigMapping, + ...pipelineConfigMappings, archiverPollingIntervalMS: { env: 'ARCHIVER_POLLING_INTERVAL_MS', description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.', diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts 
b/yarn-project/archiver/src/modules/data_store_updater.ts index ccb80e2b450c..32087d4e2b7a 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -457,7 +457,7 @@ export class ArchiverDataStoreUpdater { if (validFnCount > 0) { this.log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`); } - return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns); + await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns); } return true; } diff --git a/yarn-project/archiver/src/store/kv_archiver_store.test.ts b/yarn-project/archiver/src/store/kv_archiver_store.test.ts index 4e70ce10aa1c..92ae927f0ef3 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.test.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.test.ts @@ -1787,19 +1787,146 @@ describe('KVArchiverDataStore', () => { }); }); - it('deleteLogs', async () => { - const block = publishedCheckpoints[0].checkpoint.blocks[0]; - await store.addProposedBlock(block); - await expect(store.addLogs([block])).resolves.toEqual(true); + describe('deleteLogs', () => { + it('deletes public logs for a block', async () => { + const block = publishedCheckpoints[0].checkpoint.blocks[0]; + await store.addProposedBlock(block); + await expect(store.addLogs([block])).resolves.toEqual(true); + + expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual( + block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length, + ); + + await store.deleteLogs([block]); - expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual( - block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length, - ); + expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(0); + }); - // This one is a pain for memory as we would never want to just delete memory in the middle. 
- await store.deleteLogs([block]); + it('deletes contract class logs for a block', async () => { + // Create a block that explicitly has contract class logs + const block = await L2Block.random(BlockNumber(1), { + txsPerBlock: 2, + txOptions: { numContractClassLogs: 1 }, + state: makeStateForBlock(1, 2), + }); + await store.addProposedBlock(block); + await store.addLogs([block]); + + const logsBefore = await store.getContractClassLogs({ fromBlock: BlockNumber(1) }); + expect(logsBefore.logs.length).toBeGreaterThan(0); + + await store.deleteLogs([block]); + + const logsAfter = await store.getContractClassLogs({ fromBlock: BlockNumber(1) }); + expect(logsAfter.logs.length).toEqual(0); + }); + + it('retains private logs from non-reorged block when same tag appears in reorged block', async () => { + const sharedTag = makePrivateLogTag(1, 0, 0); + + // Block 1 with a private log using sharedTag + const cp1 = await makeCheckpointWithLogs(1, { + numTxsPerBlock: 1, + privateLogs: { numLogsPerTx: 1 }, + }); + const block1 = cp1.checkpoint.blocks[0]; + + // Block 2 with a private log using the SAME tag + const cp2 = await makeCheckpointWithLogs(2, { + previousArchive: block1.archive, + numTxsPerBlock: 1, + privateLogs: { numLogsPerTx: 1 }, + }); + const block2 = cp2.checkpoint.blocks[0]; + // Override block2's private log tag to match block1's + block2.body.txEffects[0].privateLogs[0] = makePrivateLog(sharedTag); + + await addProposedBlocks(store, [block1, block2], { force: true }); + await store.addLogs([block1, block2]); + + // Both blocks' logs should be present + const logsBefore = await store.getPrivateLogsByTags([sharedTag]); + expect(logsBefore[0]).toHaveLength(2); + + // Reorg: delete block 2 + await store.deleteLogs([block2]); + + // Block 1's log should still be present + const logsAfter = await store.getPrivateLogsByTags([sharedTag]); + expect(logsAfter[0]).toHaveLength(1); + expect(logsAfter[0][0].blockNumber).toEqual(1); + }); - expect((await 
store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(0); + it('retains public logs from non-reorged block when same tag appears in reorged block', async () => { + const contractAddress = AztecAddress.fromNumber(543254); + const sharedTag = makePublicLogTag(1, 0, 0); + + // Block 1 with a public log using sharedTag + const cp1 = await makeCheckpointWithLogs(1, { + numTxsPerBlock: 1, + publicLogs: { numLogsPerTx: 1, contractAddress }, + }); + const block1 = cp1.checkpoint.blocks[0]; + + // Block 2 with a public log using the SAME tag from the same contract + const cp2 = await makeCheckpointWithLogs(2, { + previousArchive: block1.archive, + numTxsPerBlock: 1, + publicLogs: { numLogsPerTx: 1, contractAddress }, + }); + const block2 = cp2.checkpoint.blocks[0]; + // Override block2's public log tag to match block1's + block2.body.txEffects[0].publicLogs[0] = makePublicLog(sharedTag, contractAddress); + + await addProposedBlocks(store, [block1, block2], { force: true }); + await store.addLogs([block1, block2]); + + // Both blocks' logs should be present + const logsBefore = await store.getPublicLogsByTagsFromContract(contractAddress, [sharedTag]); + expect(logsBefore[0]).toHaveLength(2); + + // Reorg: delete block 2 + await store.deleteLogs([block2]); + + // Block 1's log should still be present + const logsAfter = await store.getPublicLogsByTagsFromContract(contractAddress, [sharedTag]); + expect(logsAfter[0]).toHaveLength(1); + expect(logsAfter[0][0].blockNumber).toEqual(1); + }); + + it('deletes multiple blocks at once', async () => { + const cp1 = await makeCheckpointWithLogs(1, { + numTxsPerBlock: 2, + privateLogs: { numLogsPerTx: 1 }, + publicLogs: { numLogsPerTx: 1 }, + }); + const block1 = cp1.checkpoint.blocks[0]; + + const cp2 = await makeCheckpointWithLogs(2, { + previousArchive: block1.archive, + numTxsPerBlock: 2, + privateLogs: { numLogsPerTx: 1 }, + publicLogs: { numLogsPerTx: 1 }, + }); + const block2 = cp2.checkpoint.blocks[0]; + + 
await addProposedBlocks(store, [block1, block2], { force: true }); + await store.addLogs([block1, block2]); + + // Verify logs exist + expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toBeGreaterThan(0); + + // Delete both blocks at once + await store.deleteLogs([block1, block2]); + + expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(0); + }); + + it('is a no-op when deleting blocks with no logs', async () => { + const block = publishedCheckpoints[0].checkpoint.blocks[0]; + // Don't add logs, just try to delete + await expect(store.deleteLogs([block])).resolves.toEqual(true); + }); }); describe('getTxEffect', () => { diff --git a/yarn-project/archiver/src/store/log_store.ts b/yarn-project/archiver/src/store/log_store.ts index 5ef8656acc4d..271160cac351 100644 --- a/yarn-project/archiver/src/store/log_store.ts +++ b/yarn-project/archiver/src/store/log_store.ts @@ -1,6 +1,6 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; -import { filterAsync } from '@aztec/foundation/collection'; +import { compactArray, filterAsync } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { createLogger } from '@aztec/foundation/log'; import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize'; @@ -313,18 +313,49 @@ export class LogStore { deleteLogs(blocks: L2Block[]): Promise { return this.db.transactionAsync(async () => { - await Promise.all( - blocks.map(async block => { - // Delete private logs - const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? []; - await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag))); - - // Delete public logs - const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? 
[]; - await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key))); - }), + const blockNumbers = new Set(blocks.map(block => block.number)); + const firstBlockToDelete = Math.min(...blockNumbers); + + // Collect all unique private tags across all blocks being deleted + const allPrivateTags = new Set( + compactArray(await Promise.all(blocks.map(block => this.#privateLogKeysByBlock.getAsync(block.number)))).flat(), + ); + + // Trim private logs: for each tag, delete all instances including and after the first block being deleted. + // This hinges on the invariant that logs for a given tag are always inserted in order of block number, which is enforced in #addPrivateLogs. + for (const tag of allPrivateTags) { + const existing = await this.#privateLogsByTag.getAsync(tag); + if (existing === undefined || existing.length === 0) { + continue; + } + const lastIndexToKeep = existing.findLastIndex( + buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete, + ); + const remaining = existing.slice(0, lastIndexToKeep + 1); + await (remaining.length > 0 ? this.#privateLogsByTag.set(tag, remaining) : this.#privateLogsByTag.delete(tag)); + } + + // Collect all unique public keys across all blocks being deleted + const allPublicKeys = new Set( + compactArray(await Promise.all(blocks.map(block => this.#publicLogKeysByBlock.getAsync(block.number)))).flat(), ); + // And do the same as we did with private logs + for (const key of allPublicKeys) { + const existing = await this.#publicLogsByContractAndTag.getAsync(key); + if (existing === undefined || existing.length === 0) { + continue; + } + const lastIndexToKeep = existing.findLastIndex( + buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete, + ); + const remaining = existing.slice(0, lastIndexToKeep + 1); + await (remaining.length > 0 + ? 
this.#publicLogsByContractAndTag.set(key, remaining) + : this.#publicLogsByContractAndTag.delete(key)); + } + + // After trimming the tagged logs, we can delete the block-level keys that track which tags are in which blocks. await Promise.all( blocks.map(block => Promise.all([ diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 92dfc102a387..be709fdc6557 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -111,6 +111,7 @@ import { createBlockProposalHandler, createValidatorClient, } from '@aztec/validator-client'; +import type { SlashingProtectionDatabase } from '@aztec/validator-ha-signer/types'; import { createWorldStateSynchronizer } from '@aztec/world-state'; import { createPublicClient } from 'viem'; @@ -195,6 +196,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { dateProvider?: DateProvider; p2pClientDeps?: P2PClientDeps; proverNodeDeps?: Partial; + slashingProtectionDb?: SlashingProtectionDatabase; } = {}, options: { prefilledPublicData?: PublicDataTreeLeaf[]; @@ -377,6 +379,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { l1ToL2MessageSource: archiver, keyStoreManager, blobClient, + slashingProtectionDb: deps.slashingProtectionDb, }); // If we have a validator client, register it as a source of offenses for the slasher, diff --git a/yarn-project/aztec.js/src/contract/batch_call.test.ts b/yarn-project/aztec.js/src/contract/batch_call.test.ts index 88b6e4427d60..cecd701b27ec 100644 --- a/yarn-project/aztec.js/src/contract/batch_call.test.ts +++ b/yarn-project/aztec.js/src/contract/batch_call.test.ts @@ -146,7 +146,7 @@ describe('BatchCall', () => { { name: 'simulateTx', result: txSimResult }, ] as any); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: results } = await batchCall.simulate({ from: await 
AztecAddress.random() }); // Verify wallet.batch was called once with both utility calls AND simulateTx expect(wallet.batch).toHaveBeenCalledTimes(1); @@ -212,7 +212,7 @@ describe('BatchCall', () => { { name: 'executeUtility', result: utilityResult2 }, ] as any); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: results } = await batchCall.simulate({ from: await AztecAddress.random() }); expect(wallet.batch).toHaveBeenCalledTimes(1); expect(wallet.batch).toHaveBeenCalledWith([ @@ -247,7 +247,7 @@ describe('BatchCall', () => { const utilityResult = UtilityExecutionResult.random(); wallet.batch.mockResolvedValue([{ name: 'executeUtility', result: utilityResult }] as any); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: results } = await batchCall.simulate({ from: await AztecAddress.random() }); expect(results).toHaveLength(1); expect(results[0].offchainEffects).toEqual([]); @@ -307,7 +307,7 @@ describe('BatchCall', () => { { name: 'simulateTx', result: txSimResult }, ] as any); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: results } = await batchCall.simulate({ from: await AztecAddress.random() }); expect(results).toHaveLength(3); expect(results[0].offchainMessages).toEqual([ @@ -349,7 +349,7 @@ describe('BatchCall', () => { wallet.batch.mockResolvedValue([{ name: 'simulateTx', result: txSimResult }] as any); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: results } = await batchCall.simulate({ from: await AztecAddress.random() }); expect(wallet.batch).toHaveBeenCalledTimes(1); expect(wallet.batch).toHaveBeenCalledWith([ @@ -376,7 +376,7 @@ describe('BatchCall', () => { it('should handle empty batch', async () => { batchCall = new BatchCall(wallet, []); - const results = await batchCall.simulate({ from: await AztecAddress.random() }); + const { result: 
results } = await batchCall.simulate({ from: await AztecAddress.random() }); expect(wallet.batch).not.toHaveBeenCalled(); expect(results).toEqual([]); diff --git a/yarn-project/aztec.js/src/contract/batch_call.ts b/yarn-project/aztec.js/src/contract/batch_call.ts index 3012c2dd8f01..ecf21515d616 100644 --- a/yarn-project/aztec.js/src/contract/batch_call.ts +++ b/yarn-project/aztec.js/src/contract/batch_call.ts @@ -3,9 +3,11 @@ import { ExecutionPayload, TxSimulationResult, UtilityExecutionResult, mergeExec import type { BatchedMethod, Wallet } from '../wallet/wallet.js'; import { BaseContractInteraction } from './base_contract_interaction.js'; +import { getGasLimits } from './get_gas_limits.js'; import { type RequestInteractionOptions, type SimulateInteractionOptions, + type SimulationResult, extractOffchainOutput, toSimulateOptions, } from './interaction_options.js'; @@ -45,7 +47,7 @@ export class BatchCall extends BaseContractInteraction { * @param options - An optional object containing additional configuration for the interaction. 
* @returns The results of all the interactions that make up the batch */ - public async simulate(options: SimulateInteractionOptions): Promise { + public async simulate(options: SimulateInteractionOptions): Promise { const { indexedExecutionPayloads, utility } = (await this.getExecutionPayloads()).reduce<{ /** Keep track of the number of private calls to retrieve the return values */ privateIndex: 0; @@ -119,10 +121,11 @@ export class BatchCall extends BaseContractInteraction { } // Process tx simulation result (it comes last if present) + let simulatedTx: TxSimulationResult | undefined; if (indexedExecutionPayloads.length > 0) { const txResultWrapper = batchResults[utility.length]; if (txResultWrapper.name === 'simulateTx') { - const simulatedTx = txResultWrapper.result as TxSimulationResult; + simulatedTx = txResultWrapper.result as TxSimulationResult; indexedExecutionPayloads.forEach(([request, callIndex, resultIndex]) => { const call = request.calls[0]; // As account entrypoints are private, for private functions we retrieve the return values from the first nested call @@ -130,21 +133,34 @@ export class BatchCall extends BaseContractInteraction { // For public functions we retrieve the first values directly from the public output. const rawReturnValues = call.type == FunctionType.PRIVATE - ? simulatedTx.getPrivateReturnValues()?.nested?.[resultIndex].values - : simulatedTx.getPublicReturnValues()?.[resultIndex].values; + ? simulatedTx!.getPrivateReturnValues()?.nested?.[resultIndex].values + : simulatedTx!.getPublicReturnValues()?.[resultIndex].values; results[callIndex] = { result: rawReturnValues ? 
decodeFromAbi(call.returnTypes, rawReturnValues) : [], ...extractOffchainOutput( - simulatedTx.offchainEffects, - simulatedTx.publicInputs.constants.anchorBlockHeader.globalVariables.timestamp, + simulatedTx!.offchainEffects, + simulatedTx!.publicInputs.constants.anchorBlockHeader.globalVariables.timestamp, ), }; }); } } - return results; + if ((options.includeMetadata || options.fee?.estimateGas) && simulatedTx) { + const { gasLimits, teardownGasLimits } = getGasLimits(simulatedTx, options.fee?.estimatedGasPadding); + this.log.verbose( + `Estimated gas limits for batch tx: DA=${gasLimits.daGas} L2=${gasLimits.l2Gas} teardownDA=${teardownGasLimits.daGas} teardownL2=${teardownGasLimits.l2Gas}`, + ); + return { + result: results, + estimatedGas: { gasLimits, teardownGasLimits }, + offchainEffects: [], + offchainMessages: [], + }; + } + + return { result: results, offchainEffects: [], offchainMessages: [] }; } protected async getExecutionPayloads(): Promise { diff --git a/yarn-project/bot/src/factory.ts b/yarn-project/bot/src/factory.ts index 99995c4bf8a6..c62e3b78a85b 100644 --- a/yarn-project/bot/src/factory.ts +++ b/yarn-project/bot/src/factory.ts @@ -27,7 +27,7 @@ import { PrivateTokenContract } from '@aztec/noir-contracts.js/PrivateToken'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; -import { GasSettings } from '@aztec/stdlib/gas'; +import { GasFees, GasSettings } from '@aztec/stdlib/gas'; import type { AztecNode, AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; import { deriveSigningKey } from '@aztec/stdlib/keys'; import { EmbeddedWallet } from '@aztec/wallets/embedded'; @@ -223,7 +223,12 @@ export class BotFactory { const paymentMethod = new FeeJuicePaymentMethodWithClaim(accountManager.address, claim); const deployMethod = await accountManager.getDeployMethod(); const maxFeesPerGas = (await 
this.aztecNode.getCurrentMinFees()).mul(1 + this.config.minFeePadding); - const gasSettings = GasSettings.default({ maxFeesPerGas }); + + const { estimatedGas } = await deployMethod.simulate({ + from: AztecAddress.ZERO, + fee: { estimateGas: true, paymentMethod }, + }); + const gasSettings = GasSettings.from({ ...estimatedGas!, maxFeesPerGas, maxPriorityFeesPerGas: GasFees.empty() }); await this.withNoMinTxsPerBlock(async () => { const { txHash } = await deployMethod.send({ @@ -231,7 +236,7 @@ export class BotFactory { fee: { gasSettings, paymentMethod }, wait: NO_WAIT, }); - this.log.info(`Sent tx for account deployment with hash ${txHash.toString()}`); + this.log.info(`Sent tx for account deployment with hash ${txHash.toString()}`, { gasSettings }); return waitForTx(this.aztecNode, txHash, { timeout: this.config.txMinedWaitSeconds }); }); this.log.info(`Account deployed at ${address}`); @@ -297,8 +302,9 @@ export class BotFactory { await deploy.register(); } else { this.log.info(`Deploying token contract at ${address.toString()}`); - const { txHash } = await deploy.send({ ...deployOpts, wait: NO_WAIT }); - this.log.info(`Sent tx for token setup with hash ${txHash.toString()}`); + const { estimatedGas } = await deploy.simulate({ ...deployOpts, fee: { estimateGas: true } }); + const { txHash } = await deploy.send({ ...deployOpts, fee: { gasSettings: estimatedGas }, wait: NO_WAIT }); + this.log.info(`Sent tx for token setup with hash ${txHash.toString()}`, { estimatedGas }); await this.withNoMinTxsPerBlock(async () => { await waitForTx(this.aztecNode, txHash, { timeout: this.config.txMinedWaitSeconds }); return token; @@ -338,10 +344,19 @@ export class BotFactory { const amm = AMMContract.at(instance.address, this.wallet); this.log.info(`AMM deployed at ${amm.address}`); - const { receipt: minterReceipt } = await lpToken.methods - .set_minter(amm.address, true) - .send({ from: deployer, wait: { timeout: this.config.txMinedWaitSeconds } }); - this.log.info(`Set LP 
token minter to AMM txHash=${minterReceipt.txHash.toString()}`); + const setMinterInteraction = lpToken.methods.set_minter(amm.address, true); + const { estimatedGas: setMinterGas } = await setMinterInteraction.simulate({ + from: deployer, + fee: { estimateGas: true }, + }); + const { receipt: minterReceipt } = await setMinterInteraction.send({ + from: deployer, + fee: { gasSettings: setMinterGas }, + wait: { timeout: this.config.txMinedWaitSeconds }, + }); + this.log.info(`Set LP token minter to AMM txHash=${minterReceipt.txHash.toString()}`, { + estimatedGas: setMinterGas, + }); this.log.info(`Liquidity token initialized`); return amm; @@ -409,22 +424,44 @@ export class BotFactory { .getFunctionCall(), }); - const { receipt: mintReceipt } = await new BatchCall(this.wallet, [ + const mintBatch = new BatchCall(this.wallet, [ token0.methods.mint_to_private(liquidityProvider, MINT_BALANCE), token1.methods.mint_to_private(liquidityProvider, MINT_BALANCE), - ]).send({ from: liquidityProvider, wait: { timeout: this.config.txMinedWaitSeconds } }); + ]); + const { estimatedGas: mintGas } = await mintBatch.simulate({ + from: liquidityProvider, + fee: { estimateGas: true }, + }); + const { receipt: mintReceipt } = await mintBatch.send({ + from: liquidityProvider, + fee: { gasSettings: mintGas }, + wait: { timeout: this.config.txMinedWaitSeconds }, + }); - this.log.info(`Sent mint tx: ${mintReceipt.txHash.toString()}`); + this.log.info(`Sent mint tx: ${mintReceipt.txHash.toString()}`, { estimatedGas: mintGas }); - const { receipt: addLiquidityReceipt } = await amm.methods - .add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, authwitNonce) - .send({ - from: liquidityProvider, - authWitnesses: [token0Authwit, token1Authwit], - wait: { timeout: this.config.txMinedWaitSeconds }, - }); + const addLiquidityInteraction = amm.methods.add_liquidity( + amount0Max, + amount1Max, + amount0Min, + amount1Min, + authwitNonce, + ); + const { estimatedGas: addLiquidityGas } = 
await addLiquidityInteraction.simulate({ + from: liquidityProvider, + fee: { estimateGas: true }, + authWitnesses: [token0Authwit, token1Authwit], + }); + const { receipt: addLiquidityReceipt } = await addLiquidityInteraction.send({ + from: liquidityProvider, + fee: { gasSettings: addLiquidityGas }, + authWitnesses: [token0Authwit, token1Authwit], + wait: { timeout: this.config.txMinedWaitSeconds }, + }); - this.log.info(`Sent tx to add liquidity to the AMM: ${addLiquidityReceipt.txHash.toString()}`); + this.log.info(`Sent tx to add liquidity to the AMM: ${addLiquidityReceipt.txHash.toString()}`, { + estimatedGas: addLiquidityGas, + }); this.log.info(`Liquidity added`); const [newT0Bal, newT1Bal, newLPBal] = await getPrivateBalances(); @@ -445,9 +482,10 @@ export class BotFactory { this.log.info(`Contract ${name} at ${address.toString()} already deployed`); await deploy.register(); } else { - this.log.info(`Deploying contract ${name} at ${address.toString()}`); + const { estimatedGas } = await deploy.simulate({ ...deployOpts, fee: { estimateGas: true } }); + this.log.info(`Deploying contract ${name} at ${address.toString()}`, { estimatedGas }); await this.withNoMinTxsPerBlock(async () => { - const { txHash } = await deploy.send({ ...deployOpts, wait: NO_WAIT }); + const { txHash } = await deploy.send({ ...deployOpts, fee: { gasSettings: estimatedGas }, wait: NO_WAIT }); this.log.info(`Sent contract ${name} setup tx with hash ${txHash.toString()}`); return waitForTx(this.aztecNode, txHash, { timeout: this.config.txMinedWaitSeconds }); }); @@ -491,13 +529,16 @@ export class BotFactory { // PrivateToken's mint accesses contract-level private storage vars (admin, total_supply). const additionalScopes = isStandardToken ? 
undefined : [token.address]; + const mintBatch = new BatchCall(token.wallet, calls); + const { estimatedGas } = await mintBatch.simulate({ from: minter, fee: { estimateGas: true }, additionalScopes }); await this.withNoMinTxsPerBlock(async () => { - const { txHash } = await new BatchCall(token.wallet, calls).send({ + const { txHash } = await mintBatch.send({ from: minter, additionalScopes, + fee: { gasSettings: estimatedGas }, wait: NO_WAIT, }); - this.log.info(`Sent token mint tx with hash ${txHash.toString()}`); + this.log.info(`Sent token mint tx with hash ${txHash.toString()}`, { estimatedGas }); return waitForTx(this.aztecNode, txHash, { timeout: this.config.txMinedWaitSeconds }); }); } diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_ha_sync.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_ha_sync.test.ts new file mode 100644 index 000000000000..fa8b97f02252 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_ha_sync.test.ts @@ -0,0 +1,203 @@ +import type { Archiver } from '@aztec/archiver'; +import type { AztecNodeService } from '@aztec/aztec-node'; +import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses'; +import { NO_WAIT } from '@aztec/aztec.js/contracts'; +import { Fr } from '@aztec/aztec.js/fields'; +import type { Logger } from '@aztec/aztec.js/log'; +import { RollupContract } from '@aztec/ethereum/contracts'; +import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { times, timesAsync } from '@aztec/foundation/collection'; +import { SecretValue } from '@aztec/foundation/config'; +import { retryUntil } from '@aztec/foundation/retry'; +import { bufferToHex } from '@aztec/foundation/string'; +import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; +import { createSharedSlashingProtectionDb } from '@aztec/validator-ha-signer/factory'; 
+ +import { jest } from '@jest/globals'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { type EndToEndContext, getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { TestWallet } from '../test-wallet/test_wallet.js'; +import { proveInteraction } from '../test-wallet/utils.js'; +import { EpochsTestContext } from './epochs_test.js'; + +jest.setTimeout(1000 * 60 * 20); + +const VALIDATOR_COUNT = 4; +const TX_COUNT = 6; + +/** + * E2E test for HA (High Availability) proposed chain sync. + * Verifies that nodes sharing validator keys with the proposer still process + * block proposals and sync to the proposed chain, rather than ignoring them. + */ +describe('e2e_epochs/epochs_ha_sync', () => { + let context: EndToEndContext; + let logger: Logger; + let rollup: RollupContract; + + let test: EpochsTestContext; + let validators: (Operator & { privateKey: `0x${string}` })[]; + let nodes: AztecNodeService[]; + let contract: TestContract; + let wallet: TestWallet; + let from: AztecAddress; + + async function setupTest() { + validators = times(VALIDATOR_COUNT, i => { + const privateKey = bufferToHex(getPrivateKeyFromIndex(i + 3)!); + const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address); + return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) }; + }); + + // Do NOT set skipPublishingCheckpointsPercent here: the initial sequencer needs to + // publish checkpoints during setup (account deployment). We disable it per-validator-node below. 
+ test = await EpochsTestContext.setup({ + numberOfAccounts: 1, + initialValidators: validators, + mockGossipSubNetwork: true, + disableAnvilTestWatcher: true, + aztecEpochDuration: 4, + enforceTimeTable: true, + ethereumSlotDuration: 4, + aztecSlotDuration: 36, + blockDurationMs: 8000, + l1PublishingTime: 2, + attestationPropagationTime: 0.5, + aztecTargetCommitteeSize: VALIDATOR_COUNT, + minTxsPerBlock: 1, + maxTxsPerBlock: 2, + pxeOpts: { syncChainTip: 'proposed' }, + }); + + ({ context, logger, rollup } = test); + wallet = context.wallet; + from = context.accounts[0]; + + // Stop the initial non-validator sequencer. + logger.warn(`Stopping sequencer in initial aztec node.`); + await context.sequencer!.stop(); + + // Create 4 nodes in 2 HA pairs: each pair shares the same two validator keys. + const pk1 = validators[0].privateKey; + const pk2 = validators[1].privateKey; + const pk3 = validators[2].privateKey; + const pk4 = validators[3].privateKey; + + // Disable checkpoint publishing on validator nodes so we can assert proposed chain sync + // strictly before any checkpoint is published by the validators. + // Use different coinbase addresses per node so HA peers would build different blocks + // if the proposer's block isn't correctly propagated to its HA peer. + // Each HA pair shares a slashing protection DB so only one peer can sign per duty. 
+ const baseOpts = { dontStartSequencer: true, skipPublishingCheckpointsPercent: 100 } as const; + const sharedDb1 = await createSharedSlashingProtectionDb(context.dateProvider); + const sharedDb2 = await createSharedSlashingProtectionDb(context.dateProvider); + + logger.warn(`Creating 4 validator nodes in 2 HA pairs.`); + nodes = [ + await test.createValidatorNode([pk1, pk2], { + ...baseOpts, + coinbase: EthAddress.fromNumber(1), + slashingProtectionDb: sharedDb1, + }), + await test.createValidatorNode([pk1, pk2], { + ...baseOpts, + coinbase: EthAddress.fromNumber(2), + slashingProtectionDb: sharedDb1, + }), + await test.createValidatorNode([pk3, pk4], { + ...baseOpts, + coinbase: EthAddress.fromNumber(3), + slashingProtectionDb: sharedDb2, + }), + await test.createValidatorNode([pk3, pk4], { + ...baseOpts, + coinbase: EthAddress.fromNumber(4), + slashingProtectionDb: sharedDb2, + }), + ]; + logger.warn(`Created 4 validator nodes.`); + + // Point the wallet at a validator node so it tracks proposed blocks. + wallet.updateNode(nodes[0]); + + // Register contract for sending txs. + contract = await test.registerTestContract(wallet); + logger.warn(`Test setup completed.`); + } + + afterEach(async () => { + jest.restoreAllMocks(); + await test?.teardown(); + }); + + it('HA peers sync to proposed chain from proposals signed by their own validator keys', async () => { + await setupTest(); + + // Record the checkpoint state after setup. Validators must produce proposed blocks + // beyond this point for the test to be meaningful. + const allArchivers = nodes.map(n => n.getBlockSource() as Archiver); + const initialCheckpointNumber = await rollup.getCheckpointNumber(); + const initialCheckpointedBlock = (await allArchivers[0].getL2Tips()).checkpointed.block.number; + logger.warn(`Initial state: checkpoint ${initialCheckpointNumber}, checkpointed block ${initialCheckpointedBlock}`); + + // Pre-prove and send transactions. 
+ const txs = await timesAsync(TX_COUNT, i => + proveInteraction(context.wallet, contract.methods.emit_nullifier(new Fr(i + 1)), { from }), + ); + const txHashes = await Promise.all(txs.map(tx => tx.send({ wait: NO_WAIT }))); + logger.warn(`Sent ${txHashes.length} transactions.`); + + // Warp to 1 L1 slot before the start of the next L2 slot, so sequencers start cleanly. + const currentSlot = await rollup.getSlotNumber(); + const nextSlot = SlotNumber(currentSlot + 1); + const nextSlotTimestamp = getTimestampForSlot(nextSlot, test.constants); + await context.cheatCodes.eth.warp(Number(nextSlotTimestamp) - test.L1_BLOCK_TIME_IN_S, { + resetBlockInterval: true, + }); + logger.warn(`Warped to 1 L1 slot before L2 slot ${nextSlot}.`); + + // Start the sequencers on all nodes. + await Promise.all(nodes.map(n => n.getSequencer()!.start())); + logger.warn(`Started all sequencers.`); + + // Wait until all nodes have proposed blocks strictly beyond the checkpointed tip. + // This ensures we're checking blocks produced by validators via P2P proposals, + // not blocks synced from L1 checkpoints during setup. + await retryUntil( + async () => { + const tips = await Promise.all(allArchivers.map(a => a.getL2Tips())); + return tips.every( + t => t.proposed.number > initialCheckpointedBlock && t.proposed.number > t.checkpointed.block.number, + ); + }, + 'all nodes to sync proposed blocks beyond checkpointed tip', + test.L2_SLOT_DURATION_IN_S * 5, + 0.5, + ); + + logger.warn(`All nodes synced proposed blocks beyond checkpointed tip`); + + // Take the smallest proposed tip across all nodes and verify the block hash matches on all of them. + // This block is strictly proposed (not checkpointed), so it must have arrived via P2P. 
+ const tips = await Promise.all(allArchivers.map(a => a.getL2Tips())); + const proposedNumbers = tips.map(t => t.proposed.number); + const minProposed = BlockNumber(Math.min(...proposedNumbers)); + expect(minProposed).toBeGreaterThan(initialCheckpointedBlock); + logger.warn(`Verifying block hashes at proposed block ${minProposed}.`, { proposedNumbers }); + + const headers = await Promise.all(allArchivers.map(a => a.getBlockHeader(minProposed))); + const hashes = await Promise.all(headers.map(h => h!.hash())); + for (let i = 1; i < hashes.length; i++) { + expect(hashes[i].toString()).toBe(hashes[0].toString()); + } + logger.warn(`All 4 nodes agree on block hash at proposed block ${minProposed}.`); + + // Verify that no new checkpoints have been published by validators (we disabled checkpoint publishing). + const currentCheckpointNumber = await rollup.getCheckpointNumber(); + expect(currentCheckpointNumber).toBe(initialCheckpointNumber); + logger.warn(`Verified no new checkpoints were published.`); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts index 4b729728b706..69aeef5670e0 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts @@ -9,6 +9,7 @@ import { isL1ToL2MessageReady } from '@aztec/aztec.js/messaging'; import { waitForTx } from '@aztec/aztec.js/node'; import { RollupContract } from '@aztec/ethereum/contracts'; import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; +import { waitUntilL1Timestamp } from '@aztec/ethereum/l1-tx-utils'; import { asyncMap } from '@aztec/foundation/async-map'; import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { times, timesAsync } from '@aztec/foundation/collection'; @@ -17,6 +18,8 @@ import { retryUntil } from '@aztec/foundation/retry'; import { bufferToHex } 
from '@aztec/foundation/string'; import { executeTimeout } from '@aztec/foundation/timer'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import { getSlotAtTimestamp, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; +import { GasFees } from '@aztec/stdlib/gas'; import { TxStatus } from '@aztec/stdlib/tx'; import { jest } from '@jest/globals'; @@ -67,6 +70,7 @@ describe('e2e_epochs/epochs_mbps', () => { maxTxsPerBlock?: number; buildCheckpointIfEmpty?: boolean; deployCrossChainContract?: boolean; + skipPushProposedBlocksToArchiver?: boolean; }) { const { syncChainTip = 'checkpointed', deployCrossChainContract = false, ...setupOpts } = opts; @@ -493,4 +497,91 @@ describe('e2e_epochs/epochs_mbps', () => { const multiBlockCheckpoint = await assertMultipleBlocksPerSlot(2, logger); await waitForProvenCheckpoint(multiBlockCheckpoint); }); + + it('deploys a contract and calls it in separate blocks within a slot', async () => { + await setupTest({ + syncChainTip: 'checkpointed', + minTxsPerBlock: 1, + maxTxsPerBlock: 1, + }); + + // Prepare deploy tx for a new TestContract. Get the instance address so we can construct the call tx. + const highPriority = new GasFees(100, 100); + const lowPriority = new GasFees(1, 1); + + const deployMethod = TestContract.deploy(wallet); + const deployInstance = await deployMethod.getInstance(); + logger.warn(`Will deploy TestContract at ${deployInstance.address}`); + + // Register the contract on the PXE so we can prove the call interaction against it. + await wallet.registerContract(deployInstance, TestContract.artifact); + const deployedContract = TestContract.at(deployInstance.address, wallet); + + // Pre-prove both txs before starting sequencers. This ensures both arrive in the pool + // at the same time, so the sequencer can sort by priority fee for correct ordering. 
+ logger.warn(`Pre-proving deploy tx (high priority) and call tx (low priority)`); + const deployTx = await proveInteraction(wallet, deployMethod, { + from, + fee: { gasSettings: { maxPriorityFeesPerGas: highPriority } }, + }); + const callTx = await proveInteraction(wallet, deployedContract.methods.emit_nullifier_public(new Fr(42)), { + from, + fee: { gasSettings: { maxPriorityFeesPerGas: lowPriority } }, + }); + logger.warn(`Pre-proved both txs`); + + // Start the sequencers + await Promise.all(nodes.map(n => n.getSequencer()!.start())); + logger.warn(`Started all sequencers`); + + // Wait until one L1 slot before the start of the next L2 slot. + // This ensures both txs land in the pending pool right before the proposer starts building. + // REFACTOR: This should go into a shared "waitUntilNextSlotStartsBuilding" utility + const currentL1Block = await test.l1Client.getBlock({ blockTag: 'latest' }); + const currentTimestamp = currentL1Block.timestamp; + const currentSlot = getSlotAtTimestamp(currentTimestamp, test.constants); + const nextSlot = SlotNumber(currentSlot + 1); + const nextSlotTimestamp = getTimestampForSlot(nextSlot, test.constants); + const targetTimestamp = nextSlotTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S); + logger.warn(`Waiting until L1 timestamp ${targetTimestamp} (one L1 slot before L2 slot ${nextSlot})`, { + currentTimestamp, + currentSlot, + nextSlot, + nextSlotTimestamp, + targetTimestamp, + }); + await waitUntilL1Timestamp(test.l1Client, targetTimestamp, undefined, test.L2_SLOT_DURATION_IN_S * 3); + + // Send both pre-proved txs simultaneously, waiting for them to be checkpointed. 
+ const timeout = test.L2_SLOT_DURATION_IN_S * 5; + logger.warn(`Sending both txs and waiting for checkpointed receipts`); + const [deployReceipt, callReceipt] = await executeTimeout( + () => Promise.all([deployTx.send({ wait: { timeout } }), callTx.send({ wait: { timeout } })]), + timeout * 1000, + ); + logger.warn(`Both txs checkpointed`, { + deployBlock: deployReceipt.blockNumber, + callBlock: callReceipt.blockNumber, + }); + + // Both txs should succeed (send throws on revert). Deploy should be in an earlier block. + expect(deployReceipt.blockNumber).toBeLessThan(callReceipt.blockNumber!); + + // Verify both blocks belong to the same checkpoint. + const deployCheckpointedBlock = await retryUntil( + async () => (await context.aztecNode.getCheckpointedBlocks(deployReceipt.blockNumber!, 1))[0], + 'deploy checkpointed block', + timeout, + ); + const callCheckpointedBlock = await retryUntil( + async () => (await context.aztecNode.getCheckpointedBlocks(callReceipt.blockNumber!, 1))[0], + 'call checkpointed block', + timeout, + ); + expect(deployCheckpointedBlock.checkpointNumber).toBe(callCheckpointedBlock.checkpointNumber); + logger.warn(`Both blocks in checkpoint ${deployCheckpointedBlock.checkpointNumber}`); + + // Wait for the checkpoint to be proven. 
+ await waitForProvenCheckpoint(deployCheckpointedBlock.checkpointNumber); + }); }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts index 0068706853bd..1a38395c9df3 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_test.ts @@ -28,6 +28,7 @@ import { type SequencerClient, type SequencerEvents, SequencerState } from '@azt import { type BlockParameter, EthAddress } from '@aztec/stdlib/block'; import { type L1RollupConstants, getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers'; import { tryStop } from '@aztec/stdlib/interfaces/server'; +import type { SlashingProtectionDatabase } from '@aztec/validator-ha-signer/types'; import { join } from 'path'; import type { Hex } from 'viem'; @@ -238,13 +239,21 @@ export class EpochsTestContext { public createValidatorNode( privateKeys: `0x${string}`[], - opts: Partial & { dontStartSequencer?: boolean } = {}, + opts: Partial & { + dontStartSequencer?: boolean; + slashingProtectionDb?: SlashingProtectionDatabase; + } = {}, ) { this.logger.warn('Creating and syncing a validator node...'); return this.createNode({ ...opts, disableValidator: false, validatorPrivateKeys: new SecretValue(privateKeys) }); } - private async createNode(opts: Partial & { dontStartSequencer?: boolean } = {}) { + private async createNode( + opts: Partial & { + dontStartSequencer?: boolean; + slashingProtectionDb?: SlashingProtectionDatabase; + } = {}, + ) { const nodeIndex = this.nodes.length + 1; const actorPrefix = opts.disableValidator ? 'node' : 'validator'; const { mockGossipSubNetwork } = this.context; @@ -257,6 +266,7 @@ export class EpochsTestContext { ...resolvedConfig, dataDirectory: join(this.context.config.dataDirectory!, randomBytes(8).toString('hex')), validatorPrivateKeys: opts.validatorPrivateKeys ?? 
new SecretValue([]), + nodeId: resolvedConfig.nodeId || `${actorPrefix}-${nodeIndex}`, p2pEnabled, p2pIp, }, @@ -265,6 +275,7 @@ export class EpochsTestContext { p2pClientDeps: { p2pServiceFactory: mockGossipSubNetwork ? getMockPubSubP2PServiceFactory(mockGossipSubNetwork) : undefined, }, + slashingProtectionDb: opts.slashingProtectionDb, }, { prefilledPublicData: this.context.prefilledPublicData, diff --git a/yarn-project/end-to-end/src/e2e_p2p/duplicate_proposal_slash.test.ts b/yarn-project/end-to-end/src/e2e_p2p/duplicate_proposal_slash.test.ts index cc2868010c00..fc01b6f7fee1 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/duplicate_proposal_slash.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/duplicate_proposal_slash.test.ts @@ -132,6 +132,9 @@ describe('e2e_p2p_duplicate_proposal_slash', () => { coinbase: coinbase1, broadcastEquivocatedProposals: true, dontStartSequencer: true, + // Prevent HA peer proposals from being added to the archiver, so both + // malicious nodes build their own blocks instead of one yielding to the other. + skipPushProposedBlocksToArchiver: true, }, t.ctx.dateProvider, BOOT_NODE_UDP_PORT + 1, @@ -150,6 +153,9 @@ describe('e2e_p2p_duplicate_proposal_slash', () => { coinbase: coinbase2, broadcastEquivocatedProposals: true, dontStartSequencer: true, + // Prevent HA peer proposals from being added to the archiver, so both + // malicious nodes build their own blocks instead of one yielding to the other. 
+ skipPushProposedBlocksToArchiver: true, }, t.ctx.dateProvider, BOOT_NODE_UDP_PORT + 2, diff --git a/yarn-project/end-to-end/src/e2e_state_vars.test.ts b/yarn-project/end-to-end/src/e2e_state_vars.test.ts index 6d2f58a7f37d..29435110942e 100644 --- a/yarn-project/end-to-end/src/e2e_state_vars.test.ts +++ b/yarn-project/end-to-end/src/e2e_state_vars.test.ts @@ -68,7 +68,7 @@ describe('e2e_state_vars', () => { contract.methods.get_public_immutable_constrained_private_indirect(), contract.methods.get_public_immutable(), ]).simulate({ from: defaultAccountAddress }) - ).map((r: any) => r.result); + ).result.map((r: any) => r.result); expect(a).toEqual(c); expect(b).toEqual({ account: c.account, value: c.value + 1n }); @@ -87,7 +87,7 @@ describe('e2e_state_vars', () => { contract.methods.get_public_immutable_constrained_public_indirect(), contract.methods.get_public_immutable(), ]).simulate({ from: defaultAccountAddress }) - ).map((r: any) => r.result); + ).result.map((r: any) => r.result); expect(a).toEqual(c); expect(b).toEqual({ account: c.account, value: c.value + 1n }); diff --git a/yarn-project/end-to-end/src/simulators/token_simulator.ts b/yarn-project/end-to-end/src/simulators/token_simulator.ts index a2065beb4426..a10bc0c7de17 100644 --- a/yarn-project/end-to-end/src/simulators/token_simulator.ts +++ b/yarn-project/end-to-end/src/simulators/token_simulator.ts @@ -110,7 +110,7 @@ export class TokenSimulator { chunk(calls, 5).map(batch => new BatchCall(this.defaultWallet, batch).simulate({ from: this.defaultAddress })), ) ) - .flat() + .flatMap(r => r.result) .map(r => r.result); expect(results[0]).toEqual(this.totalSupply); diff --git a/yarn-project/epoch-cache/src/config.ts b/yarn-project/epoch-cache/src/config.ts index bd9b76c1cd58..bf6e92f13046 100644 --- a/yarn-project/epoch-cache/src/config.ts +++ b/yarn-project/epoch-cache/src/config.ts @@ -1,11 +1,17 @@ import { type L1ContractsConfig, getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { 
type L1ReaderConfig, getL1ReaderConfigFromEnv } from '@aztec/ethereum/l1-reader'; +import { type PipelineConfig, getPipelineConfigEnvVars } from '@aztec/stdlib/config'; export type EpochCacheConfig = Pick< - L1ReaderConfig & L1ContractsConfig, - 'l1RpcUrls' | 'l1ChainId' | 'viemPollingIntervalMS' | 'l1HttpTimeoutMS' | 'ethereumSlotDuration' + L1ReaderConfig & L1ContractsConfig & PipelineConfig, + | 'l1RpcUrls' + | 'l1ChainId' + | 'viemPollingIntervalMS' + | 'ethereumSlotDuration' + | 'l1HttpTimeoutMS' + | 'enableProposerPipelining' >; export function getEpochCacheConfigEnvVars(): EpochCacheConfig { - return { ...getL1ReaderConfigFromEnv(), ...getL1ContractsConfigEnvVars() }; + return { ...getL1ReaderConfigFromEnv(), ...getL1ContractsConfigEnvVars(), ...getPipelineConfigEnvVars() }; } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 6cd085cb9e16..159eba8cfb4a 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -131,6 +131,7 @@ export type EnvVar = | 'P2P_L2_QUEUE_SIZE' | 'P2P_MAX_PEERS' | 'P2P_PEER_CHECK_INTERVAL_MS' + | 'P2P_PEER_FAILED_BAN_TIME_MS' | 'P2P_PEER_PENALTY_VALUES' | 'P2P_QUERY_FOR_IP' | 'P2P_REQRESP_INDIVIDUAL_REQUEST_TIMEOUT_MS' @@ -218,6 +219,7 @@ export type EnvVar = | 'SEQ_PUBLISHER_ALLOW_INVALID_STATES' | 'SEQ_PUBLISHER_FORWARDER_ADDRESS' | 'SEQ_POLLING_INTERVAL_MS' + | 'SEQ_ENABLE_PROPOSER_PIPELINING' | 'SEQ_ENFORCE_TIME_TABLE' | 'SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT' | 'SEQ_ATTESTATION_PROPAGATION_TIME' diff --git a/yarn-project/p2p/src/client/test/tx_proposal_collector/p2p_client.proposal_tx_collector.bench.test.ts b/yarn-project/p2p/src/client/test/tx_proposal_collector/p2p_client.proposal_tx_collector.bench.test.ts index fd3b8457f75e..606fe511e047 100644 --- a/yarn-project/p2p/src/client/test/tx_proposal_collector/p2p_client.proposal_tx_collector.bench.test.ts +++ 
b/yarn-project/p2p/src/client/test/tx_proposal_collector/p2p_client.proposal_tx_collector.bench.test.ts @@ -95,6 +95,7 @@ describe('ProposalTxCollector Benchmarks', () => { bootstrapNodesAsFullPeers: true, maxPeerCount: PEERS_PER_RUN + 1, peerCheckIntervalMS: 1000, + peerFailedBanTimeMs: 5_000, dialTimeoutMs: 10_000, individualRequestTimeoutMs: 30_000, }; diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 74f0562addad..a753e50feb29 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -43,6 +43,9 @@ export interface P2PConfig /** Maximum transactions per block for validation. Overrides maxTxsPerBlock for gossip validation when set. */ validateMaxTxsPerBlock?: number; + /** Maximum transactions per checkpoint for validation. Used as fallback for maxTxsPerBlock when that is not set. */ + validateMaxTxsPerCheckpoint?: number; + /** Maximum L2 gas per block for validation. When set, txs exceeding this limit are rejected. */ validateMaxL2BlockGas?: number; @@ -67,6 +70,9 @@ export interface P2PConfig /** The frequency in which to check for new peers. */ peerCheckIntervalMS: number; + /** How long to ban a peer after it fails MAX_DIAL_ATTEMPTS dials. */ + peerFailedBanTimeMs: number; + /** Size of queue of L2 blocks to store. */ l2QueueSize: number; @@ -214,6 +220,12 @@ export const p2pConfigMappings: ConfigMappingsType = { 'Maximum transactions per block for validation. Overrides maxTxsPerBlock for gossip validation when set.', parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), }, + validateMaxTxsPerCheckpoint: { + env: 'VALIDATOR_MAX_TX_PER_CHECKPOINT', + description: + 'Maximum transactions per checkpoint for validation. Used as fallback for maxTxsPerBlock when that is not set.', + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), + }, validateMaxL2BlockGas: { env: 'VALIDATOR_MAX_L2_BLOCK_GAS', description: 'Maximum L2 gas per block for validation. 
When set, txs exceeding this limit are rejected.', @@ -254,6 +266,11 @@ export const p2pConfigMappings: ConfigMappingsType = { description: 'The frequency in which to check for new peers.', ...numberConfigHelper(30_000), }, + peerFailedBanTimeMs: { + env: 'P2P_PEER_FAILED_BAN_TIME_MS', + description: 'How long to ban a peer after it fails maximum dial attempts.', + ...numberConfigHelper(5 * 60 * 1000), + }, l2QueueSize: { env: 'P2P_L2_QUEUE_SIZE', description: 'Size of queue of L2 blocks to store.', diff --git a/yarn-project/p2p/src/services/encoding.ts b/yarn-project/p2p/src/services/encoding.ts index 0aea0032d158..d21fbe695597 100644 --- a/yarn-project/p2p/src/services/encoding.ts +++ b/yarn-project/p2p/src/services/encoding.ts @@ -9,6 +9,14 @@ import { webcrypto } from 'node:crypto'; import { compressSync, uncompressSync } from 'snappy'; import xxhashFactory from 'xxhash-wasm'; +/** Thrown when a Snappy-compressed response exceeds the allowed decompressed size. */ +export class OversizedSnappyResponseError extends Error { + constructor(decompressedSize: number, maxSizeKb: number) { + super(`Decompressed size ${decompressedSize} exceeds maximum allowed size of ${maxSizeKb}kb`); + this.name = 'OversizedSnappyResponseError'; + } +} + // Load WASM const xxhash = await xxhashFactory(); @@ -86,7 +94,7 @@ export class SnappyTransform implements DataTransform { const { decompressedSize } = readSnappyPreamble(data); if (decompressedSize > maxSizeKb * 1024) { this.logger.warn(`Decompressed size ${decompressedSize} exceeds maximum allowed size of ${maxSizeKb}kb`); - throw new Error(`Decompressed size ${decompressedSize} exceeds maximum allowed size of ${maxSizeKb}kb`); + throw new OversizedSnappyResponseError(decompressedSize, maxSizeKb); } return Buffer.from(uncompressSync(data, { asBuffer: true })); diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index e59fbaa99b4c..6f89b66a5cf3 100644 --- 
a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -226,7 +226,7 @@ export class LibP2PService extends WithTracer implements P2PService { const proposalValidatorOpts = { txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.validateMaxTxsPerBlock, + maxTxsPerBlock: config.validateMaxTxsPerBlock ?? config.validateMaxTxsPerCheckpoint, }; this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); diff --git a/yarn-project/p2p/src/services/peer-manager/peer_manager.ts b/yarn-project/p2p/src/services/peer-manager/peer_manager.ts index 669f0e149a9c..0754438c7a1f 100644 --- a/yarn-project/p2p/src/services/peer-manager/peer_manager.ts +++ b/yarn-project/p2p/src/services/peer-manager/peer_manager.ts @@ -32,7 +32,7 @@ import { PeerScoreState, type PeerScoring } from './peer_scoring.js'; const MAX_DIAL_ATTEMPTS = 3; const MAX_CACHED_PEERS = 100; const MAX_CACHED_PEER_AGE_MS = 5 * 60 * 1000; // 5 minutes -const FAILED_PEER_BAN_TIME_MS = 5 * 60 * 1000; // 5 minutes timeout after failing MAX_DIAL_ATTEMPTS +const DEFAULT_FAILED_PEER_BAN_TIME_MS = 5 * 60 * 1000; // 5 minutes timeout after failing MAX_DIAL_ATTEMPTS const GOODBYE_DIAL_TIMEOUT_MS = 1000; const FAILED_AUTH_HANDSHAKE_EXPIRY_MS = 60 * 60 * 1000; // 1 hour @@ -776,7 +776,8 @@ export class PeerManager implements PeerManagerInterface { // Add to timed out peers this.timedOutPeers.set(id, { peerId: id, - timeoutUntilMs: this.dateProvider.now() + FAILED_PEER_BAN_TIME_MS, + timeoutUntilMs: + this.dateProvider.now() + (this.config.peerFailedBanTimeMs ?? 
DEFAULT_FAILED_PEER_BAN_TIME_MS), }); } } diff --git a/yarn-project/p2p/src/services/reqresp/reqresp.ts b/yarn-project/p2p/src/services/reqresp/reqresp.ts index bd15d5ff6c4e..38354e1dd67f 100644 --- a/yarn-project/p2p/src/services/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/services/reqresp/reqresp.ts @@ -16,7 +16,7 @@ import { IndividualReqRespTimeoutError, InvalidResponseError, } from '../../errors/reqresp.error.js'; -import { SnappyTransform } from '../encoding.js'; +import { OversizedSnappyResponseError, SnappyTransform } from '../encoding.js'; import type { PeerScoring } from '../peer-manager/peer_scoring.js'; import { DEFAULT_INDIVIDUAL_REQUEST_TIMEOUT_MS, @@ -553,16 +553,10 @@ export class ReqResp implements ReqRespInterface { data: message, }; } catch (e: any) { + // All errors (invalid status bytes, oversized snappy responses, corrupt data, etc.) + // are re-thrown so the caller can penalize the peer via handleResponseError. this.logger.debug(`Reading message failed: ${e.message}`); - - let status = ReqRespStatus.UNKNOWN; - if (e instanceof ReqRespStatusError) { - status = e.status; - } - - return { - status, - }; + throw e; } } @@ -780,6 +774,20 @@ export class ReqResp implements ReqRespInterface { return undefined; } + // Invalid status byte: the peer sent a status byte that doesn't match any known status code. + // This is a protocol violation, penalize harshly. + if (e instanceof ReqRespStatusError) { + this.logger.warn(`Invalid status byte from peer ${peerId.toString()} in ${subProtocol}: ${e.message}`, logTags); + return PeerErrorSeverity.LowToleranceError; + } + + // Oversized snappy response: the peer is sending data that exceeds the allowed size. + // This is a protocol violation that wastes bandwidth, so penalize harshly. 
+ if (e instanceof OversizedSnappyResponseError) { + this.logger.warn(`Oversized response from peer ${peerId.toString()} in ${subProtocol}: ${e.message}`, logTags); + return PeerErrorSeverity.LowToleranceError; + } + return this.categorizeConnectionErrors(e, peerId, subProtocol); } diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 0efeafb01f10..239c332e57f6 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -118,6 +118,7 @@ export class SequencerClient { l1ChainId: chainId, viemPollingIntervalMS: config.viemPollingIntervalMS, ethereumSlotDuration: config.ethereumSlotDuration, + enableProposerPipelining: config.enableProposerPipelining, }, { dateProvider: deps.dateProvider }, )); diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index e0ce28583791..577275cc0c09 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -13,8 +13,10 @@ import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type ChainConfig, + type PipelineConfig, type SequencerConfig, chainConfigMappings, + pipelineConfigMappings, sharedSequencerConfigMappings, } from '@aztec/stdlib/config'; import type { ResolvedSequencerConfig } from '@aztec/stdlib/interfaces/server'; @@ -68,6 +70,7 @@ export type SequencerClientConfig = SequencerPublisherConfig & SequencerConfig & L1ReaderConfig & ChainConfig & + PipelineConfig & Pick & Pick; @@ -244,6 +247,7 @@ export const sequencerClientConfigMappings: ConfigMappingsType(); const initialBlockNumber = BlockNumber(this.syncedToBlockNumber + 1); + const slot = this.slot; // Last block in the checkpoint will usually be flagged as pending broadcast, so we send it along with the checkpoint proposal 
let blockPendingBroadcast: { block: L2Block; txs: Tx[] } | undefined = undefined; @@ -415,11 +421,7 @@ export class CheckpointProposalJob implements Traceable { const timingInfo = this.timetable.canStartNextBlock(secondsIntoSlot); if (!timingInfo.canStart) { - this.log.debug(`Not enough time left in slot to start another block`, { - slot: this.slot, - blocksBuilt, - secondsIntoSlot, - }); + this.log.debug(`Not enough time left in slot to start another block`, { slot, blocksBuilt, secondsIntoSlot }); break; } @@ -451,50 +453,37 @@ export class CheckpointProposalJob implements Traceable { } else if ('error' in buildResult) { // If there was an error building the block, just exit the loop and give up the rest of the slot if (!(buildResult.error instanceof SequencerInterruptedError)) { - this.log.warn(`Halting block building for slot ${this.slot}`, { - slot: this.slot, - blocksBuilt, - error: buildResult.error, - }); + this.log.warn(`Halting block building for slot ${slot}`, { slot, blocksBuilt, error: buildResult.error }); } break; } const { block, usedTxs } = buildResult; blocksInCheckpoint.push(block); - - // Sync the proposed block to the archiver to make it available - // We wait for the sync to succeed, as this helps catch consistency errors, even if it means we lose some time for block-building - // If this throws, we abort the entire checkpoint - await this.syncProposedBlockToArchiver(block); - usedTxs.forEach(tx => txHashesAlreadyIncluded.add(tx.txHash.toString())); - // If this is the last block, exit the loop now so we start collecting attestations + // If this is the last block, send the proposed block to the archiver, + // and exit the loop now so we can build the checkpoint and start collecting attestations. 
if (timingInfo.isLastBlock) { - this.log.verbose(`Completed final block ${blockNumber} for slot ${this.slot}`, { - slot: this.slot, - blockNumber, - blocksBuilt, - }); + await this.syncProposedBlockToArchiver(block); + this.log.verbose(`Completed final block ${blockNumber} for slot ${slot}`, { slot, blockNumber, blocksBuilt }); blockPendingBroadcast = { block, txs: usedTxs }; break; } - // For non-last blocks, broadcast the block proposal (unless we're in fisherman mode) - // If the block is the last one, we'll broadcast it along with the checkpoint at the end of the loop - if (!this.config.fishermanMode) { - const proposal = await this.validatorClient.createBlockProposal( - block.header, - block.indexWithinCheckpoint, - inHash, - block.archive.root, - usedTxs, - this.proposer, - blockProposalOptions, - ); - await this.p2pClient.broadcastProposal(proposal); - } + // Broadcast the block proposal (unless we're in fisherman mode) unless the block is the last one, + // in which case we'll broadcast it along with the checkpoint at the end of the loop. + // Note that we only send the block to the archiver if we manage to create the proposal, so if there's + // a HA error we don't pollute our archiver with a block that won't make it to the chain. + const proposal = await this.createBlockProposal(block, inHash, usedTxs, blockProposalOptions); + + // Sync the proposed block to the archiver to make it available, only after we've managed to sign the proposal. + // We wait for the sync to succeed, as this helps catch consistency errors, even if it means we lose some time for block-building. + // If this throws, we abort the entire checkpoint. + await this.syncProposedBlockToArchiver(block); + + // Once we have a signed proposal and the archiver agreed with our proposed block, then we broadcast it. 
+ proposal && (await this.p2pClient.broadcastProposal(proposal)); // Wait until the next block's start time await this.waitUntilNextSubslot(timingInfo.deadline); @@ -508,6 +497,28 @@ export class CheckpointProposalJob implements Traceable { return { blocksInCheckpoint, blockPendingBroadcast }; } + /** Creates a block proposal for a given block via the validator client (unless in fisherman mode) */ + private createBlockProposal( + block: L2Block, + inHash: Fr, + usedTxs: Tx[], + blockProposalOptions: BlockProposalOptions, + ): Promise { + if (this.config.fishermanMode) { + this.log.info(`Skipping block proposal for block ${block.number} in fisherman mode`); + return Promise.resolve(undefined); + } + return this.validatorClient.createBlockProposal( + block.header, + block.indexWithinCheckpoint, + inHash, + block.archive.root, + usedTxs, + this.proposer, + blockProposalOptions, + ); + } + /** Sleeps until it is time to produce the next block in the slot */ @trackSpan('CheckpointProposalJob.waitUntilNextSubslot') private async waitUntilNextSubslot(nextSubslotStart: number) { diff --git a/yarn-project/sequencer-client/src/sequencer/metrics.ts b/yarn-project/sequencer-client/src/sequencer/metrics.ts index 58655d3fbd74..dcc1d8a2b9e6 100644 --- a/yarn-project/sequencer-client/src/sequencer/metrics.ts +++ b/yarn-project/sequencer-client/src/sequencer/metrics.ts @@ -49,6 +49,8 @@ export class SequencerMetrics { private checkpointBlockCount: Gauge; private checkpointTxCount: Gauge; private checkpointTotalMana: Gauge; + private pipelineDepth: Gauge; + private pipelineDiscards: UpDownCounter; // Fisherman fee analysis metrics private fishermanWouldBeIncluded: UpDownCounter; @@ -143,6 +145,10 @@ export class SequencerMetrics { this.slashingAttempts = createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_SLASHING_ATTEMPTS_COUNT); + this.pipelineDepth = this.meter.createGauge(Metrics.SEQUENCER_PIPELINE_DEPTH); + this.pipelineDiscards = 
createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_PIPELINE_DISCARDS_COUNT); + this.pipelineDepth.record(0); + // Fisherman fee analysis metrics this.fishermanWouldBeIncluded = createUpDownCounterWithDefault( this.meter, @@ -234,6 +240,14 @@ export class SequencerMetrics { }); } + recordPipelineDepth(depth: number) { + this.pipelineDepth.record(depth); + } + + recordPipelineDiscard(count = 1) { + this.pipelineDiscards.add(count); + } + incOpenSlot(slot: SlotNumber, proposer: string) { // sequencer went through the loop a second time. Noop if (slot === this.lastSeenSlot) { diff --git a/yarn-project/simulator/src/public/public_processor/public_processor.ts b/yarn-project/simulator/src/public/public_processor/public_processor.ts index 20ce6fbaa3e4..f8b708d0a568 100644 --- a/yarn-project/simulator/src/public/public_processor/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor/public_processor.ts @@ -306,6 +306,9 @@ export class PublicProcessor implements Traceable { totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas); totalSizeInBytes += txSize; totalBlobFields += txBlobFields; + + // Commit the tx-level contracts checkpoint on success + this.contractsDB.commitCheckpoint(); } catch (err: any) { if (err?.name === 'PublicProcessorTimeoutError') { this.log.warn(`Stopping tx processing due to timeout.`); @@ -354,7 +357,6 @@ export class PublicProcessor implements Traceable { } finally { // Base case is we always commit the checkpoint. 
Using the ForkCheckpoint means this has no effect if the tx was previously reverted await checkpoint.commit(); - this.contractsDB.commitCheckpointOkIfNone(); } } diff --git a/yarn-project/stdlib/src/config/index.ts b/yarn-project/stdlib/src/config/index.ts index 3682f0db4012..f6c59e75b3d9 100644 --- a/yarn-project/stdlib/src/config/index.ts +++ b/yarn-project/stdlib/src/config/index.ts @@ -1,3 +1,4 @@ export * from './chain-config.js'; export * from './node-rpc-config.js'; +export * from './pipelining-config.js'; export * from './sequencer-config.js'; diff --git a/yarn-project/stdlib/src/config/pipelining-config.ts b/yarn-project/stdlib/src/config/pipelining-config.ts new file mode 100644 index 000000000000..3a2a1a981506 --- /dev/null +++ b/yarn-project/stdlib/src/config/pipelining-config.ts @@ -0,0 +1,31 @@ +import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; + +import { z } from 'zod'; + +import { zodFor } from '../schemas/index.js'; + +export interface PipelineConfig { + /** Whether to enable build-ahead proposer pipelining. */ + enableProposerPipelining: boolean; +} + +/** + * Pipelining config mappings for fields that need to be shared across packages. 
+ */ +export const pipelineConfigMappings: ConfigMappingsType = { + enableProposerPipelining: { + env: 'SEQ_ENABLE_PROPOSER_PIPELINING', + description: 'Whether to enable build-ahead proposer pipelining.', + ...booleanConfigHelper(false), + }, +}; + +export const PipelineConfigSchema = zodFor()( + z.object({ + enableProposerPipelining: z.boolean(), + }), +); + +export function getPipelineConfigEnvVars(): PipelineConfig { + return getConfigFromMappings(pipelineConfigMappings); +} diff --git a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.test.ts b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.test.ts new file mode 100644 index 000000000000..d7b42331e3cf --- /dev/null +++ b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.test.ts @@ -0,0 +1,17 @@ +import { TxScopedL2Log } from './tx_scoped_l2_log.js'; + +describe('TxScopedL2Log', () => { + it('should serialize and deserialize correctly', () => { + const log = TxScopedL2Log.random(); + const buffer = log.toBuffer(); + const deserializedLog = TxScopedL2Log.fromBuffer(buffer); + expect(deserializedLog.equals(log)).toBe(true); + }); + + it('should extract block number from buffer correctly', () => { + const log = TxScopedL2Log.random(); + const buffer = log.toBuffer(); + const blockNumber = TxScopedL2Log.getBlockNumberFromBuffer(buffer); + expect(blockNumber).toBe(log.blockNumber); + }); +}); diff --git a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts index 31481f4ab49d..7e9264ac94de 100644 --- a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts +++ b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts @@ -1,4 +1,5 @@ import { BlockNumber, BlockNumberSchema } from '@aztec/foundation/branded-types'; +import { times } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { schemas as foundationSchemas } from '@aztec/foundation/schemas'; import { @@ -83,6 +84,21 @@ export class TxScopedL2Log { return new TxScopedL2Log(txHash, 
blockNumber, blockTimestamp, logData, noteHashes, firstNullifier); } + static getBlockNumberFromBuffer(buffer: Buffer) { + return BlockNumber(buffer.readUint32BE(Fr.SIZE_IN_BYTES)); + } + + static random() { + return new TxScopedL2Log( + TxHash.fromField(Fr.random()), + BlockNumber(Math.floor(Math.random() * 100000) + 1), + BigInt(Math.floor(Date.now() / 1000)), + times(3, Fr.random), + times(3, Fr.random), + Fr.random(), + ); + } + equals(other: TxScopedL2Log) { return ( this.txHash.equals(other.txHash) && diff --git a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts index fdf4d679510f..69d5d6d936d6 100644 --- a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts +++ b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts @@ -178,29 +178,32 @@ export class CheckpointProposal extends Gossipable { blockNumber: lastBlockInfo?.blockHeader?.globalVariables.blockNumber ?? BlockNumber(0), dutyType: DutyType.CHECKPOINT_PROPOSAL, }; - const checkpointSignature = await payloadSigner(checkpointHash, checkpointContext); - if (!lastBlockInfo) { - return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, checkpointSignature); + if (lastBlockInfo) { + // Sign block proposal before signing checkpoint proposal to ensure HA protection + const lastBlockProposal = await BlockProposal.createProposalFromSigner( + lastBlockInfo.blockHeader, + lastBlockInfo.indexWithinCheckpoint, + checkpointHeader.inHash, + archiveRoot, + lastBlockInfo.txHashes, + lastBlockInfo.txs, + payloadSigner, + ); + + const checkpointSignature = await payloadSigner(checkpointHash, checkpointContext); + + return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, checkpointSignature, { + blockHeader: lastBlockInfo.blockHeader, + indexWithinCheckpoint: lastBlockInfo.indexWithinCheckpoint, + txHashes: lastBlockInfo.txHashes, + signature: lastBlockProposal.signature, + signedTxs: lastBlockProposal.signedTxs, + }); } 
- const lastBlockProposal = await BlockProposal.createProposalFromSigner( - lastBlockInfo.blockHeader, - lastBlockInfo.indexWithinCheckpoint, - checkpointHeader.inHash, - archiveRoot, - lastBlockInfo.txHashes, - lastBlockInfo.txs, - payloadSigner, - ); - - return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, checkpointSignature, { - blockHeader: lastBlockInfo.blockHeader, - indexWithinCheckpoint: lastBlockInfo.indexWithinCheckpoint, - txHashes: lastBlockInfo.txHashes, - signature: lastBlockProposal.signature, - signedTxs: lastBlockProposal.signedTxs, - }); + const checkpointSignature = await payloadSigner(checkpointHash, checkpointContext); + return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, checkpointSignature); } /** diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index f6678089bcf9..6bd63208404b 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -504,6 +504,16 @@ export const SEQUENCER_CHECKPOINT_SUCCESS_COUNT: MetricDefinition = { description: 'The number of times checkpoint publishing succeeded', valueType: ValueType.INT, }; +export const SEQUENCER_PIPELINE_DEPTH: MetricDefinition = { + name: 'aztec.sequencer.pipeline.depth', + description: 'Current pipeline depth when builder pipelining is enabled', + valueType: ValueType.INT, +}; +export const SEQUENCER_PIPELINE_DISCARDS_COUNT: MetricDefinition = { + name: 'aztec.sequencer.pipeline.discards_count', + description: 'The number of times a pipeline was discarded', + valueType: ValueType.INT, +}; // Fisherman fee analysis metrics export const FISHERMAN_FEE_ANALYSIS_WOULD_BE_INCLUDED: MetricDefinition = { diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index 43c890bdafa8..1582c74b334c 100644 --- 
a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -487,7 +487,9 @@ export class BlockProposalHandler { } private getReexecuteFailureReason(err: any): BlockProposalValidationFailureReason { - if (err instanceof ReExInitialStateMismatchError) { + if (err instanceof TransactionsNotAvailableError) { + return 'txs_not_available'; + } else if (err instanceof ReExInitialStateMismatchError) { return 'initial_state_mismatch'; } else if (err instanceof ReExStateMismatchError) { return 'state_mismatch'; diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 9dab4a4778c9..f8b3ce509dc3 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -10,7 +10,7 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { TestDateProvider } from '@aztec/foundation/timer'; import type { LightweightCheckpointBuilder } from '@aztec/prover-client/light'; -import type { PublicProcessor } from '@aztec/simulator/server'; +import type { PublicContractsDB, PublicProcessor } from '@aztec/simulator/server'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; @@ -22,17 +22,22 @@ import { type PublicProcessorLimits, type PublicProcessorValidator, } from '@aztec/stdlib/interfaces/server'; -import { TxHash } from '@aztec/stdlib/tx'; -import type { CheckpointGlobalVariables, GlobalVariables, ProcessedTx, Tx } from '@aztec/stdlib/tx'; +import { + type CheckpointGlobalVariables, + type GlobalVariables, + type ProcessedTx, + type Tx, + TxHash, +} from '@aztec/stdlib/tx'; import type { TelemetryClient } from '@aztec/telemetry-client'; -import { describe, expect, it } from 
'@jest/globals'; +import { describe, expect, it, jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { CheckpointBuilder } from './checkpoint_builder.js'; describe('CheckpointBuilder', () => { - let checkpointBuilder: CheckpointBuilder; + let checkpointBuilder: TestCheckpointBuilder; let lightweightCheckpointBuilder: MockProxy; let fork: MockProxy; let config: FullNodeBlockBuilderConfig; @@ -57,6 +62,8 @@ describe('CheckpointBuilder', () => { }; class TestCheckpointBuilder extends CheckpointBuilder { + declare public contractsDB: PublicContractsDB; + public override makeBlockBuilderDeps(_globalVariables: GlobalVariables, _fork: MerkleTreeWriteOperations) { return Promise.resolve({ processor, validator }); } @@ -101,7 +108,9 @@ describe('CheckpointBuilder', () => { } beforeEach(() => { - lightweightCheckpointBuilder = mock({ checkpointNumber, constants }); + lightweightCheckpointBuilder = mock(); + Object.defineProperty(lightweightCheckpointBuilder, 'checkpointNumber', { value: checkpointNumber }); + Object.defineProperty(lightweightCheckpointBuilder, 'constants', { value: constants }); lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); fork = mock(); @@ -117,6 +126,50 @@ describe('CheckpointBuilder', () => { setupBuilder(); }); + describe('contractsDB checkpointing', () => { + let createCheckpointSpy: jest.SpiedFunction<() => void>; + let commitCheckpointSpy: jest.SpiedFunction<() => void>; + let revertCheckpointSpy: jest.SpiedFunction<() => void>; + + beforeEach(() => { + const db = checkpointBuilder.contractsDB; + createCheckpointSpy = jest.spyOn(db, 'createCheckpoint'); + commitCheckpointSpy = jest.spyOn(db, 'commitCheckpoint'); + revertCheckpointSpy = jest.spyOn(db, 'revertCheckpoint'); + + lightweightCheckpointBuilder.getBlockCount.mockReturnValue(0); + }); + + async function mockSuccessfulBlock() { + const block = await L2Block.random(blockNumber); + lightweightCheckpointBuilder.addBlock.mockResolvedValue({ 
block, timings: {} }); + processor.process.mockResolvedValue([[{ hash: TxHash.random() } as ProcessedTx], [], [], [], []]); + return block; + } + + it('uses the same contractsDB across multiple block builds', async () => { + await mockSuccessfulBlock(); + await checkpointBuilder.buildBlock([], blockNumber, 1000n); + + await mockSuccessfulBlock(); + await checkpointBuilder.buildBlock([], BlockNumber(blockNumber + 1), 1001n); + + expect(createCheckpointSpy).toHaveBeenCalledTimes(2); + expect(commitCheckpointSpy).toHaveBeenCalledTimes(2); + expect(revertCheckpointSpy).not.toHaveBeenCalled(); + }); + + it('calls revertCheckpoint when public processor fails', async () => { + processor.process.mockRejectedValue(new Error('processor failure')); + + await expect(checkpointBuilder.buildBlock([], blockNumber, 1000n)).rejects.toThrow('processor failure'); + + expect(createCheckpointSpy).toHaveBeenCalledTimes(1); + expect(commitCheckpointSpy).not.toHaveBeenCalled(); + expect(revertCheckpointSpy).toHaveBeenCalledTimes(1); + }); + }); + describe('buildBlock', () => { it('builds a block successfully when transactions are processed', async () => { lightweightCheckpointBuilder.getBlockCount.mockReturnValue(0); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 906bcfe7da98..20a9625c8e71 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -46,6 +46,9 @@ export type { BuildBlockInCheckpointResult } from '@aztec/stdlib/interfaces/serv export class CheckpointBuilder implements ICheckpointBlockBuilder { private log: Logger; + /** Persistent contracts DB shared across all blocks in this checkpoint. 
*/ + protected contractsDB: PublicContractsDB; + constructor( private checkpointBuilder: LightweightCheckpointBuilder, private fork: MerkleTreeWriteOperations, @@ -60,6 +63,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { ...bindings, instanceId: `checkpoint-${checkpointBuilder.checkpointNumber}`, }); + this.contractsDB = new PublicContractsDB(this.contractDataSource, this.log.getBindings()); } getConstantData(): CheckpointGlobalVariables { @@ -104,6 +108,8 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { ...this.capLimitsByCheckpointBudgets(opts), }; + // Create a block-level checkpoint on the contracts DB so we can roll back on failure + this.contractsDB.createCheckpoint(); // We execute all merkle tree operations on a world state fork checkpoint // This enables us to discard all modifications in the event that we fail to successfully process sufficient transactions const forkCheckpoint = await ForkCheckpoint.new(this.fork); @@ -112,6 +118,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { const [publicProcessorDuration, [processedTxs, failedTxs, usedTxs]] = await elapsed(() => processor.process(pendingTxs, cappedOpts, validator), ); + // Throw before updating state if we don't have enough valid txs const minValidTxs = opts.minValidTxs ?? 
0; if (processedTxs.length < minValidTxs) { @@ -126,6 +133,8 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { expectedEndState: opts.expectedEndState, }); + this.contractsDB.commitCheckpoint(); + this.log.debug('Built block within checkpoint', { header: block.header.toInspect(), processedTxs: processedTxs.map(tx => tx.hash.toString()), @@ -140,6 +149,8 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { usedTxs, }; } catch (err) { + // Revert all changes to contracts db + this.contractsDB.revertCheckpoint(); // If we reached the point of committing the checkpoint, this does nothing // Otherwise it reverts any changes made to the fork for this failed block await forkCheckpoint.revert(); @@ -233,7 +244,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { ...(await getDefaultAllowedSetupFunctions()), ...(this.config.txPublicSetupAllowListExtend ?? []), ]; - const contractsDB = new PublicContractsDB(this.contractDataSource, this.log.getBindings()); + const contractsDB = this.contractsDB; const guardedFork = new GuardedMerkleTreeOperations(fork); const collectDebugLogs = this.debugLogStore.isEnabled; diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index b7645d48c485..6d9db7607753 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -7,6 +7,7 @@ import type { L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { ValidatorClientFullConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import type { TelemetryClient } from '@aztec/telemetry-client'; +import type { SlashingProtectionDatabase } from '@aztec/validator-ha-signer/types'; import { BlockProposalHandler } from './block_proposal_handler.js'; import type { FullNodeCheckpointsBuilder } from './checkpoint_builder.js'; @@ -29,7 +30,7 @@ export 
function createBlockProposalHandler( const metrics = new ValidatorMetrics(deps.telemetry); const blockProposalValidator = new BlockProposalValidator(deps.epochCache, { txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.validateMaxTxsPerBlock, + maxTxsPerBlock: config.validateMaxTxsPerBlock ?? config.validateMaxTxsPerCheckpoint, }); return new BlockProposalHandler( deps.checkpointsBuilder, @@ -59,6 +60,7 @@ export function createValidatorClient( epochCache: EpochCache; keyStoreManager: KeystoreManager | undefined; blobClient: BlobClientInterface; + slashingProtectionDb?: SlashingProtectionDatabase; }, ) { if (config.disableValidator || !deps.keyStoreManager) { @@ -79,5 +81,6 @@ export function createValidatorClient( deps.blobClient, deps.dateProvider, deps.telemetry, + deps.slashingProtectionDb, ); } diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index ab751b9b33ba..804869df8474 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -92,6 +92,7 @@ describe('ValidatorClient', () => { let checkpointsBuilder: MockProxy; let worldState: MockProxy; let validatorAccounts: PrivateKeyAccount[]; + let validatorPrivateKeys: `0x${string}`[]; let dateProvider: TestDateProvider; let txProvider: MockProxy; let keyStoreManager: KeystoreManager; @@ -135,7 +136,7 @@ describe('ValidatorClient', () => { haKeyStore.start.mockImplementation(() => Promise.resolve()); haKeyStore.stop.mockImplementation(() => Promise.resolve()); - const validatorPrivateKeys = [generatePrivateKey(), generatePrivateKey()]; + validatorPrivateKeys = [generatePrivateKey(), generatePrivateKey()]; validatorAccounts = validatorPrivateKeys.map(privateKey => privateKeyToAccount(privateKey)); haKeyStore.getAddresses.mockReturnValue(validatorAccounts.map(account => EthAddress.fromString(account.address))); @@ -387,6 +388,23 @@ describe('ValidatorClient', () => 
{ expect(isValid).toBe(true); }); + it('should process block proposal from own validator key (HA peer)', async () => { + const selfSigner = new Secp256k1Signer(Buffer32.fromString(validatorPrivateKeys[0])); + const emptyInHash = computeInHashFromL1ToL2Messages([]); + const selfProposal = await makeBlockProposal({ + blockHeader: proposal.blockHeader, + inHash: emptyInHash, + signer: selfSigner, + }); + + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(selfSigner.address); + + const handleSpy = jest.spyOn(validatorClient.getBlockProposalHandler(), 'handleBlockProposal'); + const isValid = await validatorClient.validateBlockProposal(selfProposal, sender); + expect(isValid).toBe(true); + expect(handleSpy).toHaveBeenCalled(); + }); + it('should return early when escape hatch is open', async () => { epochCache.isEscapeHatchOpenAtSlot.mockResolvedValueOnce(true); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 60ee9c8ea2d2..2691c7a749e7 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -46,8 +46,12 @@ import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, CheckpointGlobalVariables, Tx } from '@aztec/stdlib/tx'; import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { type TelemetryClient, type Tracer, getTelemetryClient } from '@aztec/telemetry-client'; -import { createHASigner, createLocalSignerWithProtection } from '@aztec/validator-ha-signer/factory'; -import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; +import { + createHASigner, + createLocalSignerWithProtection, + createSignerFromSharedDb, +} from '@aztec/validator-ha-signer/factory'; +import { DutyType, type SigningContext, type SlashingProtectionDatabase } from '@aztec/validator-ha-signer/types'; import type { ValidatorHASigner } from '@aztec/validator-ha-signer/validator-ha-signer'; 
import { EventEmitter } from 'events'; @@ -197,6 +201,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) blobClient: BlobClientInterface, dateProvider: DateProvider = new DateProvider(), telemetry: TelemetryClient = getTelemetryClient(), + slashingProtectionDb?: SlashingProtectionDatabase, ) { const metrics = new ValidatorMetrics(telemetry); const blockProposalValidator = new BlockProposalValidator(epochCache, { @@ -219,7 +224,13 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) const nodeKeystoreAdapter = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); let slashingProtectionSigner: ValidatorHASigner; - if (config.haSigningEnabled) { + if (slashingProtectionDb) { + // Shared database mode: use a pre-existing database (e.g. for testing HA setups). + ({ signer: slashingProtectionSigner } = createSignerFromSharedDb(slashingProtectionDb, config, { + telemetryClient: telemetry, + dateProvider, + })); + } else if (config.haSigningEnabled) { // Multi-node HA mode: use PostgreSQL-backed distributed locking. 
// If maxStuckDutiesAgeMs is not explicitly set, compute it from Aztec slot duration const haConfig = { @@ -378,13 +389,12 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return false; } - // Ignore proposals from ourselves (may happen in HA setups) + // Log self-proposals from HA peers (same validator key on different nodes) if (this.getValidatorAddresses().some(addr => addr.equals(proposer))) { - this.log.debug(`Ignoring block proposal from self for slot ${slotNumber}`, { + this.log.verbose(`Processing block proposal from HA peer for slot ${slotNumber}`, { proposer: proposer.toString(), slotNumber, }); - return false; } // Check if we're in the committee (for metrics purposes) diff --git a/yarn-project/validator-ha-signer/src/factory.ts b/yarn-project/validator-ha-signer/src/factory.ts index 3f9e09e69f00..11540567f38b 100644 --- a/yarn-project/validator-ha-signer/src/factory.ts +++ b/yarn-project/validator-ha-signer/src/factory.ts @@ -137,3 +137,35 @@ export async function createLocalSignerWithProtection( return { signer, db }; } + +/** + * Create an in-memory LMDB-backed SlashingProtectionDatabase that can be shared across + * multiple validator nodes in the same process. Used for testing HA setups. + */ +export async function createSharedSlashingProtectionDb( + dateProvider: DateProvider = new DateProvider(), +): Promise<SlashingProtectionDatabase> { + const kvStore = await createStore('shared-signing-protection', LmdbSlashingProtectionDatabase.SCHEMA_VERSION, { + dataStoreMapSizeKb: 1024 * 1024, + }); + return new LmdbSlashingProtectionDatabase(kvStore, dateProvider); +} + +/** + * Create a ValidatorHASigner backed by a pre-existing SlashingProtectionDatabase. + * Used for testing HA setups where multiple nodes share the same protection database 
+ */ +export function createSignerFromSharedDb( + db: SlashingProtectionDatabase, + config: Pick< + ValidatorHASignerConfig, + 'nodeId' | 'pollingIntervalMs' | 'signingTimeoutMs' | 'maxStuckDutiesAgeMs' | 'l1Contracts' + >, + deps?: CreateLocalSignerWithProtectionDeps, +): { signer: ValidatorHASigner; db: SlashingProtectionDatabase } { + const telemetryClient = deps?.telemetryClient ?? getTelemetryClient(); + const dateProvider = deps?.dateProvider ?? new DateProvider(); + const metrics = new HASignerMetrics(telemetryClient, config.nodeId, 'SharedSigningProtectionMetrics'); + const signer = new ValidatorHASigner(db, config, { metrics, dateProvider }); + return { signer, db }; +} diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index 511579f3d2e4..37d55fce1eb4 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -388,6 +388,18 @@ export class ServerWorldStateSynchronizer private async handleChainFinalized(blockNumber: BlockNumber) { this.log.verbose(`Finalized chain is now at block ${blockNumber}`); + // If the finalized block number is older than the oldest available block in world state, + // skip entirely. The finalized block number can jump backwards (e.g. when the finalization + // heuristic changes) and try to read block data that has already been pruned. When this + // happens, there is nothing useful to do — the native world state is already finalized + // past this point and pruning has already happened. + const currentSummary = await this.merkleTreeDb.getStatusSummary(); + if (blockNumber < currentSummary.oldestHistoricalBlock || blockNumber < 1) { + this.log.trace( + `Finalized block ${blockNumber} is older than the oldest available block ${currentSummary.oldestHistoricalBlock}. 
Skipping.`, + ); + return; + } const summary = await this.merkleTreeDb.setFinalized(blockNumber); if (this.historyToKeep === undefined) { return; @@ -421,6 +433,12 @@ export class ServerWorldStateSynchronizer } // Find the block at the start of the checkpoint and remove blocks up to this one const newHistoricBlock = historicCheckpoint.checkpoint.blocks[0]; + if (newHistoricBlock.number <= currentSummary.oldestHistoricalBlock) { + this.log.debug( + `Historic block ${newHistoricBlock.number} is not newer than oldest available ${currentSummary.oldestHistoricalBlock}. Skipping prune.`, + ); + return; + } this.log.verbose(`Pruning historic blocks to ${newHistoricBlock.number}`); const status = await this.merkleTreeDb.removeHistoricalBlocks(BlockNumber(newHistoricBlock.number)); this.log.debug(`World state summary `, status.summary); diff --git a/yarn-project/world-state/src/test/integration.test.ts b/yarn-project/world-state/src/test/integration.test.ts index 8ceb875d4669..893a50c8746d 100644 --- a/yarn-project/world-state/src/test/integration.test.ts +++ b/yarn-project/world-state/src/test/integration.test.ts @@ -252,6 +252,44 @@ describe('world-state integration', () => { await awaitSync(5, 4); await expectSynchedToBlock(5, 4); }); + + it('does not throw when finalized block jumps backwards past pruned blocks', async () => { + // Create 20 blocks and sync them all + await archiver.createBlocks(MAX_CHECKPOINT_COUNT); + await synchronizer.start(); + await awaitSync(MAX_CHECKPOINT_COUNT); + await expectSynchedToBlock(MAX_CHECKPOINT_COUNT); + + // Manually finalize to block 15 and prune historical blocks up to block 10 + // to simulate world-state having pruned old data. 
+ await db.setFinalized(BlockNumber(15)); + await db.removeHistoricalBlocks(BlockNumber(10)); + + const summary = await db.getStatusSummary(); + log.info( + `After manual finalize+prune: oldest=${summary.oldestHistoricalBlock}, finalized=${summary.finalizedBlockNumber}`, + ); + expect(summary.oldestHistoricalBlock).toBe(10); + expect(summary.finalizedBlockNumber).toBe(15); + + // Now simulate the scenario from PR #21597: finalized block jumps backwards + // to a block M that is older than oldestHistoricalBlock. + // This should NOT throw — the clamping logic should handle it. + const backwardsFinalized = BlockNumber(5); + log.info( + `Sending chain-finalized for block ${backwardsFinalized} (below oldest ${summary.oldestHistoricalBlock})`, + ); + await expect( + synchronizer.handleBlockStreamEvent({ + type: 'chain-finalized', + block: { number: backwardsFinalized, hash: '' }, + }), + ).resolves.not.toThrow(); + + // Finalized block should remain at 15 (unchanged by the backwards event) + const afterSummary = await db.getStatusSummary(); + expect(afterSummary.finalizedBlockNumber).toBe(15); + }); }); });