Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
a02c83d
chore: fix proving logs script
alexghr Mar 10, 2026
5eedb7d
fix(validator): process block proposals from own validator keys in HA…
spalladino Mar 16, 2026
0daebe9
fix: tx collector bench test
Mar 16, 2026
f6c26a8
fix(p2p): fall back to maxTxsPerCheckpoint for per-block tx validatio…
spalladino Mar 16, 2026
63a8035
Merge branch 'next' into merge-train/spartan
Mar 16, 2026
6845b72
chore: fixing M3 devcontainer builds (#21611)
mrzeszutko Mar 16, 2026
a698baa
Merge branch 'next' into merge-train/spartan
Mar 16, 2026
ce8904d
Merge branch 'next' into merge-train/spartan
Mar 16, 2026
aa97afc
fix: clamp finalized block to oldest available in world-state
AztecBot Mar 17, 2026
1ad718d
test: add integration test for finalized block backwards jump past pr…
AztecBot Mar 17, 2026
92f87f8
fix: avoid `Array.from` with untrusted sizes
spalladino Mar 17, 2026
2154ecf
fix: same change but for field reader
spalladino Mar 17, 2026
79bf4c4
fix: clamp finalized block to oldest available in world-state (#21643)
PhilWindle Mar 17, 2026
75219ef
chore: fix proving logs script (#21335)
PhilWindle Mar 17, 2026
e141dbe
fix: (A-649) tx collector bench test (#21619)
PhilWindle Mar 17, 2026
7cdaafc
fix: skip handleChainFinalized when block is behind oldest available
AztecBot Mar 17, 2026
bbcefc8
fix(validator): process block proposals from own validator keys in HA…
PhilWindle Mar 17, 2026
881eb02
fix: add bounds when allocating arrays in deserialization (#21622)
PhilWindle Mar 17, 2026
fac513c
fix: skip handleChainFinalized when block is behind oldest available …
PhilWindle Mar 17, 2026
e31df37
chore: demote finalized block skip log to trace (#21661)
AztecBot Mar 17, 2026
08b84e8
fix: skip -march auto-detection for cross-compilation presets (#21356)
AztecBot Mar 17, 2026
e4dcdee
chore: revert "add bounds when allocating arrays in deserialization" …
spalladino Mar 17, 2026
938dc3c
fix: capture txs not available error reason in proposal handler (#21670)
spalladino Mar 17, 2026
a26fe5b
Merge branch 'next' into merge-train/spartan
Mar 17, 2026
8371fea
Merge branch 'next' into merge-train/spartan
Mar 17, 2026
4943a4f
fix: estimate gas in bot and make BatchCall.simulate() return Simulat…
alexghr Mar 17, 2026
5fd6fd7
fix: prevent HA peer proposals from blocking equivocation in duplicat…
AztecBot Mar 17, 2026
34ee6a4
fix(p2p): penalize peers for errors during response reading
spalladino Mar 17, 2026
94eaf06
Merge branch 'next' into merge-train/spartan
Mar 17, 2026
b32d86f
fix(p2p): penalize peers for errors during response reading (#21680)
mralj Mar 17, 2026
9f90527
feat(sequencer): add build-ahead config and metrics (#20779)
Maddiaa0 Mar 17, 2026
09a1e9b
chore: fixing build on mac (#21685)
mrzeszutko Mar 17, 2026
f503d61
fix: HA deadlock for last block edge case (#21690)
spypsy Mar 17, 2026
8738399
fix: process all contract classes in storeBroadcastedIndividualFuncti…
PhilWindle Mar 17, 2026
4c2210e
chore: add slack success post on nightly scenario
Mar 17, 2026
1827411
chore: add slack success post on nightly scenario (#21701)
danielntmd Mar 17, 2026
e56de5c
Merge branch 'next' into merge-train/spartan
Mar 17, 2026
b56ac49
fix(builder): persist contractsDB across blocks within a checkpoint (…
spalladino Mar 17, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion barretenberg/cpp/CMakePresets.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
},
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release",
"TARGET_ARCH": "skylake",
"ENABLE_PIC": "ON"
}
},
Expand Down
2 changes: 1 addition & 1 deletion barretenberg/cpp/bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ else
export native_preset=${NATIVE_PRESET:-clang20-no-avm}
fi
export hash=$(hash_str $(../../avm-transpiler/bootstrap.sh hash) $(cache_content_hash .rebuild_patterns))
# Pass the preset explicitly so the resolved build dir matches the preset in
# use (calling preset-build-dir with no argument picks a default that can
# disagree with $native_preset). The stale duplicate assignment without the
# argument has been removed.
export native_build_dir=$(scripts/preset-build-dir $native_preset)

# Injects version number into a given bb binary.
# Means we don't actually need to rebuild bb to release a new version if code hasn't changed.
Expand Down
11 changes: 10 additions & 1 deletion barretenberg/cpp/cmake/arch.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,16 @@ if(WASM)
add_compile_options(-fno-exceptions -fno-slp-vectorize)
endif()

# Auto-detect TARGET_ARCH on x86_64 if not explicitly set (native builds only).
# On ARM, we skip -march entirely — the zig wrappers use an explicit aarch64 target
# to produce generic ARM64 code without CPU-specific extensions (e.g. SVE).
# Skip auto-detection when cross-compiling — the toolchain (e.g. Zig -mcpu) handles
# architecture targeting, and injecting -march here conflicts with it.
# NOTE: a stale, unmatched `if(NOT WASM AND NOT ARM AND TARGET_ARCH)` line
# (merge residue) was removed here; it left the if/endif pairs unbalanced.
if(NOT WASM AND NOT TARGET_ARCH AND NOT ARM AND NOT CMAKE_CROSSCOMPILING)
  set(TARGET_ARCH "skylake")
endif()

# Apply the (explicit or auto-detected) target architecture to all native code.
if(NOT WASM AND TARGET_ARCH)
  message(STATUS "Target architecture: ${TARGET_ARCH}")
  add_compile_options(-march=${TARGET_ARCH})
endif()
9 changes: 8 additions & 1 deletion barretenberg/cpp/scripts/zig-c++.sh
Original file line number Diff line number Diff line change
@@ -1,8 +1,15 @@
#!/bin/bash
# Wrapper for zig c++ that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat)
# and uses native target on macOS.
# On ARM64 Linux, use an explicit aarch64 target instead of 'native' to produce
# generic ARM64 code. This prevents CPU-specific instructions (e.g. SVE on Graviton)
# from being emitted, ensuring binaries work across all ARM64 machines including
# Apple Silicon in devcontainers.
# NOTE: removed a stale unconditional `exec zig c++ -target native-linux-gnu.2.35`
# (merge residue) that sat before the case statement — exec never returns, so the
# per-architecture dispatch below was unreachable on Linux.
if [[ "$(uname -s)" == "Linux" ]]; then
  case "$(uname -m)" in
    aarch64|arm64) exec zig c++ -target aarch64-linux-gnu.2.35 "$@" ;;
    *) exec zig c++ -target native-linux-gnu.2.35 "$@" ;;
  esac
else
  # macOS (and anything non-Linux): let zig pick the native target.
  exec zig c++ "$@"
fi
9 changes: 8 additions & 1 deletion barretenberg/cpp/scripts/zig-cc.sh
Original file line number Diff line number Diff line change
@@ -1,8 +1,15 @@
#!/bin/bash
# Wrapper for zig cc that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat)
# and uses native target on macOS.
# On ARM64 Linux, use an explicit aarch64 target instead of 'native' to produce
# generic ARM64 code. This prevents CPU-specific instructions (e.g. SVE on Graviton)
# from being emitted, ensuring binaries work across all ARM64 machines including
# Apple Silicon in devcontainers.
# NOTE: removed a stale unconditional `exec zig cc -target native-linux-gnu.2.35`
# (merge residue) that sat before the case statement — exec never returns, so the
# per-architecture dispatch below was unreachable on Linux.
if [[ "$(uname -s)" == "Linux" ]]; then
  case "$(uname -m)" in
    aarch64|arm64) exec zig cc -target aarch64-linux-gnu.2.35 "$@" ;;
    *) exec zig cc -target native-linux-gnu.2.35 "$@" ;;
  esac
else
  # macOS (and anything non-Linux): let zig pick the native target.
  exec zig cc "$@"
fi
Original file line number Diff line number Diff line change
Expand Up @@ -867,17 +867,17 @@ void ContentAddressedCachedTreeStore<LeafValueType>::advance_finalized_block(con
ReadTransactionPtr readTx = create_read_transaction();
get_meta(uncommittedMeta);
get_meta(committedMeta, *readTx, false);
// do nothing if the block is already finalized
if (committedMeta.finalizedBlockHeight >= blockNumber) {
return;
}
if (!dataStore_->read_block_data(blockNumber, blockPayload, *readTx)) {
throw std::runtime_error(format("Unable to advance finalized block: ",
blockNumber,
". Failed to read block data. Tree name: ",
forkConstantData_.name_));
}
}
// do nothing if the block is already finalized
if (committedMeta.finalizedBlockHeight >= blockNumber) {
return;
}

// can currently only finalize up to the unfinalized block height
if (committedMeta.finalizedBlockHeight > committedMeta.unfinalizedBlockHeight) {
Expand Down
16 changes: 12 additions & 4 deletions barretenberg/sol/scripts/init_honk.sh
Original file line number Diff line number Diff line change
@@ -1,14 +1,22 @@
#!/usr/bin/env bash
set -eu
# The verification key is the same for ultra and ultra zk.
SRS_PATH="$HOME/.bb-crs"
OUTPUT_PATH="./src/honk"
KEYGEN="../cpp/build/bin/honk_solidity_key_gen"

# Fail fast with an actionable message if the generator has not been built —
# every invocation below depends on it.
if [ ! -x "$KEYGEN" ]; then
  echo "Error: honk_solidity_key_gen binary not found at $KEYGEN" >&2
  echo "Run barretenberg/cpp bootstrap first." >&2
  exit 1
fi

mkdir -p './src/honk/keys'

# Generate Solidity verification keys for each supported circuit flavour.
# NOTE: removed four stale direct `../cpp/build/bin/...` invocations (merge
# residue) that duplicated these $KEYGEN calls and bypassed the -x check above.
$KEYGEN add2 $OUTPUT_PATH $SRS_PATH
$KEYGEN blake $OUTPUT_PATH $SRS_PATH
$KEYGEN ecdsa $OUTPUT_PATH $SRS_PATH
$KEYGEN recursive $OUTPUT_PATH $SRS_PATH

echo ""
echo "✓ VK generation complete"
Expand Down
2 changes: 1 addition & 1 deletion boxes/boxes/vanilla/app/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ async function updateVoteTally(wallet: Wallet, from: AztecAddress) {
)
);

const batchResult = await new BatchCall(wallet, payloads).simulate({ from });
const { result: batchResult } = await new BatchCall(wallet, payloads).simulate({ from });

batchResult.forEach(({ result: value }, i) => {
results[i + 1] = value;
Expand Down
7 changes: 6 additions & 1 deletion noir/bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,12 @@ function build_native {
set -euo pipefail

if ! cache_download noir-$hash.tar.gz; then
  # Serialize cargo operations to avoid race conditions with avm-transpiler
  # which may run in parallel and share the same CARGO_HOME.
  # NOTE: removed a stale, un-serialized `cargo build` line (merge residue)
  # that duplicated the flock-protected build below and defeated the lock.
  (
    flock -x 200
    cd noir-repo && cargo build --locked --release --target-dir target
  ) 200>/tmp/rustup.lock
  cache_upload noir-$hash.tar.gz noir-repo/target/release/{nargo,acvm,noir-profiler}
fi
}
Expand Down
10 changes: 10 additions & 0 deletions spartan/bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -120,14 +120,24 @@ function run_network_tests {
fi
}

# Post a Slack success message for the given scenario test set.
# Fires only on version-tagged refs (REF_NAME starting with "v");
# branch/PR runs stay silent.
function slack_notify_scenario_pass {
  local label="$1"
  # Guard clause: nothing to do unless this is a release ref.
  [[ "${REF_NAME:-}" == v* ]] || return 0
  slack_notify "Scenario ${label} tests PASSED on *${REF_NAME}*" "#alerts-next-scenario"
}

# Run the first network test set against the given target ($1), then post a
# Slack success notification for "set-1" (release refs only — see
# slack_notify_scenario_pass). set -e semantics skip the notify on failure.
function network_tests_1 {
run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_1[@]}"
slack_notify_scenario_pass "set-1"
}
# Run the second network test set against the given target ($1), then post a
# Slack success notification for "set-2" (release refs only — see
# slack_notify_scenario_pass). set -e semantics skip the notify on failure.
function network_tests_2 {
run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_2[@]}"
slack_notify_scenario_pass "set-2"
}
# Run both network test sets (set-1 and set-2) against the given target ($1)
# in one pass, then post a single Slack success notification labelled "all"
# (release refs only — see slack_notify_scenario_pass).
function network_tests {
run_network_tests "$1" "smoke.test.ts" "${NETWORK_TESTS_1[@]}" "${NETWORK_TESTS_2[@]}"
slack_notify_scenario_pass "all"
}

function network_bench_cmds {
Expand Down
43 changes: 27 additions & 16 deletions spartan/scripts/extract_proving_metrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ const config = parseArgs(process.argv);

interface LogEntry {
timestamp: string;
trace?: string;
jsonPayload?: {
message?: string;
[key: string]: any;
Expand All @@ -88,7 +89,7 @@ interface LogEntry {

function buildFilter(
textFilter: string,
opts?: { module?: string; pod?: string },
opts?: { module?: string; pod?: string; trace?: string },
): string {
const pod = opts?.pod ?? config.pod;
let filter =
Expand All @@ -101,13 +102,16 @@ function buildFilter(
if (opts?.module) {
filter += ` AND jsonPayload.module="${opts.module}"`;
}
if (opts?.trace) {
filter += ` AND trace="${opts.trace}"`;
}
return filter;
}

async function queryLogs(
name: string,
textFilter: string,
opts?: { module?: string; pod?: string },
opts?: { module?: string; pod?: string; trace?: string },
): Promise<LogEntry[]> {
const filter = buildFilter(textFilter, opts);
const cmd = [
Expand All @@ -134,7 +138,7 @@ async function queryLogs(

// ── Epoch auto-detection ─────────────────────────────────────────────────────

async function scanForEpoch(): Promise<{ start: string; end: string }> {
async function scanForEpoch(): Promise<{ start: string; end: string; trace?: string }> {
process.stderr.write(
`Scanning for epoch in ${config.start} to ${config.end}...\n\n`,
);
Expand All @@ -151,6 +155,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> {
epoch: number;
txCount: number;
timestamp: string;
trace?: string;
}[] = [];
for (const entry of epochStarts) {
const m = msg(entry);
Expand All @@ -163,6 +168,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> {
epoch: parseInt(epochMatch[1]),
txCount: p.epochSizeTxs ?? 0,
timestamp: entry.timestamp,
trace: entry.trace,
});
}
}
Expand All @@ -178,20 +184,20 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> {
process.stderr.write(
`Warning: epoch ${config.epoch} not found in scan window. Using full window.\n`,
);
return { start: config.start, end: config.end };
return { start: config.start, end: config.end, trace: undefined };
}
} else {
target = starts.find((s) => s.txCount >= 1);
if (!target) {
process.stderr.write(
`Warning: no epoch with >=1 tx found in scan window. Using full window.\n`,
);
return { start: config.start, end: config.end };
return { start: config.start, end: config.end, trace: undefined };
}
}

process.stderr.write(
`Found epoch ${target.epoch} (${target.txCount} txs) at ${target.timestamp}\n`,
`Found epoch ${target.epoch} (${target.txCount} txs) at ${target.timestamp}${target.trace ? ` trace=${target.trace}` : ""}\n`,
);

// Find matching finalized entry
Expand Down Expand Up @@ -226,7 +232,7 @@ async function scanForEpoch(): Promise<{ start: string; end: string }> {
`Narrowed window: ${narrowedStart} to ${narrowedEnd}\n\n`,
);

return { start: narrowedStart, end: narrowedEnd };
return { start: narrowedStart, end: narrowedEnd, trace: target.trace };
}

// ── Pipeline order for proving job types ─────────────────────────────────────
Expand All @@ -249,11 +255,15 @@ const PIPELINE_ORDER = [

// ── Query definitions ────────────────────────────────────────────────────────

async function fetchAllData() {
async function fetchAllData(trace?: string) {
process.stderr.write(
`Fetching logs for ${config.pod} in ${config.namespace}\n`,
);
process.stderr.write(`Time range: ${config.start} to ${config.end}\n\n`);
process.stderr.write(`Time range: ${config.start} to ${config.end}\n`);
if (trace) {
process.stderr.write(`Trace filter: ${trace}\n`);
}
process.stderr.write("\n");

const brokerPod = `${config.namespace}-prover-broker-0`;

Expand All @@ -268,15 +278,16 @@ async function fetchAllData() {
brokerNewJobs,
brokerCompleteJobs,
] = await Promise.all([
queryLogs("epoch-start", "Starting epoch.*proving job"),
queryLogs("blob-fields", "Blob fields per checkpoint"),
queryLogs("blob-batching", "Final blob batching"),
queryLogs("epoch-start", "Starting epoch.*proving job", { trace }),
queryLogs("blob-fields", "Blob fields per checkpoint", { trace }),
queryLogs("blob-batching", "Final blob batching", { trace }),
queryLogs("starting-block", "Starting block", {
module: "prover-client:orchestrator",
trace,
}),
queryLogs("processed-txs", "Processed.*successful txs"),
queryLogs("adding-txs", "Adding.*transactions to block"),
queryLogs("epoch-finalized", "Finalized proof for epoch"),
queryLogs("processed-txs", "Processed.*successful txs", { trace }),
queryLogs("adding-txs", "Adding.*transactions to block", { trace }),
queryLogs("epoch-finalized", "Finalized proof for epoch", { trace }),
queryLogs("broker-new-jobs", "New proving job", { pod: brokerPod }),
queryLogs("broker-complete-jobs", "Proving job complete", {
pod: brokerPod,
Expand Down Expand Up @@ -841,7 +852,7 @@ async function main() {
config.start = scanResult.start;
config.end = scanResult.end;

const data = await fetchAllData();
const data = await fetchAllData(scanResult.trace);
const output = formatOutput(data);
console.log(output);
}
Expand Down
22 changes: 22 additions & 0 deletions yarn-project/.claude/skills/unit-test-implementation/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,28 @@ beforeEach(() => {
});
```

### NEVER Pass Complex Objects as mock() Props

`jest-mock-extended`'s `mock<T>(props)` deep-processes any objects passed as initial properties. When those objects contain class instances with internal state (like `Fr`, `EthAddress`, `AztecAddress`, `GasFees`, `Buffer`, etc.), this causes **O(2^n) exponential slowdown** across tests — each test doubles the time of the previous one.

```typescript
// ❌ NEVER: Passing complex domain objects as mock props
// This causes exponential test slowdown (1s → 2s → 4s → 8s → ...)
const constants = { chainId: new Fr(1), coinbase: EthAddress.random(), gasFees: GasFees.empty() };
beforeEach(() => {
builder = mock<CheckpointBuilder>({ checkpointNumber, constants });
});

// ✅ GOOD: Create mock without props, then set properties directly
beforeEach(() => {
builder = mock<CheckpointBuilder>();
Object.defineProperty(builder, 'checkpointNumber', { value: checkpointNumber });
Object.defineProperty(builder, 'constants', { value: constants });
});
```

Simple primitives (strings, numbers, booleans) and arrow functions are safe to pass as props. The issue is specifically with class instances that have complex prototypes.

### When to Use Real Instances vs Mocks

**Mock external dependencies** that are:
Expand Down
9 changes: 8 additions & 1 deletion yarn-project/archiver/src/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import {
getConfigFromMappings,
numberConfigHelper,
} from '@aztec/foundation/config';
import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config';
import {
type ChainConfig,
type PipelineConfig,
chainConfigMappings,
pipelineConfigMappings,
} from '@aztec/stdlib/config';
import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server';

/**
Expand All @@ -21,11 +26,13 @@ import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server';
export type ArchiverConfig = ArchiverSpecificConfig &
L1ReaderConfig &
L1ContractsConfig &
PipelineConfig & // required to pass through to epoch cache
BlobClientConfig &
ChainConfig;

export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
...blobClientConfigMapping,
...pipelineConfigMappings,
archiverPollingIntervalMS: {
env: 'ARCHIVER_POLLING_INTERVAL_MS',
description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.',
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/archiver/src/modules/data_store_updater.ts
Original file line number Diff line number Diff line change
Expand Up @@ -457,7 +457,7 @@ export class ArchiverDataStoreUpdater {
if (validFnCount > 0) {
this.log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`);
}
return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns);
await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns);
}
return true;
}
Expand Down
3 changes: 3 additions & 0 deletions yarn-project/aztec-node/src/aztec-node/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,7 @@ import {
createBlockProposalHandler,
createValidatorClient,
} from '@aztec/validator-client';
import type { SlashingProtectionDatabase } from '@aztec/validator-ha-signer/types';
import { createWorldStateSynchronizer } from '@aztec/world-state';

import { createPublicClient } from 'viem';
Expand Down Expand Up @@ -195,6 +196,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
dateProvider?: DateProvider;
p2pClientDeps?: P2PClientDeps;
proverNodeDeps?: Partial<ProverNodeDeps>;
slashingProtectionDb?: SlashingProtectionDatabase;
} = {},
options: {
prefilledPublicData?: PublicDataTreeLeaf[];
Expand Down Expand Up @@ -377,6 +379,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
l1ToL2MessageSource: archiver,
keyStoreManager,
blobClient,
slashingProtectionDb: deps.slashingProtectionDb,
});

// If we have a validator client, register it as a source of offenses for the slasher,
Expand Down
Loading
Loading