diff --git a/.claude/skills/updating-changelog/SKILL.md b/.claude/skills/updating-changelog/SKILL.md index 018859f8833e..1af52b765da4 100644 --- a/.claude/skills/updating-changelog/SKILL.md +++ b/.claude/skills/updating-changelog/SKILL.md @@ -14,7 +14,7 @@ Read `.release-please-manifest.json` to get the version (e.g., `{"." : "4.0.0"}` **Target files:** - Aztec contract developers: `docs/docs-developers/docs/resources/migration_notes.md` -- Node operators and Ethereum contract developers: `docs/docs-network/reference/changelog/v{major}.md` +- Node operators and Ethereum contract developers: `docs/docs-operate/operators/reference/changelog/v{major}.md` ### 2. Analyze Branch Changes @@ -60,7 +60,7 @@ Explanation of what changed. ## Node Operator Changelog Format -**File:** `docs/docs-network/reference/changelog/v{major}.md` +**File:** `docs/docs-operate/operators/reference/changelog/v{major}.md` **Breaking changes:** ````markdown diff --git a/.github/ci3_labels_to_env.sh b/.github/ci3_labels_to_env.sh index 86839f791773..e9a38a2dfdcb 100755 --- a/.github/ci3_labels_to_env.sh +++ b/.github/ci3_labels_to_env.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # Determines CI mode from labels and environment variables. -# Called by ci3.yml to set CI_MODE and related environment variables. +# Called by ci3.yml and ci3-external.yml to set CI_MODE and related environment variables. # Outputs environment variables to GITHUB_ENV for use in subsequent steps. 
set -euo pipefail @@ -23,7 +23,7 @@ function main { local target_branch if [ "${GITHUB_EVENT_NAME:-}" == "merge_group" ]; then target_branch="${MERGE_GROUP_BASE_REF:-}" - elif [ "${GITHUB_EVENT_NAME:-}" == "pull_request" ]; then + elif [ "${GITHUB_EVENT_NAME:-}" == "pull_request" ] || [ "${GITHUB_EVENT_NAME:-}" == "pull_request_target" ]; then target_branch="${PR_BASE_REF:-}" else target_branch="${GITHUB_REF_NAME:-}" diff --git a/.github/workflows/ci3-external.yml b/.github/workflows/ci3-external.yml index 7942f818d93f..a69e864456cb 100644 --- a/.github/workflows/ci3-external.yml +++ b/.github/workflows/ci3-external.yml @@ -59,6 +59,14 @@ jobs: fi GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} gh pr edit ${{ github.event.pull_request.number }} --remove-label "ci-external-once" + - name: Determine CI Mode + env: + MERGE_GROUP_BASE_REF: ${{ github.event.merge_group.base_ref }} + PR_BASE_REF: ${{ github.event.pull_request.base.ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_TOKEN: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} + run: ./.github/ci3_labels_to_env.sh ${{ join(github.event.pull_request.labels.*.name, ' ') }} + - name: Run env: REF_NAME: repo-fork/${{ github.repository }}/${{ github.head_ref }} @@ -78,7 +86,7 @@ jobs: PR_HEAD_REF: ${{ github.event.pull_request.head.ref }} PR_COMMITS: ${{ github.event.pull_request.commits }} GITHUB_REF_NAME: ${{ github.ref_name }} - run: ./.github/ci3.sh "${{ join(github.event.pull_request.labels.*.name, ',') }}" + run: ./.github/ci3.sh $CI_MODE - name: Post-Actions if: always() diff --git a/.github/workflows/ci3.yml b/.github/workflows/ci3.yml index d2b6d6be837c..ec0a4c81b234 100644 --- a/.github/workflows/ci3.yml +++ b/.github/workflows/ci3.yml @@ -94,6 +94,7 @@ jobs: PR_COMMITS: ${{ github.event.pull_request.commits }} PR_NUMBER: ${{ github.event.pull_request.number }} GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_ACTOR: ${{ github.actor }} # NOTE: $CI_MODE is set in the Determine CI Mode step. 
run: ./.github/ci3.sh $CI_MODE diff --git a/.github/workflows/merge-queue-dequeue-notify.yml b/.github/workflows/merge-queue-dequeue-notify.yml index 705e6f7293c9..f9845e025ced 100644 --- a/.github/workflows/merge-queue-dequeue-notify.yml +++ b/.github/workflows/merge-queue-dequeue-notify.yml @@ -1,14 +1,14 @@ -name: Notify Slack on Merge Queue Dequeue +name: Notify Slack on Merge Train Events on: pull_request: - types: [dequeued] + types: [dequeued, closed] jobs: - notify-slack: - name: Notify Slack + notify-dequeued: + name: Notify Slack (Dequeued) runs-on: ubuntu-latest - if: startsWith(github.event.pull_request.head.ref, 'merge-train/') && github.event.pull_request.merged != true + if: github.event.action == 'dequeued' && startsWith(github.event.pull_request.head.ref, 'merge-train/') && github.event.pull_request.merged != true steps: - name: Checkout uses: actions/checkout@v4 @@ -21,3 +21,20 @@ jobs: REF_NAME: ${{ github.event.pull_request.head.ref }} PR_URL: ${{ github.event.pull_request.html_url }} run: ./ci3/merge_train_failure_slack_notify --dequeued + + notify-merged: + name: Notify Slack (Merged) + runs-on: ubuntu-latest + if: github.event.action == 'closed' && github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'merge-train/') + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.base.ref }} + + - name: Send Slack notification + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + REF_NAME: ${{ github.event.pull_request.head.ref }} + PR_URL: ${{ github.event.pull_request.html_url }} + run: ./ci3/merge_train_failure_slack_notify --merged diff --git a/.github/workflows/weekly-proving-bench.yml b/.github/workflows/weekly-proving-bench.yml index f342710184e5..f9f561f6d88e 100644 --- a/.github/workflows/weekly-proving-bench.yml +++ b/.github/workflows/weekly-proving-bench.yml @@ -47,7 +47,7 @@ jobs: fi - name: Run real proving benchmarks - timeout-minutes: 150 + 
timeout-minutes: 180 env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -57,7 +57,7 @@ jobs: GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} RUN_ID: ${{ github.run_id }} - AWS_SHUTDOWN_TIME: 150 + AWS_SHUTDOWN_TIME: 180 NO_SPOT: 1 run: | ./.github/ci3.sh network-proving-bench prove-n-tps-real prove-n-tps-real "aztecprotocol/aztec:${{ steps.nightly-tag.outputs.nightly_tag }}" diff --git a/.test_patterns.yml b/.test_patterns.yml index 18b68fcee5bb..32666e514a5a 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -309,6 +309,11 @@ tests: owners: - *palla + - regex: "src/e2e_block_building.test.ts" + error_regex: "✕ processes txs until hitting timetable" + owners: + - *palla + # http://ci.aztec-labs.com/e8228a36afda93b8 # Test passed but there was an error on stopping - regex: "playground/scripts/run_test.sh" diff --git a/barretenberg/cpp/scripts/audit/audit_scopes/logic_scope_doc.md b/barretenberg/cpp/scripts/audit/audit_scopes/logic_audit_scope.md similarity index 60% rename from barretenberg/cpp/scripts/audit/audit_scopes/logic_scope_doc.md rename to barretenberg/cpp/scripts/audit/audit_scopes/logic_audit_scope.md index 4c053d90b222..3f7951482927 100644 --- a/barretenberg/cpp/scripts/audit/audit_scopes/logic_scope_doc.md +++ b/barretenberg/cpp/scripts/audit/audit_scopes/logic_audit_scope.md @@ -2,20 +2,29 @@ Repository: https://github.com/AztecProtocol/aztec-packages -Commit hash: TBD +Commit hash: e4712cda8def49d75fbba2d361625fc5e21945f5 ## Files to Audit Note: Paths relative to `aztec-packages/barretenberg/cpp/src/barretenberg` +### stdlib (core logic implementation) + 1. `stdlib/primitives/logic/logic.hpp` 2. `stdlib/primitives/logic/logic.cpp` 3. `stdlib_circuit_builders/plookup_tables/uint.hpp` (lookup tables) +### dsl (ACIR interface) + +4. `dsl/acir_format/logic_constraint.hpp` — ACIR struct and gate function declarations +5. 
`dsl/acir_format/logic_constraint.cpp` — Converts ACIR logic constraints into stdlib circuit gates + ## Summary of Module The `logic` module provides circuit-friendly implementations of bitwise logical operations (XOR and AND) over variable-length unsigned integers using plookup tables. +### stdlib layer + Main function: `create_logic_constraint(a, b, num_bits, is_xor_gate)` - Computes `a XOR b` or `a AND b` for inputs up to `num_bits` in length @@ -31,10 +40,19 @@ The implementation: - If both inputs are constants, the operation is computed natively without circuit constraints - If one input is constant, it is converted to a witness before processing +### dsl layer + +The DSL layer bridges ACIR opcodes (`BlackBoxFuncCall::AND`, `BlackBoxFuncCall::XOR`) to the stdlib logic implementation: + +- `acir_to_constraint_buf.cpp` deserializes ACIR AND/XOR black box calls into `LogicConstraint` structs (deserialization file itself is out of scope) +- `create_logic_gate` converts `WitnessOrConstant` inputs to `field_ct` via `to_field_ct`, calls `stdlib::logic::create_logic_constraint`, and asserts the computed result equals the ACIR-provided result witness + ## Test Files 1. `stdlib/primitives/logic/logic.test.cpp` +2. 
`dsl/acir_format/logic_constraint.test.cpp` ## Dependencies - Plookup read: `stdlib/primitives/plookup/plookup.hpp` +- ACIR format core: `dsl/acir_format/acir_format.hpp` (constraint application loop) diff --git a/barretenberg/cpp/src/barretenberg/avm_fuzzer/harness/alu.fuzzer.cpp b/barretenberg/cpp/src/barretenberg/avm_fuzzer/harness/alu.fuzzer.cpp index 37da5d8454f6..50852f90432e 100644 --- a/barretenberg/cpp/src/barretenberg/avm_fuzzer/harness/alu.fuzzer.cpp +++ b/barretenberg/cpp/src/barretenberg/avm_fuzzer/harness/alu.fuzzer.cpp @@ -42,8 +42,9 @@ struct AluFuzzerInput { MemoryValue a; MemoryValue b; MemoryValue c = MemoryValue::from_tag(MemoryTag::FF, 0); // Placeholder for result - int op_id = 0; // For execution trace alu_op_id - + uint16_t op_id = 0; // For execution trace alu_op_id + // We serialise MemoryValues as FF + 1 byte for tag to save 31 bytes per value: + static const size_t size = (3 * (sizeof(FF) + 1)) + sizeof(uint16_t); // Serialize to buffer void to_buffer(uint8_t* buffer) const { @@ -58,7 +59,7 @@ struct AluFuzzerInput { buffer += sizeof(FF) + 1; write_mem_value(buffer, c); buffer += sizeof(FF) + 1; - serialize::write(buffer, static_cast(op_id)); + serialize::write(buffer, op_id); } static AluFuzzerInput from_buffer(const uint8_t* buffer) @@ -88,11 +89,11 @@ struct AluFuzzerInput { extern "C" size_t LLVMFuzzerCustomMutator(uint8_t* data, size_t size, size_t max_size, unsigned int seed) { - if (size < sizeof(AluFuzzerInput)) { + if (size < AluFuzzerInput::size) { // Initialize with default input AluFuzzerInput input; input.to_buffer(data); - return sizeof(AluFuzzerInput); + return AluFuzzerInput::size; } std::mt19937_64 rng(seed); @@ -119,7 +120,6 @@ extern "C" size_t LLVMFuzzerCustomMutator(uint8_t* data, size_t size, size_t max auto random_mem_value_from_tag = [&rng](MemoryTag tag) -> MemoryValue { std::uniform_int_distribution dist(0, std::numeric_limits::max()); - // TODO(MW): Use array? 
FF value = FF(dist(rng), dist(rng), dist(rng), dist(rng)); // Do we want the option of making "invalid tag" values, where the value is out of range for the tag? // These aren't currently possible with this function since MemoryValue::from_tag will throw in that case. @@ -135,9 +135,9 @@ extern "C" size_t LLVMFuzzerCustomMutator(uint8_t* data, size_t size, size_t max // Deserialize current input AluFuzzerInput input = AluFuzzerInput::from_buffer(data); - // Choose random ALU operation + // Choose random ALU operation (11 possible operations with op_id = 2^index) std::uniform_int_distribution dist(0, 11); - input.op_id = 1 << dist(rng); + input.op_id = static_cast(1 << dist(rng)); // Choose test case (TODO(MW): what else do we want here?) dist = std::uniform_int_distribution(0, 4); @@ -187,18 +187,18 @@ extern "C" size_t LLVMFuzzerCustomMutator(uint8_t* data, size_t size, size_t max // Serialize mutated input back to buffer input.to_buffer(data); - if (max_size > sizeof(AluFuzzerInput)) { - return sizeof(AluFuzzerInput); + if (max_size > AluFuzzerInput::size) { + return AluFuzzerInput::size; } - return sizeof(AluFuzzerInput); + return AluFuzzerInput::size; } extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) { using bb::avm2::MemoryValue; - if (size < sizeof(AluFuzzerInput)) { + if (size < AluFuzzerInput::size) { info("Input size too small"); return 0; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.cpp index f25f1b0f1eea..83b66882f4ba 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.cpp @@ -1,5 +1,5 @@ // === AUDIT STATUS === -// internal: { status: Planned, auditors: [], commit: } +// internal: { status: Complete, auditors: [Suyash], commit: e4712cda8def49d75fbba2d361625fc5e21945f5 } // external_1: { status: not started, auditors: 
[], commit: } // external_2: { status: not started, auditors: [], commit: } // ===================== diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.hpp index 0a6775ed53fa..02f7e73f871e 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/logic_constraint.hpp @@ -1,5 +1,5 @@ // === AUDIT STATUS === -// internal: { status: Planned, auditors: [], commit: } +// internal: { status: Complete, auditors: [Suyash], commit: e4712cda8def49d75fbba2d361625fc5e21945f5 } // external_1: { status: not started, auditors: [], commit: } // external_2: { status: not started, auditors: [], commit: } // ===================== diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.cpp index a8a89fc34e9c..e9414e3ed14d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.cpp @@ -1,5 +1,5 @@ // === AUDIT STATUS === -// internal: { status: Planned, auditors: [], commit: } +// internal: { status: Complete, auditors: [Suyash], commit: e4712cda8def49d75fbba2d361625fc5e21945f5 } // external_1: { status: not started, auditors: [], commit: } // external_2: { status: not started, auditors: [], commit: } // ===================== diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.hpp index f45aaa9e8a04..cebd44992273 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/logic/logic.hpp @@ -1,5 +1,5 @@ // === AUDIT STATUS === -// internal: { status: Planned, auditors: [], commit: } +// internal: { status: Complete, auditors: [Suyash], commit: 
e4712cda8def49d75fbba2d361625fc5e21945f5 } // external_1: { status: not started, auditors: [], commit: } // external_2: { status: not started, auditors: [], commit: } // ===================== diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/uint.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/uint.hpp index f56c30a86b61..0587289d7909 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/uint.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/uint.hpp @@ -1,5 +1,5 @@ // === AUDIT STATUS === -// internal: { status: Planned, auditors: [], commit: } +// internal: { status: Complete, auditors: [Suyash], commit: e4712cda8def49d75fbba2d361625fc5e21945f5 } // external_1: { status: not started, auditors: [], commit: } // external_2: { status: not started, auditors: [], commit: } // ===================== diff --git a/barretenberg/cpp/src/barretenberg/vm2/common/aztec_constants.hpp b/barretenberg/cpp/src/barretenberg/vm2/common/aztec_constants.hpp index e320408eb05d..3890636f80b3 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/common/aztec_constants.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/common/aztec_constants.hpp @@ -237,12 +237,12 @@ #define AVM_TORADIXBE_DYN_L2_GAS 3 #define AVM_BITWISE_DYN_L2_GAS 3 #define AVM_EMITPUBLICLOG_DYN_L2_GAS 3 -#define AVM_EMITNOTEHASH_BASE_DA_GAS 512 -#define AVM_EMITNULLIFIER_BASE_DA_GAS 512 -#define AVM_SENDL2TOL1MSG_BASE_DA_GAS 512 -#define AVM_EMITPUBLICLOG_BASE_DA_GAS 1024 -#define AVM_EMITPUBLICLOG_DYN_DA_GAS 512 -#define AVM_SSTORE_DYN_DA_GAS 1024 +#define AVM_EMITNOTEHASH_BASE_DA_GAS 32 +#define AVM_EMITNULLIFIER_BASE_DA_GAS 32 +#define AVM_SENDL2TOL1MSG_BASE_DA_GAS 32 +#define AVM_EMITPUBLICLOG_BASE_DA_GAS 64 +#define AVM_EMITPUBLICLOG_DYN_DA_GAS 32 +#define AVM_SSTORE_DYN_DA_GAS 64 #define AVM_WRITTEN_PUBLIC_DATA_SLOTS_TREE_HEIGHT 6 #define AVM_WRITTEN_PUBLIC_DATA_SLOTS_TREE_INITIAL_ROOT \ 
"0x2870b93163d4fd6ada360fe48ee1e8e8e69308af34cdfaeffacbbe5929e2466d" diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/avm_fixed_vk.hpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/avm_fixed_vk.hpp index d693bbc47d49..93e925fff4c8 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/avm_fixed_vk.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/avm_fixed_vk.hpp @@ -17,7 +17,7 @@ class AvmHardCodedVKAndHash { using FF = bb::curve::BN254::ScalarField; // Precomputed VK hash (hash of all commitments below). - static FF vk_hash() { return FF(uint256_t("0x1f47e68404e58fcde864ff61573d61acab5832efb230f9a32ae68044d31dfd19")); } + static FF vk_hash() { return FF(uint256_t("0x02296b934ced1a5cdacae120d2032d88a119bdb0738d4c4f3ada4f5a831a5153")); } static constexpr std::array get_all() { @@ -59,13 +59,13 @@ class AvmHardCodedVKAndHash { uint256_t( "0x09f2eef32136799118634a108531dc248506d5f58f64885575b245865b56d48e")), // precomputed_exec_opcode Commitment( - uint256_t("0x0188169f0225c14e925347c4f9f192d191b2ab6ca2fbc1d0453f48af5d9c667b"), + uint256_t("0x09bd44905d676585d8c7a91c8ba8fd6b1b598326cb80b95e80b4b39703c7e2c8"), uint256_t( - "0x25b997de6f92af3ea3b409b41437fea01980b344a12fabc6b833656f26d6e954")), // precomputed_exec_opcode_base_da_gas + "0x1bec3a67476715f88745bc09b1ea57859c8fe809fae4376efab6ba773ea7f6d4")), // precomputed_exec_opcode_base_da_gas Commitment( - uint256_t("0x1ddbbb27c627edafce021d5f332867ac9234c6f507442633bff9a5dbb4d02803"), + uint256_t("0x0f5b3fee86f9815eb0be052412de7f2a4c82f678604ba9e161c4412529810057"), uint256_t( - "0x10fda7a2360b21bbfbb1e815377adc0fa869bbcb4a46b29c31ae017893fdfb0d")), // precomputed_exec_opcode_dynamic_da_gas + "0x1ad065dec1d51664807b4d551d0eb8abe0b061b8380dde6d662e2df36a1f85c8")), // precomputed_exec_opcode_dynamic_da_gas Commitment( uint256_t("0x06c03e425e92d09aa8243220a0968b4d7d00c89e541a2b6095920883a8a6fa72"), uint256_t( diff --git 
a/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin b/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin index b92184d5c05e..44bfd7a9a088 100644 Binary files a/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin and b/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin differ diff --git a/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin b/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin index cd68b285a97c..57ac06aafc7d 100644 Binary files a/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin and b/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin differ diff --git a/barretenberg/docs/docs/bb-cli-reference.md b/barretenberg/docs/docs/bb-cli-reference.md index d56d5bfc32aa..f16dad82d5b5 100644 --- a/barretenberg/docs/docs/bb-cli-reference.md +++ b/barretenberg/docs/docs/bb-cli-reference.md @@ -10,7 +10,7 @@ sidebar_position: 1000 *This documentation is auto-generated from the `bb` CLI help output.* -*Generated: Tue 17 Feb 2026 04:44:25 UTC* +*Generated: Wed 18 Feb 2026 04:46:30 UTC* *Command: `bb`* diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/bb-cli-reference.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/bb-cli-reference.md similarity index 99% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/bb-cli-reference.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/bb-cli-reference.md index d56d5bfc32aa..f16dad82d5b5 100644 --- a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/bb-cli-reference.md +++ b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/bb-cli-reference.md @@ -10,7 +10,7 @@ sidebar_position: 1000 *This documentation is auto-generated from the `bb` CLI help output.* -*Generated: Tue 17 Feb 2026 04:44:25 UTC* +*Generated: Wed 18 Feb 2026 04:46:30 UTC* *Command: `bb`* 
diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/advanced/_category_.json b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/advanced/_category_.json similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/advanced/_category_.json rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/advanced/_category_.json diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/advanced/chonk.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/advanced/chonk.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/advanced/chonk.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/advanced/chonk.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/recursive_aggregation.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/recursive_aggregation.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/explainers/recursive_aggregation.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/explainers/recursive_aggregation.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/getting_started.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/getting_started.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/getting_started.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/getting_started.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/_category_.json b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/_category_.json similarity index 100% rename from 
barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/_category_.json rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/_category_.json diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/how-to-solidity-verifier.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/how-to-solidity-verifier.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/how-to-solidity-verifier.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/how-to-solidity-verifier.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/on-the-browser.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/on-the-browser.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/on-the-browser.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/on-the-browser.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/recursive_aggregation.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/recursive_aggregation.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/how_to_guides/recursive_aggregation.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/how_to_guides/recursive_aggregation.md diff --git a/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/index.md b/barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/index.md similarity index 100% rename from barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260217/index.md rename to barretenberg/docs/versioned_docs/version-v4.0.0-nightly.20260218/index.md diff --git 
a/barretenberg/docs/versioned_sidebars/version-v4.0.0-nightly.20260217-sidebars.json b/barretenberg/docs/versioned_sidebars/version-v4.0.0-nightly.20260218-sidebars.json similarity index 100% rename from barretenberg/docs/versioned_sidebars/version-v4.0.0-nightly.20260217-sidebars.json rename to barretenberg/docs/versioned_sidebars/version-v4.0.0-nightly.20260218-sidebars.json diff --git a/barretenberg/docs/versions.json b/barretenberg/docs/versions.json index 1f0a17ae46f1..b55e32974b6e 100644 --- a/barretenberg/docs/versions.json +++ b/barretenberg/docs/versions.json @@ -1,4 +1,4 @@ [ "v0.87.0", - "v4.0.0-nightly.20260217" + "v4.0.0-nightly.20260218" ] diff --git a/barretenberg/sol/scripts/copy_optimized_to_cpp.sh b/barretenberg/sol/scripts/copy_optimized_to_cpp.sh index d4e475a28cdc..2ce32b6d2e18 100755 --- a/barretenberg/sol/scripts/copy_optimized_to_cpp.sh +++ b/barretenberg/sol/scripts/copy_optimized_to_cpp.sh @@ -97,22 +97,17 @@ awk ' awk ' BEGIN { in_unroll = 0 - unroll_label = "" } # Detect UNROLL_SECTION_START /\{\{[[:space:]]*UNROLL_SECTION_START[[:space:]]+[^}]+\}\}/ { print # Print the start marker in_unroll = 1 - # Extract the label for matching with END - match($0, /UNROLL_SECTION_START[[:space:]]+([^[:space:]}\]]+)/, arr) - unroll_label = arr[1] next } # Detect UNROLL_SECTION_END /\{\{[[:space:]]*UNROLL_SECTION_END[[:space:]]+[^}]+\}\}/ { print # Print the end marker in_unroll = 0 - unroll_label = "" next } # Skip lines inside unroll sections diff --git a/bootstrap.sh b/bootstrap.sh index cb0ca10bdeae..edc7bb5775fa 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -233,11 +233,14 @@ function start_txes { export TOKIO_WORKER_THREADS=1 # Starting txe servers with incrementing port numbers. + # Base port is below the Linux ephemeral range (32768-60999) to avoid conflicts. 
+ local txe_base_port=14730 for i in $(seq 0 $((NUM_TXES-1))); do - port=$((45730 + i)) + port=$((txe_base_port + i)) existing_pid=$(lsof -ti :$port || true) if [ -n "$existing_pid" ]; then echo "Killing existing process $existing_pid on port: $port" + check_port $port kill -9 $existing_pid &>/dev/null || true while kill -0 $existing_pid &>/dev/null; do sleep 0.1; done fi @@ -248,8 +251,12 @@ function start_txes { echo "Waiting for TXE's to start..." for i in $(seq 0 $((NUM_TXES-1))); do local j=0 - while ! nc -z 127.0.0.1 $((45730 + i)) &>/dev/null; do - [ $j == 60 ] && echo_stderr "TXE $i took too long to start. Exiting." && exit 1 + while ! nc -z 127.0.0.1 $((txe_base_port + i)) &>/dev/null; do + if [ $j == 60 ]; then + echo_stderr "TXE $i failed to start on port $((txe_base_port + i)) after 60s." + check_port $((txe_base_port + i)) + exit 1 + fi sleep 1 j=$((j+1)) done diff --git a/boxes/boxes/vanilla/app/embedded-wallet.ts b/boxes/boxes/vanilla/app/embedded-wallet.ts index 112348e05e99..04951e383ee9 100644 --- a/boxes/boxes/vanilla/app/embedded-wallet.ts +++ b/boxes/boxes/vanilla/app/embedded-wallet.ts @@ -1,68 +1,39 @@ -import { Account, SignerlessAccount } from '@aztec/aztec.js/account'; import { AztecAddress } from '@aztec/aztec.js/addresses'; import { getContractInstanceFromInstantiationParams, - InteractionWaitOptions, + type InteractionWaitOptions, } from '@aztec/aztec.js/contracts'; import { SponsoredFeePaymentMethod } from '@aztec/aztec.js/fee'; import { Fr } from '@aztec/aztec.js/fields'; import { createLogger } from '@aztec/aztec.js/log'; -import { createAztecNodeClient } from '@aztec/aztec.js/node'; -import { - AccountManager, - DeployAccountOptions, - SimulateOptions, -} from '@aztec/aztec.js/wallet'; -import { type FeeOptions, BaseWallet } from '@aztec/wallet-sdk/base-wallet'; +import { DeployAccountOptions } from '@aztec/aztec.js/wallet'; +import type { AztecNode } from '@aztec/aztec.js/node'; +import { type FeeOptions } from 
'@aztec/wallet-sdk/base-wallet'; import { SPONSORED_FPC_SALT } from '@aztec/constants'; import type { FieldsOf } from '@aztec/foundation/types'; import { randomBytes } from '@aztec/foundation/crypto/random'; -import { EcdsaRAccountContract } from '@aztec/accounts/ecdsa/lazy'; -import { SchnorrAccountContract } from '@aztec/accounts/schnorr/lazy'; - -import { getPXEConfig } from '@aztec/pxe/config'; -import { createPXE } from '@aztec/pxe/client/lazy'; import { getInitialTestAccountsData } from '@aztec/accounts/testing/lazy'; -import { - getStubAccountContractArtifact, - createStubAccount, -} from '@aztec/accounts/stub/lazy'; -import { ExecutionPayload, mergeExecutionPayloads } from '@aztec/stdlib/tx'; -import { TxSimulationResult } from '@aztec/stdlib/tx'; import { GasSettings } from '@aztec/stdlib/gas'; +import { AccountFeePaymentMethodOptions } from '@aztec/entrypoints/account'; import { - AccountFeePaymentMethodOptions, - DefaultAccountEntrypointOptions, -} from '@aztec/entrypoints/account'; - -const PROVER_ENABLED = true; + EmbeddedWallet as EmbeddedWalletBase, + type EmbeddedWalletOptions, +} from '@aztec/wallets/embedded'; const logger = createLogger('wallet'); const LocalStorageKey = 'aztec-account'; // This is a minimal implementation of an Aztec wallet -// WARNING: This example code stores the wallet in plain text in LocalStorage. Do not use in production without understanding the security implications -export class EmbeddedWallet extends BaseWallet { - connectedAccount: AztecAddress | null = null; - protected accounts: Map = new Map(); - - protected async getAccountFromAddress( - address: AztecAddress - ): Promise { - let account: Account | undefined; - if (address.equals(AztecAddress.ZERO)) { - account = new SignerlessAccount(); - } else { - account = this.accounts.get(address?.toString() ?? 
''); - } - - if (!account) { - throw new Error(`Account not found in wallet for address: ${address}`); - } - - return account; +export class EmbeddedWallet extends EmbeddedWalletBase { + static override create( + nodeOrUrl: string | AztecNode, + options?: EmbeddedWalletOptions + ): Promise { + return super.create(nodeOrUrl, options); } + connectedAccount: AztecAddress | null = null; + /** * Completes partial user-provided fee options with wallet defaults. * This wallet will use the sponsoredFPC payment method unless otherwise stated. @@ -79,16 +50,17 @@ export class EmbeddedWallet extends BaseWallet { const maxFeesPerGas = gasSettings?.maxFeesPerGas ?? (await this.aztecNode.getCurrentMinFees()).mul(1 + this.minFeePadding); - let walletFeePaymentMethod; let accountFeePaymentMethodOptions; - // The transaction does not include a fee payment method, so we set a default + let walletFeePaymentMethod; + // The transaction does not include a fee payment method, so we + // use the sponsoredFPC if (!feePayer) { - const sponsoredFPCContract = - await EmbeddedWallet.#getSponsoredPFCContract(); + accountFeePaymentMethodOptions = AccountFeePaymentMethodOptions.EXTERNAL; + const sponsoredFPCAddress = await this.#getSponsoredFPCAddress(); + walletFeePaymentMethod = new SponsoredFeePaymentMethod( - sponsoredFPCContract.instance.address + sponsoredFPCAddress ); - accountFeePaymentMethodOptions = AccountFeePaymentMethodOptions.EXTERNAL; } else { // The transaction includes fee payment method, so we check if we are the fee payer for it // (this can only happen if the embedded payment method is FeeJuiceWithClaim) @@ -108,50 +80,29 @@ export class EmbeddedWallet extends BaseWallet { }; } - getAccounts() { - return Promise.resolve( - Array.from(this.accounts.values()).map((acc) => ({ - alias: '', - item: acc.getAddress(), - })) - ); - } - - static async initialize(nodeUrl: string) { - // Create Aztec Node Client - const aztecNode = createAztecNodeClient(nodeUrl); - - // Create PXE - 
const config = getPXEConfig(); - config.l1Contracts = await aztecNode.getL1ContractAddresses(); - config.proverEnabled = PROVER_ENABLED; - const pxe = await createPXE(aztecNode, config, {}); - - // Register Sponsored FPC Contract with PXE - await pxe.registerContract(await EmbeddedWallet.#getSponsoredPFCContract()); - - // Log the Node Info - const nodeInfo = await aztecNode.getNodeInfo(); - logger.info('PXE Connected to node', nodeInfo); - return new EmbeddedWallet(pxe, aztecNode); - } - // Internal method to use the Sponsored FPC Contract for fee payment - static async #getSponsoredPFCContract() { + async #getSponsoredFPCAddress() { const { SponsoredFPCContractArtifact } = await import( '@aztec/noir-contracts.js/SponsoredFPC' ); - const instance = await getContractInstanceFromInstantiationParams( - SponsoredFPCContractArtifact, - { - salt: new Fr(SPONSORED_FPC_SALT), - } + const sponsoredFPCInstance = + await getContractInstanceFromInstantiationParams( + SponsoredFPCContractArtifact, + { + salt: new Fr(SPONSORED_FPC_SALT), + } + ); + const { instance } = await this.getContractMetadata( + sponsoredFPCInstance.address ); + if (!instance) { + await this.registerContract( + sponsoredFPCInstance, + SponsoredFPCContractArtifact + ); + } - return { - instance, - artifact: SponsoredFPCContractArtifact, - }; + return sponsoredFPCInstance.address; } getConnectedAccount() { @@ -161,37 +112,20 @@ export class EmbeddedWallet extends BaseWallet { return this.connectedAccount; } - private async registerAccount(accountManager: AccountManager) { - const instance = await accountManager.getInstance(); - const artifact = await accountManager - .getAccountContract() - .getContractArtifact(); - - await this.registerContract( - instance, - artifact, - accountManager.getSecretKey() - ); - } - async connectTestAccount(index: number) { const testAccounts = await getInitialTestAccountsData(); const accountData = testAccounts[index]; - const accountManager = await AccountManager.create( - 
this, + const accountManager = await this.createAndStoreAccount( + `test-account-${index}`, + 'schnorr', accountData.secret, - new SchnorrAccountContract(accountData.signingKey), - accountData.salt - ); - - await this.registerAccount(accountManager); - this.accounts.set( - accountManager.address.toString(), - await accountManager.getAccount() + accountData.salt, + accountData.signingKey.toBuffer() ); this.connectedAccount = accountManager.address; + return this.connectedAccount; } @@ -207,24 +141,21 @@ export class EmbeddedWallet extends BaseWallet { const signingKey = randomBytes(32); // Create an ECDSA account - const contract = new EcdsaRAccountContract(signingKey); - const accountManager = await AccountManager.create( - this, + const accountManager = await this.createAndStoreAccount( + `main`, + 'ecdsasecp256r1', secretKey, - contract, - salt + salt, + signingKey ); - // Deploy the account const deployMethod = await accountManager.getDeployMethod(); - const sponsoredPFCContract = - await EmbeddedWallet.#getSponsoredPFCContract(); + const sponsoredFPCAddress = await this.#getSponsoredFPCAddress(); + const deployOpts: DeployAccountOptions = { from: AztecAddress.ZERO, fee: { - paymentMethod: new SponsoredFeePaymentMethod( - sponsoredPFCContract.instance.address - ), + paymentMethod: new SponsoredFeePaymentMethod(sponsoredFPCAddress), }, skipClassPublication: true, skipInstancePublication: true, @@ -236,137 +167,20 @@ export class EmbeddedWallet extends BaseWallet { logger.info('Account deployed', receipt); // Store the account in local storage - localStorage.setItem( - LocalStorageKey, - JSON.stringify({ - address: accountManager.address.toString(), - signingKey: signingKey.toString('hex'), - secretKey: secretKey.toString(), - salt: salt.toString(), - }) - ); + localStorage.setItem(LocalStorageKey, accountManager.address.toString()); - // Register the account with PXE - await this.registerAccount(accountManager); - this.accounts.set( - 
accountManager.address.toString(), - await accountManager.getAccount() - ); this.connectedAccount = accountManager.address; return this.connectedAccount; } async connectExistingAccount() { // Read key from local storage and create the account - const account = localStorage.getItem(LocalStorageKey); - if (!account) { + const address = localStorage.getItem(LocalStorageKey); + if (!address) { return null; } - const parsed = JSON.parse(account); - - const contract = new EcdsaRAccountContract( - Buffer.from(parsed.signingKey, 'hex') - ); - const accountManager = await AccountManager.create( - this, - Fr.fromString(parsed.secretKey), - contract, - Fr.fromString(parsed.salt) - ); - - await this.registerAccount(accountManager); - this.accounts.set( - accountManager.address.toString(), - await accountManager.getAccount() - ); - this.connectedAccount = accountManager.address; + const parsed = AztecAddress.fromString(address); + this.connectedAccount = parsed; return this.connectedAccount; } - - /** - * Creates a stub account that impersonates the given address, allowing kernelless simulations - * to bypass the account's authorization mechanisms via contract overrides. - * @param address - The address of the account to impersonate - * @returns The stub account, contract instance, and artifact for simulation - */ - private async getFakeAccountDataFor(address: AztecAddress) { - const originalAccount = await this.getAccountFromAddress(address); - // Account contracts can only be overridden if they have an associated address - // Overwriting SignerlessAccount is not supported, and does not really make sense - // since it has no authorization mechanism. 
- if (originalAccount instanceof SignerlessAccount) { - throw new Error( - `Cannot create fake account data for SignerlessAccount at address: ${address}` - ); - } - const originalAddress = (originalAccount as Account).getCompleteAddress(); - const contractInstance = await this.pxe.getContractInstance( - originalAddress.address - ); - if (!contractInstance) { - throw new Error( - `No contract instance found for address: ${originalAddress.address}` - ); - } - const stubAccount = createStubAccount(originalAddress); - const StubAccountContractArtifact = await getStubAccountContractArtifact(); - const instance = await getContractInstanceFromInstantiationParams( - StubAccountContractArtifact, - { salt: Fr.random() } - ); - return { - account: stubAccount, - instance, - artifact: StubAccountContractArtifact, - }; - } - - async simulateTx( - executionPayload: ExecutionPayload, - opts: SimulateOptions - ): Promise { - const feeOptions = opts.fee?.estimateGas - ? await this.completeFeeOptionsForEstimation( - opts.from, - executionPayload.feePayer, - opts.fee?.gasSettings - ) - : await this.completeFeeOptions( - opts.from, - executionPayload.feePayer, - opts.fee?.gasSettings - ); - const feeExecutionPayload = - await feeOptions.walletFeePaymentMethod?.getExecutionPayload(); - const executionOptions: DefaultAccountEntrypointOptions = { - txNonce: Fr.random(), - cancellable: this.cancellableTransactions, - feePaymentMethodOptions: feeOptions.accountFeePaymentMethodOptions, - }; - const finalExecutionPayload = feeExecutionPayload - ? 
mergeExecutionPayloads([feeExecutionPayload, executionPayload]) - : executionPayload; - const { - account: fromAccount, - instance, - artifact, - } = await this.getFakeAccountDataFor(opts.from); - const chainInfo = await this.getChainInfo(); - const txRequest = await fromAccount.createTxExecutionRequest( - finalExecutionPayload, - feeOptions.gasSettings, - chainInfo, - executionOptions - ); - const contractOverrides = { - [opts.from.toString()]: { instance, artifact }, - }; - return this.pxe.simulateTx(txRequest, { - simulatePublic: true, - skipTxValidation: true, - skipFeeEnforcement: true, - overrides: { contracts: contractOverrides }, - scopes: this.scopesFor(opts.from) - }); - } } diff --git a/boxes/boxes/vanilla/app/main.ts b/boxes/boxes/vanilla/app/main.ts index 465e587255bf..fa55a4cae208 100644 --- a/boxes/boxes/vanilla/app/main.ts +++ b/boxes/boxes/vanilla/app/main.ts @@ -1,5 +1,9 @@ +import './style.css'; import { AztecAddress } from '@aztec/aztec.js/addresses'; -import { getContractInstanceFromInstantiationParams } from '@aztec/aztec.js/contracts'; +import { + BatchCall, + getContractInstanceFromInstantiationParams, +} from '@aztec/aztec.js/contracts'; import { Fr } from '@aztec/aztec.js/fields'; import type { Wallet } from '@aztec/aztec.js/wallet'; import { EmbeddedWallet } from './embedded-wallet'; @@ -24,11 +28,12 @@ const testAccountNumber = document.querySelector( )!; // Local variables -let wallet: EmbeddedWallet; +let wallet; let contractAddress = process.env.CONTRACT_ADDRESS; let deployerAddress = process.env.DEPLOYER_ADDRESS; let deploymentSalt = process.env.DEPLOYMENT_SALT; let nodeUrl = process.env.AZTEC_NODE_URL; +let electionId = Fr.fromString(process.env.ELECTION_ID); // On page load document.addEventListener('DOMContentLoaded', async () => { @@ -39,7 +44,9 @@ document.addEventListener('DOMContentLoaded', async () => { // Initialize the PXE and the wallet displayStatusMessage('Connecting to node and initializing wallet...'); - wallet = await 
EmbeddedWallet.initialize(nodeUrl); + wallet = await EmbeddedWallet.create(nodeUrl, { + pxeConfig: { proverEnabled: true }, + }); // Register voting contract with wallet/PXE displayStatusMessage('Registering contracts...'); @@ -154,7 +161,7 @@ voteButton.addEventListener('click', async (e) => { // Send tx await votingContract.methods - .cast_vote(candidate) + .cast_vote({ id: electionId }, candidate) .send({ from: connectedAccount }); // Update tally @@ -185,16 +192,18 @@ async function updateVoteTally(wallet: Wallet, from: AztecAddress) { wallet ); - await Promise.all( - Array.from({ length: 5 }, async (_, i) => { - const value = await votingContract.methods - .get_vote(i + 1) - .simulate({ from }); - results[i + 1] = value; - }) + const payloads = await Promise.all( + Array.from({ length: 5 }, async (_, i) => + votingContract.methods.get_tally({ id: electionId }, i + 1).request() + ) ); - // Display the tally + const batchResult = await new BatchCall(wallet, payloads).simulate({ from }); + + batchResult.forEach((value, i) => { + results[i + 1] = value; + }); + displayTally(results); displayStatusMessage(''); } diff --git a/boxes/boxes/vanilla/contracts/src/main.nr b/boxes/boxes/vanilla/contracts/src/main.nr index b5ef8266aa53..5062ff09c892 100644 --- a/boxes/boxes/vanilla/contracts/src/main.nr +++ b/boxes/boxes/vanilla/contracts/src/main.nr @@ -1,65 +1,88 @@ +mod test; use aztec::macros::aztec; -/** - * WARNING: this is no-longer considered a good example of an Aztec contract, - * because it touches low-level functions and concepts that oughtn't be - * seen by a typical user. - * The syntax and user-experience of Aztec contracts has since improved, so you - * should seek alternative examples, please. 
- */ - #[aztec] pub contract PrivateVoting { - use aztec::keys::getters::get_public_keys; - use aztec::macros::{ - functions::{external, initializer, only_self}, - storage::storage, - }; - use aztec::state_vars::{Map, PublicImmutable, PublicMutable}; - use aztec::protocol::{address::AztecAddress, hash::poseidon2_hash, traits::{Hash, ToField}}; + // docs:start:imports + use aztec::macros::{functions::{external, initializer, only_self, view}, storage::storage}; + use aztec::protocol::{address::AztecAddress, traits::{Deserialize, Serialize, ToField}}; + use aztec::state_vars::{Map, Owned, PublicImmutable, PublicMutable, SingleUseClaim}; + #[derive(Serialize, Deserialize)] + pub struct ElectionId { + id: Field, + } + + impl ElectionId { + pub fn new(id: Field) -> Self { + Self { id } + } + } + + impl ToField for ElectionId { + fn to_field(self) -> Field { + self.id + } + } + + // docs:end:imports + // docs:start:storage_struct #[storage] struct Storage { - admin: PublicMutable, // admin can end vote - tally: Map, Context>, // we will store candidate as key and number of votes as value - vote_ended: PublicMutable, // vote_ended is boolean - active_at_block: PublicImmutable, // when people can start voting + // admin can start and end elections + admin: PublicMutable, + // election => candidate => number of votes + tally: Map, Context>, Context>, + // election => election ended + vote_ended: Map, Context>, + // election => election started at + active_at_block: Map, Context>, + // election => voter => single use claim that ensures voter can at most vote once per election + vote_claims: Map, Context>, Context>, } + // docs:end:storage_struct + // docs:start:constructor #[external("public")] #[initializer] fn constructor(admin: AztecAddress) { self.storage.admin.write(admin); - self.storage.vote_ended.write(false); - self.storage.active_at_block.initialize(self.context.block_number()); } + // docs:end:constructor #[external("private")] - fn cast_vote(candidate: Field) { - 
let msg_sender_npk_m_hash = get_public_keys(self.msg_sender()).npk_m.hash(); - - let secret = self.context.request_nhk_app(msg_sender_npk_m_hash); // get nullifier hiding key of caller of function - let nullifier = poseidon2_hash([self.msg_sender().to_field(), secret]); // derive nullifier from sender and secret - self.context.push_nullifier(nullifier); - self.enqueue_self.add_to_tally_public(candidate); + fn cast_vote(election_id: ElectionId, candidate: Field) { + self.storage.vote_claims.at(election_id).at(self.msg_sender()).claim(); + self.enqueue_self.add_to_tally_public(election_id, candidate); } + // docs:start:nested_map_access #[external("public")] #[only_self] - fn add_to_tally_public(candidate: Field) { - assert(self.storage.vote_ended.read() == false, "Vote has ended"); // assert that vote has not ended - let new_tally = self.storage.tally.at(candidate).read() + 1; - self.storage.tally.at(candidate).write(new_tally); + fn add_to_tally_public(election_id: ElectionId, candidate: Field) { + assert(self.storage.active_at_block.at(election_id).read() <= self.context.block_number()); + assert(self.storage.vote_ended.at(election_id).read() == false, "Vote has ended"); // assert that vote has not ended + let new_tally = self.storage.tally.at(election_id).at(candidate).read() + 1; + self.storage.tally.at(election_id).at(candidate).write(new_tally); } + // docs:end:nested_map_access #[external("public")] - fn end_vote() { + fn start_vote(election_id: ElectionId) { + assert(self.storage.admin.read().eq(self.msg_sender()), "Only admin can start votes"); // assert that caller is admin + self.storage.vote_ended.at(election_id).write(false); + self.storage.active_at_block.at(election_id).initialize(self.context.block_number()); + } + + #[external("public")] + fn end_vote(election_id: ElectionId) { assert(self.storage.admin.read().eq(self.msg_sender()), "Only admin can end votes"); // assert that caller is admin - self.storage.vote_ended.write(true); + 
self.storage.vote_ended.at(election_id).write(true); } - #[external("utility")] - unconstrained fn get_vote(candidate: Field) -> Field { - self.storage.tally.at(candidate).read() + #[external("public")] + #[view] + fn get_tally(election_id: ElectionId, candidate: Field) -> Field { + self.storage.tally.at(election_id).at(candidate).read() } } diff --git a/boxes/boxes/vanilla/contracts/src/test/first.nr b/boxes/boxes/vanilla/contracts/src/test/first.nr new file mode 100644 index 000000000000..1c337d3db8ee --- /dev/null +++ b/boxes/boxes/vanilla/contracts/src/test/first.nr @@ -0,0 +1,153 @@ +use crate::test::utils; +use aztec::protocol::storage::map::derive_storage_slot_in_map; + +use crate::PrivateVoting; +use crate::PrivateVoting::ElectionId; + +#[test] +unconstrained fn test_initializer() { + let (mut env, voting_contract_address, admin) = utils::setup(); + + env.public_context_at(voting_contract_address, |context| { + let current_admin = context.storage_read(PrivateVoting::storage_layout().admin.slot); + assert_eq(current_admin, admin); + }); +} + +#[test] +unconstrained fn test_check_vote_status() { + let (mut env, voting_contract_address, _) = utils::setup(); + + env.public_context_at(voting_contract_address, |context| { + let vote_ended = context.storage_read(PrivateVoting::storage_layout().vote_ended.slot); + assert_eq(vote_ended, false); + }); +} + +#[test] +unconstrained fn test_end_vote() { + let (env, voting_contract_address, admin) = utils::setup(); + + let election_id = ElectionId::new(Field::from(42)); + env.call_public(admin, PrivateVoting::at(voting_contract_address).end_vote(election_id)); + + env.public_context_at(voting_contract_address, |context| { + let vote_ended = context.storage_read(derive_storage_slot_in_map( + PrivateVoting::storage_layout().vote_ended.slot, + election_id, + )); + assert_eq(vote_ended, true); + }); +} + +#[test(should_fail_with = "Only admin can end votes")] +unconstrained fn test_fail_end_vote_by_non_admin() { + let (mut 
env, voting_contract_address, _) = utils::setup(); + let alice = env.create_light_account(); + let election_id = ElectionId::new(Field::from(42)); + + env.call_public(alice, PrivateVoting::at(voting_contract_address).end_vote(election_id)); +} + +#[test] +unconstrained fn test_cast_vote() { + let (mut env, voting_contract_address, admin) = utils::setup(); + let alice = env.create_light_account(); + + let election_id = ElectionId::new(Field::from(42)); + env.call_public(admin, PrivateVoting::at(voting_contract_address).start_vote(election_id)); + + let candidate = 1; + env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id, candidate), + ); + + let tally = env.view_public(PrivateVoting::at(voting_contract_address).get_tally( + election_id, + candidate, + )); + assert_eq(tally, 1); +} + +#[test] +unconstrained fn test_cast_vote_with_separate_accounts() { + let (mut env, voting_contract_address, admin) = utils::setup(); + let alice = env.create_light_account(); + let bob = env.create_light_account(); + + let election_id = ElectionId::new(Field::from(42)); + env.call_public(admin, PrivateVoting::at(voting_contract_address).start_vote(election_id)); + + let candidate = 101; + env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id, candidate), + ); + + let _ = env.call_private( + bob, + PrivateVoting::at(voting_contract_address).cast_vote(election_id, candidate), + ); + + let tally = env.view_public(PrivateVoting::at(voting_contract_address).get_tally( + election_id, + candidate, + )); + assert_eq(tally, 2); +} + +#[test(should_fail_with = "duplicate siloed nullifier")] +unconstrained fn test_fail_vote_twice() { + let (mut env, voting_contract_address, admin) = utils::setup(); + let alice = env.create_light_account(); + + let election_id = ElectionId::new(Field::from(42)); + env.call_public(admin, PrivateVoting::at(voting_contract_address).start_vote(election_id)); + + let candidate = 101; + + 
env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id, candidate), + ); + env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id, candidate), + ); +} + +#[test] +unconstrained fn test_vote_in_two_different_elections() { + let (mut env, voting_contract_address, admin) = utils::setup(); + let alice = env.create_light_account(); + + let election_id_42 = ElectionId::new(Field::from(42)); + let election_id_54 = ElectionId::new(Field::from(54)); + + env.call_public(admin, PrivateVoting::at(voting_contract_address).start_vote(election_id_42)); + env.call_public(admin, PrivateVoting::at(voting_contract_address).start_vote(election_id_54)); + + let candidate = 101; + + env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id_42, candidate), + ); + env.call_private( + alice, + PrivateVoting::at(voting_contract_address).cast_vote(election_id_54, candidate), + ); + + let vote_count_42 = env.view_public(PrivateVoting::at(voting_contract_address).get_tally( + election_id_42, + candidate, + )); + let vote_count_54 = env.view_public(PrivateVoting::at(voting_contract_address).get_tally( + election_id_54, + candidate, + )); + + assert_eq(vote_count_42, 1); + assert_eq(vote_count_54, 1); +} diff --git a/boxes/boxes/vanilla/contracts/src/test/mod.nr b/boxes/boxes/vanilla/contracts/src/test/mod.nr new file mode 100644 index 000000000000..6fde1dc26251 --- /dev/null +++ b/boxes/boxes/vanilla/contracts/src/test/mod.nr @@ -0,0 +1,2 @@ +mod first; +mod utils; diff --git a/boxes/boxes/vanilla/contracts/src/test/utils.nr b/boxes/boxes/vanilla/contracts/src/test/utils.nr new file mode 100644 index 000000000000..bb36d53c94e1 --- /dev/null +++ b/boxes/boxes/vanilla/contracts/src/test/utils.nr @@ -0,0 +1,15 @@ +use aztec::{protocol::address::AztecAddress, test::helpers::test_environment::TestEnvironment}; + +use crate::PrivateVoting; + +pub unconstrained fn setup() -> 
(TestEnvironment, AztecAddress, AztecAddress) { + let mut env = TestEnvironment::new(); + + let admin = env.create_light_account(); + + let initializer_call = PrivateVoting::interface().constructor(admin); + let voting_contract_address = + env.deploy("PrivateVoting").with_public_initializer(admin, initializer_call); + + (env, voting_contract_address, admin) +} diff --git a/boxes/boxes/vanilla/package.json b/boxes/boxes/vanilla/package.json index a2cfd38157c2..448cef38efca 100644 --- a/boxes/boxes/vanilla/package.json +++ b/boxes/boxes/vanilla/package.json @@ -12,7 +12,8 @@ "compile": "cd contracts && ${AZTEC:-aztec} compile && cp target/*.json ../artifacts", "codegen": "cd contracts && ${AZTEC:-aztec} codegen ./target -o ./target && cp target/*.ts ../artifacts", "serve": "webpack serve --mode production --port ${PORT:-3000}", - "test": "PROVER_ENABLED=false node --experimental-transform-types scripts/deploy.ts && yarn playwright test", + "deploy": "node --experimental-transform-types scripts/deploy.ts", + "test": "yarn deploy && yarn playwright test", "lint": "prettier --check ./src" }, "dependencies": { @@ -25,10 +26,10 @@ "@aztec/noir-contracts.js": "latest", "@aztec/pxe": "latest", "@aztec/stdlib": "latest", - "@aztec/wallet-sdk": "latest" + "@aztec/wallet-sdk": "latest", + "@aztec/wallets": "latest" }, "devDependencies": { - "@aztec/wallets": "latest", "@playwright/test": "1.49.0", "@types/node": "^22.15.17", "buffer": "^6.0.3", diff --git a/boxes/boxes/vanilla/scripts/deploy.ts b/boxes/boxes/vanilla/scripts/deploy.ts index 3d2dd2d52b17..b02edaa7e6b1 100644 --- a/boxes/boxes/vanilla/scripts/deploy.ts +++ b/boxes/boxes/vanilla/scripts/deploy.ts @@ -3,7 +3,7 @@ import { type ContractInstanceWithAddress, DeployMethod, getContractInstanceFromInstantiationParams, - InteractionWaitOptions, + type InteractionWaitOptions, } from '@aztec/aztec.js/contracts'; import { SponsoredFeePaymentMethod } from '@aztec/aztec.js/fee'; import { Fr } from '@aztec/aztec.js/fields'; @@ 
-68,32 +68,10 @@ async function createAccount(wallet: EmbeddedWallet) { async function deployContract(wallet: Wallet, deployer: AztecAddress) { const salt = Fr.random(); - const contract = await getContractInstanceFromInstantiationParams( - PrivateVotingContract.artifact, - { - publicKeys: PublicKeys.default(), - constructorArtifact: getDefaultInitializer( - PrivateVotingContract.artifact - ), - constructorArgs: [deployer.toField()], - deployer: deployer, - salt, - } - ); - - const deployMethod = new DeployMethod( - contract.publicKeys, - wallet, - PrivateVotingContract.artifact, - (instance: ContractInstanceWithAddress, wallet: Wallet) => - PrivateVotingContract.at(instance.address, wallet), - [deployer.toField()], - getDefaultInitializer(PrivateVotingContract.artifact)?.name - ); const sponsoredPFCContract = await getSponsoredPFCContract(); - await deployMethod.send({ + const contract = await PrivateVotingContract.deploy(wallet, deployer).send({ from: deployer, contractAddressSalt: salt, fee: { @@ -103,9 +81,21 @@ async function deployContract(wallet: Wallet, deployer: AztecAddress) { }, wait: { timeout: 120 }, }); - await wallet.registerContract(contract, PrivateVotingContract.artifact); + + const electionId = new Fr(42); + + await contract.methods.start_vote({ id: electionId }).send({ + from: deployer, + fee: { + paymentMethod: new SponsoredFeePaymentMethod( + sponsoredPFCContract.address + ), + }, + wait: { timeout: 120 }, + }); return { + electionId: electionId.toString(), contractAddress: contract.address.toString(), deployerAddress: deployer.toString(), deploymentSalt: salt.toString(), @@ -115,6 +105,7 @@ async function deployContract(wallet: Wallet, deployer: AztecAddress) { async function writeEnvFile(deploymentInfo) { const envFilePath = path.join(import.meta.dirname, '../.env'); const envConfig = Object.entries({ + ELECTION_ID: deploymentInfo.electionId, CONTRACT_ADDRESS: deploymentInfo.contractAddress, DEPLOYER_ADDRESS: deploymentInfo.deployerAddress, 
DEPLOYMENT_SALT: deploymentInfo.deploymentSalt, diff --git a/ci.sh b/ci.sh index cbec87590272..da4966235f47 100755 --- a/ci.sh +++ b/ci.sh @@ -129,7 +129,7 @@ case "$cmd" in } export -f run - parallel --jobs 10 --termseq 'TERM,10000' --tagstring '{= $_=~s/run (\w+).*/$1/; =}' --line-buffered --halt now,fail=1 ::: \ + parallel --jobs 11 --termseq 'TERM,10000' --tagstring '{= $_=~s/run (\w+).*/$1/; =}' --line-buffered --halt now,fail=1 ::: \ 'run x1-full amd64 ci-full-no-test-cache' \ 'run x2-full amd64 ci-full-no-test-cache' \ 'run x3-full amd64 ci-full-no-test-cache' \ diff --git a/ci3/aws_request_instance_type b/ci3/aws_request_instance_type index df835a7cb77e..bf9afbf4c36c 100755 --- a/ci3/aws_request_instance_type +++ b/ci3/aws_request_instance_type @@ -88,6 +88,21 @@ fi echo "Instance id: $iid" +tags="Key=Name,Value=$name Key=Group,Value=build-instance" +[ -n "${GITHUB_ACTOR:-}" ] && tags+=" Key=GithubActor,Value=$GITHUB_ACTOR" +[ -n "${CI_MODE:-}" ] && tags+=" Key=CICommand,Value=$CI_MODE" +[ -n "${CI_DASHBOARD:-}" ] && tags+=" Key=Dashboard,Value=$CI_DASHBOARD" +if [ "${UNSAFE_AWS_KEEP_ALIVE:-0}" -eq 1 ]; then + echo_stderr "You have set UNSAFE_AWS_KEEP_ALIVE=1, so the instance will not be terminated after 1.5 hours by the reaper script. Make sure you shut the machine down when done." + tags+=" Key=Keep-Alive,Value=true" +fi +aws ec2 create-tags --resources $iid --tags $tags + +# Record the instance type so callers can pass it downstream (e.g. into Docker). +echo $instance_type > $state_dir/instance_type +# Record whether this is spot or on-demand. 
+[ -f "$sir_path" ] && echo spot > $state_dir/spot || echo ondemand > $state_dir/spot + while [ -z "${ip:-}" ]; do sleep 1 ip=$(aws ec2 describe-instances \ diff --git a/ci3/bootstrap_ec2 b/ci3/bootstrap_ec2 index b68f18c014d3..28a0b1c70401 100755 --- a/ci3/bootstrap_ec2 +++ b/ci3/bootstrap_ec2 @@ -89,6 +89,8 @@ if [[ -f "$state_dir/sir" ]]; then sir=$(cat $state_dir/sir) fi iid=$(cat $state_dir/iid) +export EC2_INSTANCE_TYPE=$(cat $state_dir/instance_type 2>/dev/null || echo "unknown") +export EC2_SPOT=$(cat $state_dir/spot 2>/dev/null || echo "unknown") # If AWS credentials are not set, try to load them from ~/.aws/build_instance_credentials. if [ -z "${AWS_ACCESS_KEY_ID:-}" ] || [ -z "${AWS_SECRET_ACCESS_KEY:-}" ]; then @@ -191,16 +193,6 @@ container_script=$( log_ci_run FAILED \$ci_log_id merge_train_failure_slack_notify \$ci_log_id release_canary_slack_notify \$ci_log_id - ci_failed_data=\$(jq -n \\ - --arg status "failed" \\ - --arg log_id "\$ci_log_id" \\ - --arg ref_name "\${TARGET_BRANCH:-\$REF_NAME}" \\ - --arg commit_hash "\$COMMIT_HASH" \\ - --arg commit_author "\$COMMIT_AUTHOR" \\ - --arg commit_msg "\$COMMIT_MSG" \\ - --argjson exit_code "\$code" \\ - '{status: \$status, log_id: \$log_id, ref_name: \$ref_name, commit_hash: \$commit_hash, commit_author: \$commit_author, commit_msg: \$commit_msg, exit_code: \$exit_code, timestamp: now | todate}') - redis_publish "ci:run:failed" "\$ci_failed_data" ;; esac exit \$code @@ -330,6 +322,9 @@ function run { -e AWS_TOKEN=\$aws_token \ -e NAMESPACE=${NAMESPACE:-} \ -e NETWORK=${NETWORK:-} \ + -e GITHUB_ACTOR=${GITHUB_ACTOR:-} \ + -e EC2_INSTANCE_TYPE=${EC2_INSTANCE_TYPE:-unknown} \ + -e EC2_SPOT=${EC2_SPOT:-unknown} \ --pids-limit=65536 \ --shm-size=2g \ aztecprotocol/devbox:3.0 bash -c $(printf '%q' "$container_script") diff --git a/ci3/check_port b/ci3/check_port new file mode 100755 index 000000000000..a637a8e78405 --- /dev/null +++ b/ci3/check_port @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Check if a port is 
free. If taken, print the process tree of the holder. +# Usage: check_port <port> +# Exit code: 0 if free, 1 if taken. +set -eu + +port="${1:?Usage: check_port <port>}" + +pid=$(lsof -ti :"$port" 2>/dev/null | head -1 || true) +if [ -z "$pid" ]; then + exit 0 +fi + +echo "Port $port is taken by PID $pid:" >&2 +# Show the command line of the process. +ps -p "$pid" -o pid,ppid,user,args --no-headers >&2 || true +# Show the process tree rooted at this PID. +pstree -apls "$pid" >&2 2>/dev/null || pstree -p "$pid" >&2 2>/dev/null || true +exit 1 diff --git a/ci3/ci-metrics/Dockerfile b/ci3/ci-metrics/Dockerfile new file mode 100644 index 000000000000..4013545da66d --- /dev/null +++ b/ci3/ci-metrics/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.12 + +RUN apt update && apt install -y jq redis-tools && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY requirements.txt requirements.txt +RUN pip install --no-cache-dir -r requirements.txt gunicorn +RUN git config --global --add safe.directory /aztec-packages +COPY . . 
+EXPOSE 8081 +CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:8081", "app:app"] diff --git a/ci3/ci-metrics/app.py b/ci3/ci-metrics/app.py new file mode 100644 index 000000000000..c62875e7d19a --- /dev/null +++ b/ci3/ci-metrics/app.py @@ -0,0 +1,848 @@ +from flask import Flask, request, Response, redirect +from flask_compress import Compress +from flask_httpauth import HTTPBasicAuth +from datetime import datetime, timedelta +import json +import os +import re +import redis +import threading +from pathlib import Path + +import db +import metrics +import github_data +import billing.aws as billing_aws +from billing import ( + get_billing_files_in_range, + aggregate_billing_weekly, aggregate_billing_monthly, + serve_billing_dashboard, +) + +REDIS_HOST = os.getenv('REDIS_HOST', 'localhost') +REDIS_PORT = int(os.getenv('REDIS_PORT', '6379')) +LOGS_DISK_PATH = os.getenv('LOGS_DISK_PATH', '/logs-disk') +DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', 'password') + +r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=False) + +app = Flask(__name__) +Compress(app) +auth = HTTPBasicAuth() + + +@auth.verify_password +def verify_password(username, password): + return password == DASHBOARD_PASSWORD + + +def _init(): + """Initialize SQLite and start background threads.""" + try: + db.get_db() + metrics.start_test_listener(r) + metrics.start_ci_run_sync(r) + print("[ci-metrics] Background threads started") + except Exception as e: + print(f"[ci-metrics] Warning: startup failed: {e}") + +threading.Thread(target=_init, daemon=True, name='metrics-init').start() + + +# ---- Helpers ---- + +def _aggregate_dates(by_date_list, granularity, sum_fields, avg_fields=None): + """Aggregate a list of {date, ...} dicts by weekly/monthly granularity.""" + if granularity == 'daily' or not by_date_list: + return by_date_list + + buckets = {} + for entry in by_date_list: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + if granularity == 'weekly': + key = (d - 
timedelta(days=d.weekday())).strftime('%Y-%m-%d') + else: # monthly + key = d.strftime('%Y-%m') + '-01' + + if key not in buckets: + buckets[key] = {'date': key} + for f in sum_fields: + buckets[key][f] = 0 + if avg_fields: + for f in avg_fields: + buckets[key][f'_avg_sum_{f}'] = 0 + buckets[key][f'_avg_cnt_{f}'] = 0 + + for f in sum_fields: + buckets[key][f] += entry.get(f) or 0 + if avg_fields: + for f in avg_fields: + val = entry.get(f) + if val is not None: + buckets[key][f'_avg_sum_{f}'] += val + buckets[key][f'_avg_cnt_{f}'] += 1 + + result = [] + for key in sorted(buckets): + b = buckets[key] + out = {'date': b['date']} + for f in sum_fields: + out[f] = round(b[f], 2) if isinstance(b[f], float) else b[f] + if avg_fields: + for f in avg_fields: + cnt = b[f'_avg_cnt_{f}'] + out[f] = round(b[f'_avg_sum_{f}'] / cnt, 1) if cnt else None + result.append(out) + + return result + + +def _json(data): + return Response(json.dumps(data), mimetype='application/json') + + +# ---- Namespace billing ---- + +@app.route('/namespace-billing') +@auth.login_required +def namespace_billing(): + html = serve_billing_dashboard() + if html: + return html + return "Billing dashboard not found", 404 + + +@app.route('/api/billing/data') +@auth.login_required +def billing_data(): + date_from_str = request.args.get('from') + date_to_str = request.args.get('to') + granularity = request.args.get('granularity', 'daily') + + if not date_from_str or not date_to_str: + return _json({'error': 'from and to date params required (YYYY-MM-DD)'}), 400 + try: + date_from = datetime.strptime(date_from_str, '%Y-%m-%d') + date_to = datetime.strptime(date_to_str, '%Y-%m-%d') + except ValueError: + return _json({'error': 'Invalid date format, use YYYY-MM-DD'}), 400 + + daily_data = get_billing_files_in_range(date_from, date_to) + + # Filter out namespaces costing less than $1 total across the range + ns_totals = {} + for entry in daily_data: + for ns, ns_data in entry.get('namespaces', {}).items(): + 
ns_totals[ns] = ns_totals.get(ns, 0) + ns_data.get('total', 0) + cheap_ns = {ns for ns, total in ns_totals.items() if total < 1.0} + if cheap_ns: + for entry in daily_data: + entry['namespaces'] = {ns: d for ns, d in entry.get('namespaces', {}).items() + if ns not in cheap_ns} + + if granularity == 'weekly': + result = aggregate_billing_weekly(daily_data) + elif granularity == 'monthly': + result = aggregate_billing_monthly(daily_data) + else: + result = daily_data + + return _json(result) + + +# ---- CI runs ---- + +@app.route('/api/ci/runs') +@auth.login_required +def api_ci_runs(): + date_from = request.args.get('from', '') + date_to = request.args.get('to', '') + status_filter = request.args.get('status', '') + author = request.args.get('author', '') + dashboard = request.args.get('dashboard', '') + limit = min(int(request.args.get('limit', 100)), 1000) + offset = int(request.args.get('offset', 0)) + + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) if date_from else None + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) if date_to else None + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + + if status_filter: + runs = [run for run in runs if run.get('status') == status_filter] + if author: + runs = [run for run in runs if run.get('author') == author] + if dashboard: + runs = [run for run in runs if run.get('dashboard') == dashboard] + + runs.sort(key=lambda x: x.get('timestamp', 0), reverse=True) + runs = runs[offset:offset + limit] + + return _json(runs) + + +@app.route('/api/ci/stats') +@auth.login_required +def api_ci_stats(): + ts_from = int((datetime.now() - timedelta(days=7)).timestamp() * 1000) + runs = metrics.get_ci_runs(r, ts_from) + + total = len(runs) + passed = sum(1 for run in runs if run.get('status') == 'PASSED') + failed = sum(1 for run in runs if run.get('status') == 'FAILED') + costs = [run['cost_usd'] for run in runs if run.get('cost_usd') is not None] + durations = 
[] + for run in runs: + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + durations.append((complete - ts) / 60000.0) + + return _json({ + 'total_runs': total, + 'passed': passed, + 'failed': failed, + 'total_cost': round(sum(costs), 2) if costs else None, + 'avg_duration_mins': round(sum(durations) / len(durations), 1) if durations else None, + }) + + +# ---- Cost endpoints ---- + +@app.route('/api/costs/overview') +@auth.login_required +def api_costs_overview(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + granularity = request.args.get('granularity', 'daily') + result = billing_aws.get_costs_overview(date_from, date_to) + if granularity != 'daily' and result.get('by_date'): + buckets = {} + for entry in result['by_date']: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + if granularity == 'weekly': + key = (d - timedelta(days=d.weekday())).strftime('%Y-%m-%d') + else: + key = d.strftime('%Y-%m') + '-01' + if key not in buckets: + buckets[key] = {'date': key, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0} + for cat, amt in entry.get('aws', {}).items(): + buckets[key]['aws'][cat] = buckets[key]['aws'].get(cat, 0) + amt + for cat, amt in entry.get('gcp', {}).items(): + buckets[key]['gcp'][cat] = buckets[key]['gcp'].get(cat, 0) + amt + buckets[key]['aws_total'] += entry.get('aws_total', 0) + buckets[key]['gcp_total'] += entry.get('gcp_total', 0) + result['by_date'] = sorted(buckets.values(), key=lambda x: x['date']) + return _json(result) + + +@app.route('/api/costs/details') +@auth.login_required +def api_costs_details(): + """Per-resource (USAGE_TYPE) cost breakdown.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + + rows = billing_aws.get_aws_cost_details(date_from, 
date_to) + + usage_map = {} + for row in rows: + ut = row['usage_type'] + if ut not in usage_map: + usage_map[ut] = { + 'usage_type': ut, + 'service': row['service'], + 'category': row['category'], + 'total': 0, + 'by_date': {}, + 'is_ri': 'HeavyUsage' in ut, + } + usage_map[ut]['total'] += row['amount_usd'] + d = row['date'] + usage_map[ut]['by_date'][d] = usage_map[ut]['by_date'].get(d, 0) + row['amount_usd'] + + items = sorted(usage_map.values(), key=lambda x: -x['total']) + for item in items: + item['total'] = round(item['total'], 2) + item['by_date'] = {d: round(v, 4) for d, v in sorted(item['by_date'].items())} + + all_dates = sorted({row['date'] for row in rows}) + ri_items = [i for i in items if i['is_ri']] + ri_total = round(sum(i['total'] for i in ri_items), 2) + + return _json({ + 'items': items, + 'dates': all_dates, + 'ri_total': ri_total, + 'grand_total': round(sum(i['total'] for i in items), 2), + }) + + +@app.route('/api/costs/attribution') +@auth.login_required +def api_costs_attribution(): + """CI cost attribution by user, branch, instance.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs_with_cost = [run for run in runs if run.get('cost_usd') is not None] + + # Enrich merge queue runs with PR author from GitHub + pr_numbers = {run.get('pr_number') for run in runs_with_cost if run.get('pr_number')} + pr_authors = github_data.batch_get_pr_authors(pr_numbers) + + granularity = request.args.get('granularity', 'daily') + + instances = [] + by_user = {} + by_branch = {} + by_type = {} + by_date_type = {} + + for run in runs_with_cost: + info = billing_aws.decode_branch_info(run) + cost = run['cost_usd'] + 
date = metrics._ts_to_date(run.get('timestamp', 0)) + + author = info['author'] + prn = info['pr_number'] + if prn and int(prn) in pr_authors: + author = pr_authors[int(prn)]['author'] + + inst_type = run.get('instance_type', 'unknown') + vcpus = run.get('instance_vcpus') + if inst_type == 'unknown' and vcpus: + inst_type = f'{vcpus}vcpu' + + instances.append({ + 'instance_name': info['instance_name'], + 'date': date, + 'cost_usd': cost, + 'author': author, + 'branch': info['branch'], + 'pr_number': prn, + 'type': info['type'], + 'instance_type': inst_type, + 'spot': run.get('spot', False), + 'job_id': run.get('job_id', ''), + 'duration_mins': round((run.get('complete', 0) - run.get('timestamp', 0)) / 60000, 1) if run.get('complete') else None, + }) + + if author not in by_user: + by_user[author] = {'aws_cost': 0, 'gcp_cost': 0, 'runs': 0, 'by_date': {}} + by_user[author]['aws_cost'] += cost + by_user[author]['runs'] += 1 + by_user[author]['by_date'][date] = by_user[author]['by_date'].get(date, 0) + cost + + branch_key = info['branch'] or info['type'] + if branch_key not in by_branch: + by_branch[branch_key] = {'cost': 0, 'runs': 0, 'type': info['type'], 'author': author} + by_branch[branch_key]['cost'] += cost + by_branch[branch_key]['runs'] += 1 + + rt = info['type'] + if rt not in by_type: + by_type[rt] = {'cost': 0, 'runs': 0} + by_type[rt]['cost'] += cost + by_type[rt]['runs'] += 1 + + if date not in by_date_type: + by_date_type[date] = {} + by_date_type[date][rt] = by_date_type[date].get(rt, 0) + cost + + # GCP costs — reported as total, no namespace→user heuristic + gcp_total = 0 + try: + from billing.gcp import get_billing_files_in_range as get_gcp_billing + gcp_data = get_gcp_billing( + datetime.strptime(date_from, '%Y-%m-%d'), + datetime.strptime(date_to, '%Y-%m-%d'), + ) + for entry in gcp_data: + for ns, ns_data in entry.get('namespaces', {}).items(): + gcp_total += ns_data.get('total', 0) + except Exception as e: + print(f"[attribution] GKE billing 
error: {e}") + + # Sort and format + user_list = [{'author': a, 'aws_cost': round(v['aws_cost'], 2), 'gcp_cost': round(v['gcp_cost'], 2), + 'total_cost': round(v['aws_cost'] + v['gcp_cost'], 2), 'runs': v['runs'], + 'by_date': {d: round(c, 2) for d, c in sorted(v['by_date'].items())}} + for a, v in sorted(by_user.items(), key=lambda x: -(x[1]['aws_cost'] + x[1]['gcp_cost']))] + + branch_list = [{'branch': b, 'cost': round(v['cost'], 2), 'runs': v['runs'], + 'type': v['type'], 'author': v['author']} + for b, v in sorted(by_branch.items(), key=lambda x: -x[1]['cost'])[:100]] + + type_list = [{'type': t, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for t, v in sorted(by_type.items(), key=lambda x: -x[1]['cost'])] + + instances.sort(key=lambda x: -(x['cost_usd'] or 0)) + + all_types = sorted(by_type.keys()) + by_date_list = [] + for date in sorted(by_date_type): + entry = {'date': date, 'total': 0, 'runs': 0} + for rt in all_types: + entry[rt] = round(by_date_type[date].get(rt, 0), 2) + entry['total'] += by_date_type[date].get(rt, 0) + entry['total'] = round(entry['total'], 2) + entry['runs'] = sum(1 for inst in instances if inst['date'] == date) + by_date_list.append(entry) + + by_date_list = _aggregate_dates(by_date_list, granularity, + sum_fields=['total', 'runs'] + all_types) + + total_aws = sum(u['aws_cost'] for u in user_list) + + return _json({ + 'by_user': user_list, + 'by_branch': branch_list, + 'by_type': type_list, + 'by_date': by_date_list, + 'run_types': all_types, + 'instances': instances[:500], + 'totals': {'aws': round(total_aws, 2), 'gcp': round(gcp_total, 2), + 'gcp_unattributed': round(gcp_total, 2), + 'combined': round(total_aws + gcp_total, 2)}, + }) + + +@app.route('/api/costs/runners') +@auth.login_required +def api_costs_runners(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + granularity = 
request.args.get('granularity', 'daily') + dashboard = request.args.get('dashboard', '') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs_with_cost = [run for run in runs if run.get('cost_usd') is not None] + if dashboard: + runs_with_cost = [run for run in runs_with_cost if run.get('dashboard') == dashboard] + + by_date_map = {} + for run in runs_with_cost: + date = metrics._ts_to_date(run.get('timestamp', 0)) + if date not in by_date_map: + by_date_map[date] = {'spot_cost': 0, 'ondemand_cost': 0, 'total': 0} + cost = run['cost_usd'] + if run.get('spot'): + by_date_map[date]['spot_cost'] += cost + else: + by_date_map[date]['ondemand_cost'] += cost + by_date_map[date]['total'] += cost + + by_date = [{'date': date, 'spot_cost': round(d['spot_cost'], 2), + 'ondemand_cost': round(d['ondemand_cost'], 2), 'total': round(d['total'], 2), + 'spot_pct': round(100.0 * d['spot_cost'] / max(d['total'], 0.01), 1)} + for date, d in sorted(by_date_map.items())] + + by_date = _aggregate_dates(by_date, granularity, + sum_fields=['spot_cost', 'ondemand_cost', 'total']) + for d in by_date: + d['spot_pct'] = round(100.0 * d['spot_cost'] / max(d['total'], 0.01), 1) + + by_instance_map = {} + for run in runs_with_cost: + inst = run.get('instance_type', 'unknown') + if inst not in by_instance_map: + by_instance_map[inst] = {'cost': 0, 'runs': 0} + by_instance_map[inst]['cost'] += run['cost_usd'] + by_instance_map[inst]['runs'] += 1 + by_instance = [{'instance_type': k, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for k, v in sorted(by_instance_map.items(), key=lambda x: -x[1]['cost'])] + + by_dash_map = {} + for run in runs_with_cost: + dash = run.get('dashboard', 'unknown') + if dash not in by_dash_map: + by_dash_map[dash] = {'cost': 0, 'runs': 0} + by_dash_map[dash]['cost'] += run['cost_usd'] + 
by_dash_map[dash]['runs'] += 1 + by_dashboard = [{'dashboard': k, 'cost': round(v['cost'], 2), 'runs': v['runs']} + for k, v in sorted(by_dash_map.items(), key=lambda x: -x[1]['cost'])] + + total_cost = sum(run['cost_usd'] for run in runs_with_cost) + spot_cost = sum(run['cost_usd'] for run in runs_with_cost if run.get('spot')) + + return _json({ + 'by_date': by_date, + 'by_instance_type': by_instance, + 'by_dashboard': by_dashboard, + 'summary': { + 'total_cost': round(total_cost, 2), + 'spot_pct': round(100.0 * spot_cost / max(total_cost, 0.01), 1), + 'avg_cost_per_run': round(total_cost / max(len(runs_with_cost), 1), 2), + 'total_runs': len(runs_with_cost), + }, + }) + + +# ---- CI Performance ---- + +@app.route('/api/ci/performance') +@auth.login_required +def api_ci_performance(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + granularity = request.args.get('granularity', 'daily') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + + runs = metrics.get_ci_runs(r, ts_from, ts_to) + runs = [run for run in runs if run.get('status') in ('PASSED', 'FAILED')] + if dashboard: + runs = [run for run in runs if run.get('dashboard') == dashboard] + + by_date_map = {} + for run in runs: + date = metrics._ts_to_date(run.get('timestamp', 0)) + if date not in by_date_map: + by_date_map[date] = {'total': 0, 'passed': 0, 'failed': 0, 'durations': []} + by_date_map[date]['total'] += 1 + if run.get('status') == 'PASSED': + by_date_map[date]['passed'] += 1 + else: + by_date_map[date]['failed'] += 1 + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + by_date_map[date]['durations'].append((complete - ts) / 60000.0) + + by_date = [] + for date in 
sorted(by_date_map): + d = by_date_map[date] + by_date.append({ + 'date': date, + 'total': d['total'], + 'passed': d['passed'], + 'failed': d['failed'], + 'pass_rate': round(100.0 * d['passed'] / max(d['total'], 1), 1), + 'failure_rate': round(100.0 * d['failed'] / max(d['total'], 1), 1), + 'avg_duration_mins': round(sum(d['durations']) / len(d['durations']), 1) if d['durations'] else None, + }) + + by_date = _aggregate_dates(by_date, granularity, + sum_fields=['total', 'passed', 'failed'], + avg_fields=['avg_duration_mins']) + for d in by_date: + d['pass_rate'] = round(100.0 * d['passed'] / max(d['total'], 1), 1) + d['failure_rate'] = round(100.0 * d['failed'] / max(d['total'], 1), 1) + + # Daily flake/failure counts from test_events + if dashboard: + flake_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (dashboard, date_from, date_to + 'T23:59:59')) + fail_test_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'failed' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + flake_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'flaked' + AND timestamp >= ? AND timestamp < ? + GROUP BY substr(timestamp, 1, 10) + ''', (date_from, date_to + 'T23:59:59')) + fail_test_daily = db.query(''' + SELECT substr(timestamp, 1, 10) as date, COUNT(*) as count + FROM test_events WHERE status = 'failed' + AND timestamp >= ? AND timestamp < ? 
+ GROUP BY substr(timestamp, 1, 10) + ''', (date_from, date_to + 'T23:59:59')) + flake_daily_map = {r['date']: r['count'] for r in flake_daily} + fail_test_daily_map = {r['date']: r['count'] for r in fail_test_daily} + for d in by_date: + d['flake_count'] = flake_daily_map.get(d['date'], 0) + d['test_failure_count'] = fail_test_daily_map.get(d['date'], 0) + + # Top flakes/failures + if dashboard: + top_flakes = db.query(''' + SELECT test_cmd, COUNT(*) as count, ref_name + FROM test_events WHERE status='flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (dashboard, date_from, date_to + 'T23:59:59')) + top_failures = db.query(''' + SELECT test_cmd, COUNT(*) as count + FROM test_events WHERE status='failed' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + top_flakes = db.query(''' + SELECT test_cmd, COUNT(*) as count, ref_name + FROM test_events WHERE status='flaked' AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (date_from, date_to + 'T23:59:59')) + top_failures = db.query(''' + SELECT test_cmd, COUNT(*) as count + FROM test_events WHERE status='failed' AND timestamp >= ? AND timestamp <= ? + GROUP BY test_cmd ORDER BY count DESC LIMIT 15 + ''', (date_from, date_to + 'T23:59:59')) + + # Summary + total = len(runs) + passed = sum(1 for run in runs if run.get('status') == 'PASSED') + failed = total - passed + durations = [] + for run in runs: + complete = run.get('complete') + ts = run.get('timestamp') + if complete and ts: + durations.append((complete - ts) / 60000.0) + + if dashboard: + flake_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? 
+ ''', (dashboard, date_from, date_to + 'T23:59:59')) + total_tests = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status IN ('failed','flaked') AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + ''', (dashboard, date_from, date_to + 'T23:59:59')) + total_failures_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='failed' AND dashboard = ? + AND timestamp >= ? AND timestamp <= ? + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + flake_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='flaked' AND timestamp >= ? AND timestamp <= ? + ''', (date_from, date_to + 'T23:59:59')) + total_tests = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status IN ('failed','flaked') AND timestamp >= ? AND timestamp <= ? + ''', (date_from, date_to + 'T23:59:59')) + total_failures_count = db.query(''' + SELECT COUNT(*) as c FROM test_events WHERE status='failed' AND timestamp >= ? AND timestamp <= ? + ''', (date_from, date_to + 'T23:59:59')) + + fc = flake_count[0]['c'] if flake_count else 0 + tc = total_tests[0]['c'] if total_tests else 0 + tfc = total_failures_count[0]['c'] if total_failures_count else 0 + + return _json({ + 'by_date': by_date, + 'top_flakes': top_flakes, + 'top_failures': top_failures, + 'summary': { + 'total_runs': total, + 'pass_rate': round(100.0 * passed / max(total, 1), 1), + 'failure_rate': round(100.0 * failed / max(total, 1), 1), + 'avg_duration_mins': round(sum(durations) / len(durations), 1) if durations else None, + 'flake_rate': round(100.0 * fc / max(tc, 1), 1) if tc else 0, + 'total_flakes': fc, + 'total_test_failures': tfc, + }, + }) + + +# ---- GitHub integration ---- + +@app.route('/api/deployments/speed') +@auth.login_required +def api_deploy_speed(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + workflow = 
request.args.get('workflow', '') + granularity = request.args.get('granularity', 'daily') + result = github_data.get_deployment_speed(date_from, date_to, workflow) + if granularity != 'daily' and result.get('by_date'): + result['by_date'] = _aggregate_dates( + result['by_date'], granularity, + sum_fields=['count', 'success', 'failure'], + avg_fields=['median_mins', 'p95_mins']) + return _json(result) + + +@app.route('/api/branches/lag') +@auth.login_required +def api_branch_lag(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + return _json(github_data.get_branch_lag(date_from, date_to)) + + +@app.route('/api/prs/metrics') +@auth.login_required +def api_pr_metrics(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + author = request.args.get('author', '') + ts_from = int(datetime.strptime(date_from, '%Y-%m-%d').timestamp() * 1000) + ts_to = int((datetime.strptime(date_to, '%Y-%m-%d') + timedelta(days=1)).timestamp() * 1000) + ci_runs = metrics.get_ci_runs(r, ts_from, ts_to) + return _json(github_data.get_pr_metrics(date_from, date_to, author, ci_runs)) + + +@app.route('/api/merge-queue/stats') +@auth.login_required +def api_merge_queue_stats(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + return _json(github_data.get_merge_queue_stats(date_from, date_to)) + + +@app.route('/api/ci/flakes-by-command') +@auth.login_required +def api_flakes_by_command(): + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + 
metrics.sync_failed_tests_to_sqlite(r) + return _json(metrics.get_flakes_by_command(date_from, date_to, dashboard)) + + +# ---- Test timings ---- + +@app.route('/api/tests/timings') +@auth.login_required +def api_test_timings(): + """Test timing statistics: duration by test command, with trends.""" + date_from = request.args.get('from', (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')) + date_to = request.args.get('to', datetime.now().strftime('%Y-%m-%d')) + dashboard = request.args.get('dashboard', '') + status = request.args.get('status', '') # filter to specific status + test_cmd = request.args.get('test_cmd', '') # filter to specific test + + conditions = ['duration_secs IS NOT NULL', 'duration_secs > 0', + 'timestamp >= ?', "timestamp < ? || 'T23:59:59'"] + params = [date_from, date_to] + + if dashboard: + conditions.append('dashboard = ?') + params.append(dashboard) + if status: + conditions.append('status = ?') + params.append(status) + if test_cmd: + conditions.append('test_cmd = ?') + params.append(test_cmd) + + where = 'WHERE ' + ' AND '.join(conditions) + + # Per-test stats + by_test = db.query(f''' + SELECT test_cmd, + COUNT(*) as count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MIN(duration_secs), 1) as min_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as flaked, + dashboard + FROM test_events {where} + GROUP BY test_cmd + ORDER BY count DESC + LIMIT 200 + ''', params) + + # Add pass rate + for row in by_test: + total = row['passed'] + row['failed'] + row['flaked'] + row['pass_rate'] = round(100.0 * row['passed'] / max(total, 1), 1) + row['total_time_secs'] = round(row['avg_secs'] * row['count'], 0) + + # Daily time series (aggregate across all tests or filtered test) + by_date = db.query(f''' + SELECT substr(timestamp, 1, 10) as date, + COUNT(*) as 
count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as flaked + FROM test_events {where} + GROUP BY substr(timestamp, 1, 10) + ORDER BY date + ''', params) + + # Summary + summary_rows = db.query(f''' + SELECT COUNT(*) as count, + ROUND(AVG(duration_secs), 1) as avg_secs, + ROUND(MAX(duration_secs), 1) as max_secs, + SUM(duration_secs) as total_secs, + SUM(CASE WHEN status = 'passed' THEN 1 ELSE 0 END) as passed, + SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed, + SUM(CASE WHEN status = 'flaked' THEN 1 ELSE 0 END) as flaked + FROM test_events {where} + ''', params) + s = summary_rows[0] if summary_rows else {} + + # Slowest individual test runs + slowest = db.query(f''' + SELECT test_cmd, status, duration_secs, dashboard, + substr(timestamp, 1, 10) as date, commit_author, log_url + FROM test_events {where} + ORDER BY duration_secs DESC + LIMIT 50 + ''', params) + + return _json({ + 'by_test': by_test, + 'by_date': by_date, + 'slowest': slowest, + 'summary': { + 'total_runs': s.get('count', 0), + 'avg_duration_secs': s.get('avg_secs'), + 'max_duration_secs': s.get('max_secs'), + 'total_compute_secs': round(s.get('total_secs', 0) or 0, 0), + 'passed': s.get('passed', 0), + 'failed': s.get('failed', 0), + 'flaked': s.get('flaked', 0), + }, + }) + + +# ---- Dashboard views ---- + +@app.route('/ci-health') +@auth.login_required +def ci_health(): + return redirect('/ci-insights') + + +@app.route('/ci-insights') +@auth.login_required +def ci_insights(): + path = Path(__file__).parent / 'views' / 'ci-insights.html' + if path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +@app.route('/cost-overview') +@auth.login_required +def cost_overview(): + path = Path(__file__).parent / 'views' / 'cost-overview.html' + if 
path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +@app.route('/test-timings') +@auth.login_required +def test_timings(): + path = Path(__file__).parent / 'views' / 'test-timings.html' + if path.exists(): + return path.read_text() + return "Dashboard not found", 404 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8081) diff --git a/ci3/ci-metrics/billing/__init__.py b/ci3/ci-metrics/billing/__init__.py new file mode 100644 index 000000000000..e097751047c2 --- /dev/null +++ b/ci3/ci-metrics/billing/__init__.py @@ -0,0 +1,14 @@ +"""Billing package: GKE namespace billing and AWS cost data.""" + +from billing.gcp import ( + get_billing_files_in_range, + aggregate_billing_weekly, + aggregate_billing_monthly, + serve_billing_dashboard, +) +from billing.aws import ( + get_costs_overview, + get_aws_cost_details, + decode_branch_info, + decode_instance_name, +) diff --git a/ci3/ci-metrics/billing/aws.py b/ci3/ci-metrics/billing/aws.py new file mode 100644 index 000000000000..481393d74ec3 --- /dev/null +++ b/ci3/ci-metrics/billing/aws.py @@ -0,0 +1,347 @@ +"""AWS Cost Explorer fetch with in-memory cache. + +Fetches on first request, caches for 6 hours. No SQLite, no background threads. 
+""" +import threading +import time +from datetime import datetime, timedelta, timezone + +SERVICE_CATEGORY_MAP = { + # Compute + 'Amazon Elastic Compute Cloud - Compute': 'ec2', + 'EC2 - Other': 'ec2', # EBS volumes, snapshots, NAT gateways, data transfer + 'Amazon Elastic Container Service': 'ecs', + 'Amazon Elastic Kubernetes Service': 'eks', + 'Amazon EC2 Container Registry (ECR)': 'ecr', + 'AWS Lambda': 'lambda', + 'Amazon Lightsail': 'lightsail', + # Storage + 'Amazon Simple Storage Service': 's3', + 'Amazon Elastic File System': 'efs', + 'Amazon Elastic Block Store': 'ebs', + 'Amazon ElastiCache': 'elasticache', + 'Amazon Relational Database Service': 'rds', + 'Amazon DynamoDB': 'dynamodb', + 'AWS Backup': 'backup', + # Networking + 'Amazon CloudFront': 'cloudfront', + 'CloudFront Flat-Rate Plans': 'cloudfront', + 'Amazon Virtual Private Cloud': 'vpc', + 'Elastic Load Balancing': 'elb', + 'Amazon Elastic Load Balancing': 'elb', + 'Amazon Route 53': 'route53', + 'Amazon API Gateway': 'apigateway', + 'AWS Data Transfer': 'data_transfer', + 'AWS Global Accelerator': 'global_accelerator', + # Monitoring & Security + 'AmazonCloudWatch': 'cloudwatch', + 'AWS CloudTrail': 'cloudtrail', + 'AWS Secrets Manager': 'secrets', + 'AWS Key Management Service': 'kms', + 'AWS WAF': 'waf', + 'AWS Config': 'config', + 'AWS Certificate Manager': 'acm', + # CI/CD & Dev Tools + 'AWS CodeBuild': 'codebuild', + 'AWS CodePipeline': 'codepipeline', + 'AWS CloudFormation': 'cloudformation', + 'AWS Amplify': 'amplify', + # Data & Analytics + 'AWS Glue': 'glue', + # IoT + 'AWS IoT': 'iot', + 'Amazon Location Service': 'location', + # Messaging + 'Amazon Simple Notification Service': 'sns', + 'Amazon Simple Queue Service': 'sqs', + # Other + 'Tax': 'tax', + 'AWS Support (Business)': 'support', + 'AWS Support (Enterprise)': 'support', + 'AWS Cost Explorer': 'cost_explorer', +} + +import re + +_cache = {'rows': [], 'ts': 0} +_cache_lock = threading.Lock() +_detail_cache = {'rows': [], 
'ts': 0} +_detail_cache_lock = threading.Lock() +_CACHE_TTL = 6 * 3600 + +# Known job postfixes from ci.sh (these become INSTANCE_POSTFIX) +_JOB_POSTFIXES = re.compile( + r'_(x[0-9]+-(?:full|fast)|a[0-9]+-(?:full|fast)|n-deploy-[0-9]+|grind-test-[a-f0-9]+)$' +) +_ARCH_SUFFIXES = ('_amd64', '_arm64', '_x86_64', '_aarch64') + + +def decode_instance_name(run: dict) -> str: + """Reconstruct the EC2 instance name from CI run metadata. + + bootstrap_ec2 naming: + merge queue: pr-{number}_{arch}[_{postfix}] + branch: {sanitized_branch}_{arch}[_{postfix}] + """ + name = run.get('name', '') + pr = run.get('pr_number') + arch = run.get('arch', 'amd64') + # Normalize arch names + if arch in ('x86_64', 'amd64'): + arch = 'amd64' + elif arch in ('aarch64', 'arm64'): + arch = 'arm64' + job = run.get('job_id', '') + + if '(queue)' in name and pr: + base = f'pr-{pr}_{arch}' + elif pr: + base = f'pr-{pr}_{arch}' + else: + # Replicate: echo -n "$REF_NAME" | head -c 50 | tr -c 'a-zA-Z0-9-' '_' + sanitized = re.sub(r'[^a-zA-Z0-9-]', '_', name[:50]) + base = f'{sanitized}_{arch}' + if job: + return f'{base}_{job}' + return base + + +def decode_branch_info(run: dict) -> dict: + """Extract branch/PR/user context from a CI run.""" + name = run.get('name', '') + dashboard = run.get('dashboard', '') + pr = run.get('pr_number') + author = run.get('author', 'unknown') + + if '(queue)' in name or dashboard == 'next': + run_type = 'merge-queue' + branch = name.replace(' (queue)', '') + elif dashboard == 'prs': + run_type = 'pr' + branch = name + elif dashboard in ('nightly', 'releases', 'network', 'deflake'): + run_type = dashboard + branch = name + else: + run_type = 'other' + branch = name + + return { + 'type': run_type, + 'branch': branch, + 'pr_number': pr, + 'author': author, + 'instance_name': decode_instance_name(run), + } + + +def _fetch_aws_costs(date_from: str, date_to: str) -> list[dict]: + try: + import boto3 + except ImportError: + print("[rk_aws_costs] boto3 not installed, 
skipping") + return [] + + try: + client = boto3.client('ce', region_name='us-east-2') + rows = [] + next_token = None + + while True: + kwargs = dict( + TimePeriod={'Start': date_from, 'End': date_to}, + Granularity='DAILY', + Metrics=['UnblendedCost'], + GroupBy=[{'Type': 'DIMENSION', 'Key': 'SERVICE'}], + ) + if next_token: + kwargs['NextPageToken'] = next_token + + response = client.get_cost_and_usage(**kwargs) + + for result in response['ResultsByTime']: + date = result['TimePeriod']['Start'] + for group in result['Groups']: + service = group['Keys'][0] + amount = float(group['Metrics']['UnblendedCost']['Amount']) + if amount == 0: + continue + category = SERVICE_CATEGORY_MAP.get(service, 'other') + if category == 'other': + print(f"[rk_aws_costs] unmapped service: {service!r} (${amount:.2f})") + rows.append({ + 'date': date, + 'service': service, + 'category': category, + 'amount_usd': round(amount, 4), + }) + + next_token = response.get('NextPageToken') + if not next_token: + break + + return rows + except Exception as e: + print(f"[rk_aws_costs] Error: {e}") + return [] + + +def _ensure_cached(): + now = time.time() + if _cache['rows'] and now - _cache['ts'] < _CACHE_TTL: + return + if not _cache_lock.acquire(blocking=False): + return + try: + today = datetime.now(timezone.utc).date() + rows = _fetch_aws_costs( + (today - timedelta(days=365)).isoformat(), + today.isoformat(), + ) + if rows: + _cache['rows'] = rows + _cache['ts'] = now + finally: + _cache_lock.release() + + +def get_aws_costs(date_from: str, date_to: str) -> list[dict]: + """Get AWS costs for date range. 
Blocks on first fetch, async refresh after.""" + if not _cache['rows']: + _ensure_cached() # block on first load so dashboard isn't empty + else: + threading.Thread(target=_ensure_cached, daemon=True).start() + return [r for r in _cache['rows'] if date_from <= r['date'] <= date_to] + + +def _fetch_aws_cost_details(date_from: str, date_to: str) -> list[dict]: + """Fetch per-resource (USAGE_TYPE) cost breakdown from AWS Cost Explorer.""" + try: + import boto3 + except ImportError: + return [] + + try: + client = boto3.client('ce', region_name='us-east-2') + rows = [] + next_token = None + + while True: + kwargs = dict( + TimePeriod={'Start': date_from, 'End': date_to}, + Granularity='DAILY', + Metrics=['UnblendedCost'], + GroupBy=[ + {'Type': 'DIMENSION', 'Key': 'SERVICE'}, + {'Type': 'DIMENSION', 'Key': 'USAGE_TYPE'}, + ], + ) + if next_token: + kwargs['NextPageToken'] = next_token + + response = client.get_cost_and_usage(**kwargs) + + for result in response['ResultsByTime']: + date = result['TimePeriod']['Start'] + for group in result['Groups']: + service = group['Keys'][0] + usage_type = group['Keys'][1] + amount = float(group['Metrics']['UnblendedCost']['Amount']) + if amount == 0: + continue + category = SERVICE_CATEGORY_MAP.get(service, 'other') + rows.append({ + 'date': date, + 'service': service, + 'usage_type': usage_type, + 'category': category, + 'amount_usd': round(amount, 4), + }) + + next_token = response.get('NextPageToken') + if not next_token: + break + + return rows + except Exception as e: + print(f"[rk_aws_costs] Detail fetch error: {e}") + return [] + + +def _ensure_detail_cached(): + now = time.time() + if _detail_cache['rows'] and now - _detail_cache['ts'] < _CACHE_TTL: + return + if not _detail_cache_lock.acquire(blocking=False): + return + try: + today = datetime.now(timezone.utc).date() + rows = _fetch_aws_cost_details( + (today - timedelta(days=365)).isoformat(), + today.isoformat(), + ) + if rows: + _detail_cache['rows'] = rows + 
_detail_cache['ts'] = now + finally: + _detail_cache_lock.release() + + +def get_aws_cost_details(date_from: str, date_to: str) -> list[dict]: + """Get per-resource AWS cost details. Blocks on first fetch, async refresh after.""" + if not _detail_cache['rows']: + _ensure_detail_cached() + else: + threading.Thread(target=_ensure_detail_cached, daemon=True).start() + return [r for r in _detail_cache['rows'] if date_from <= r['date'] <= date_to] + + +def get_costs_overview(date_from: str, date_to: str) -> dict: + """Combined AWS + GCP cost overview. GCP data comes from billing JSON files.""" + aws_rows = get_aws_costs(date_from, date_to) + + # GCP data from billing files (already on disk, no SQLite needed) + gcp_by_date = {} + try: + from billing.gcp import get_billing_files_in_range + billing_data = get_billing_files_in_range( + datetime.strptime(date_from, '%Y-%m-%d'), + datetime.strptime(date_to, '%Y-%m-%d'), + ) + for entry in billing_data: + d = entry['date'] + if d not in gcp_by_date: + gcp_by_date[d] = {} + for ns_data in entry.get('namespaces', {}).values(): + for cat, amt in ns_data.get('breakdown', {}).items(): + gcp_by_date[d][cat] = gcp_by_date[d].get(cat, 0) + amt + except Exception as e: + print(f"[rk_aws_costs] GCP billing read failed: {e}") + + by_date = {} + for r in aws_rows: + d = r['date'] + if d not in by_date: + by_date[d] = {'date': d, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0} + cat = r['category'] + by_date[d]['aws'][cat] = by_date[d]['aws'].get(cat, 0) + r['amount_usd'] + by_date[d]['aws_total'] += r['amount_usd'] + + for d, cats in gcp_by_date.items(): + if d not in by_date: + by_date[d] = {'date': d, 'aws': {}, 'gcp': {}, 'aws_total': 0, 'gcp_total': 0} + by_date[d]['gcp'] = cats + by_date[d]['gcp_total'] = sum(cats.values()) + + sorted_dates = sorted(by_date.values(), key=lambda x: x['date']) + aws_total = sum(d['aws_total'] for d in sorted_dates) + gcp_total = sum(d['gcp_total'] for d in sorted_dates) + + return { + 'by_date': 
sorted_dates, + 'totals': { + 'aws': round(aws_total, 2), + 'gcp': round(gcp_total, 2), + 'combined': round(aws_total + gcp_total, 2), + } + } diff --git a/ci3/ci-metrics/billing/billing-dashboard.html b/ci3/ci-metrics/billing/billing-dashboard.html new file mode 100644 index 000000000000..87193ffae207 --- /dev/null +++ b/ci3/ci-metrics/billing/billing-dashboard.html @@ -0,0 +1,415 @@ + + + + + ACI - Namespace Billing + + + + +

namespace billing

+ +
+ + + + | + + + | + + + + | + + + + + + | + + + +
+ +
+ +
+ +
+
+

cost over time

+
+
+
+

cost by namespace

+
+
+
+

cost by category

+
+
+
+ + + + +
+ + + + + diff --git a/ci3/ci-metrics/billing/explore.py b/ci3/ci-metrics/billing/explore.py new file mode 100644 index 000000000000..c591d8c847ef --- /dev/null +++ b/ci3/ci-metrics/billing/explore.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python3 +"""CLI tool to explore GCP billing data from the Cloud Billing BigQuery export. + +Queries the actual billing export tables (not usage metering) to get real +invoice-level costs. Caches results in SQLite for fast re-queries. + +Usage: + python billing_explore.py discover # find billing export tables + python billing_explore.py fetch [--months N] # fetch & cache billing data + python billing_explore.py monthly # show monthly totals + python billing_explore.py monthly --by service # monthly by service + python billing_explore.py monthly --by sku # monthly by SKU + python billing_explore.py monthly --by project # monthly by project + python billing_explore.py daily [--month 2024-12] # daily for a month + python billing_explore.py top [--month 2024-12] # top costs for a month + python billing_explore.py compare # compare billing export vs usage metering +""" +import argparse +import os +import sqlite3 +import sys +from datetime import datetime, timedelta, timezone + +DB_PATH = os.path.join(os.getenv('LOGS_DISK_PATH', '/tmp'), 'billing_explore.db') + +SCHEMA = """ +CREATE TABLE IF NOT EXISTS gcp_billing ( + date TEXT NOT NULL, + project_id TEXT NOT NULL DEFAULT '', + service TEXT NOT NULL DEFAULT '', + sku TEXT NOT NULL DEFAULT '', + cost REAL NOT NULL DEFAULT 0, + credits REAL NOT NULL DEFAULT 0, + usage_amount REAL NOT NULL DEFAULT 0, + usage_unit TEXT NOT NULL DEFAULT '', + currency TEXT NOT NULL DEFAULT 'USD', + fetched_at TEXT NOT NULL, + PRIMARY KEY (date, project_id, service, sku) +); +CREATE INDEX IF NOT EXISTS idx_gcp_billing_date ON gcp_billing(date); +CREATE INDEX IF NOT EXISTS idx_gcp_billing_service ON gcp_billing(service); + +CREATE TABLE IF NOT EXISTS gcp_billing_meta ( + key TEXT PRIMARY KEY, + value TEXT +); +""" 
+ + +def get_db(): + os.makedirs(os.path.dirname(DB_PATH) or '.', exist_ok=True) + conn = sqlite3.connect(DB_PATH) + conn.execute('PRAGMA busy_timeout = 5000') + conn.row_factory = sqlite3.Row + conn.executescript(SCHEMA) + return conn + + +def fmt_usd(v): + if v >= 1000: + return f'${v:,.0f}' + if v >= 1: + return f'${v:,.2f}' + return f'${v:,.4f}' + + +# ---- BigQuery Discovery ---- + +def cmd_discover(args): + """Find billing export tables in the project.""" + from google.cloud import bigquery + project = args.project + client = bigquery.Client(project=project) + + print(f'Listing datasets in project: {project}') + datasets = list(client.list_datasets()) + if not datasets: + print(' No datasets found.') + return + + for ds in datasets: + ds_id = ds.dataset_id + tables = list(client.list_tables(ds.reference)) + billing_tables = [t for t in tables if 'billing' in t.table_id.lower() or 'cost' in t.table_id.lower()] + if billing_tables: + print(f'\n Dataset: {ds_id}') + for t in billing_tables: + full = f'{project}.{ds_id}.{t.table_id}' + print(f' {full}') + # Show schema for first billing table + tbl = client.get_table(t.reference) + print(f' rows: {tbl.num_rows}, size: {tbl.num_bytes / 1e6:.1f} MB') + print(f' columns: {", ".join(f.name for f in tbl.schema[:15])}') + else: + # Check for usage metering tables too + usage_tables = [t for t in tables if 'gke_cluster' in t.table_id.lower()] + if usage_tables: + print(f'\n Dataset: {ds_id} (usage metering)') + for t in usage_tables: + print(f' {project}.{ds_id}.{t.table_id}') + + # Also try common billing export naming patterns + print(f'\n Trying common billing export table patterns...') + for ds in datasets: + for t in client.list_tables(ds.reference): + if t.table_id.startswith('gcp_billing_export'): + full = f'{project}.{ds.dataset_id}.{t.table_id}' + print(f' FOUND: {full}') + + +# ---- BigQuery Fetch ---- + +def cmd_fetch(args): + """Fetch billing data from BigQuery and cache in SQLite.""" + from google.cloud 
import bigquery + + table = args.table + project = args.project + months = args.months + + if not table: + print('ERROR: --table is required. Run "discover" first to find the billing export table.') + print(' e.g. --table project.dataset.gcp_billing_export_resource_v1_XXXXXX') + sys.exit(1) + + client = bigquery.Client(project=project) + end_date = datetime.now(timezone.utc).date() + start_date = end_date - timedelta(days=months * 31) + + print(f'Fetching billing data from {start_date} to {end_date}') + print(f'Table: {table}') + + # Query the billing export table + # The standard billing export has: billing_account_id, service.description, + # sku.description, usage_start_time, project.id, cost, credits, usage.amount, usage.unit + query = f""" + SELECT + DATE(usage_start_time) AS date, + COALESCE(project.id, '') AS project_id, + COALESCE(service.description, '') AS service, + COALESCE(sku.description, '') AS sku, + SUM(cost) AS cost, + SUM(IFNULL((SELECT SUM(c.amount) FROM UNNEST(credits) c), 0)) AS credits, + SUM(usage.amount) AS usage_amount, + MAX(usage.unit) AS usage_unit + FROM `{table}` + WHERE DATE(usage_start_time) BETWEEN @start_date AND @end_date + GROUP BY date, project_id, service, sku + HAVING ABS(cost) > 0.0001 OR ABS(credits) > 0.0001 + ORDER BY date, service, sku + """ + + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter('start_date', 'DATE', start_date.isoformat()), + bigquery.ScalarQueryParameter('end_date', 'DATE', end_date.isoformat()), + ] + ) + + print('Running query...') + result = list(client.query(query, job_config=job_config).result()) + print(f'Got {len(result)} rows') + + if not result: + print('No data returned. Check table name and date range.') + return + + # Store in SQLite + db = get_db() + now = datetime.now(timezone.utc).isoformat() + + db.execute('DELETE FROM gcp_billing WHERE date >= ? 
AND date <= ?', + (start_date.isoformat(), end_date.isoformat())) + + for row in result: + db.execute(''' + INSERT OR REPLACE INTO gcp_billing + (date, project_id, service, sku, cost, credits, usage_amount, usage_unit, fetched_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + ''', ( + row.date.isoformat() if hasattr(row.date, 'isoformat') else str(row.date), + row.project_id or '', + row.service or '', + row.sku or '', + float(row.cost or 0), + float(row.credits or 0), + float(row.usage_amount or 0), + row.usage_unit or '', + now, + )) + + db.commit() + db.execute("INSERT OR REPLACE INTO gcp_billing_meta VALUES ('last_fetch', ?)", (now,)) + db.execute("INSERT OR REPLACE INTO gcp_billing_meta VALUES ('table', ?)", (table,)) + db.commit() + + print(f'Cached {len(result)} rows in {DB_PATH}') + + # Show quick summary + rows = db.execute(''' + SELECT substr(date, 1, 7) as month, SUM(cost) as cost, SUM(credits) as credits + FROM gcp_billing GROUP BY month ORDER BY month + ''').fetchall() + print(f'\n{"Month":<10} {"Gross":>12} {"Credits":>12} {"Net":>12}') + print('-' * 48) + for r in rows: + net = r['cost'] + r['credits'] + print(f'{r["month"]:<10} {fmt_usd(r["cost"]):>12} {fmt_usd(r["credits"]):>12} {fmt_usd(net):>12}') + + +# ---- Reports ---- + +def cmd_monthly(args): + """Show monthly totals.""" + db = get_db() + group_by = args.by + + if group_by == 'service': + rows = db.execute(''' + SELECT substr(date, 1, 7) as month, service, + SUM(cost) as cost, SUM(credits) as credits + FROM gcp_billing GROUP BY month, service ORDER BY month, cost DESC + ''').fetchall() + + current_month = None + for r in rows: + if r['month'] != current_month: + current_month = r['month'] + month_total = sum(row['cost'] + row['credits'] for row in rows if row['month'] == current_month) + print(f'\n {current_month} (net: {fmt_usd(month_total)})') + print(f' {"Service":<45} {"Gross":>10} {"Credits":>10} {"Net":>10}') + print(' ' + '-' * 77) + net = r['cost'] + r['credits'] + if abs(net) >= 0.01: + 
print(f' {r["service"]:<45} {fmt_usd(r["cost"]):>10} {fmt_usd(r["credits"]):>10} {fmt_usd(net):>10}') + + elif group_by == 'sku': + month_filter = args.month + if not month_filter: + # Use most recent month + row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone() + month_filter = row['m'] if row else None + + if not month_filter: + print('No data.') + return + + rows = db.execute(''' + SELECT service, sku, SUM(cost) as cost, SUM(credits) as credits, + SUM(usage_amount) as usage_amount, MAX(usage_unit) as usage_unit + FROM gcp_billing WHERE substr(date, 1, 7) = ? + GROUP BY service, sku ORDER BY cost DESC + ''', (month_filter,)).fetchall() + + total = sum(r['cost'] + r['credits'] for r in rows) + print(f'\n {month_filter} (net: {fmt_usd(total)})') + print(f' {"Service":<30} {"SKU":<40} {"Net":>10} {"Usage":>15}') + print(' ' + '-' * 97) + for r in rows[:40]: + net = r['cost'] + r['credits'] + if abs(net) >= 0.01: + usage = f'{r["usage_amount"]:.1f} {r["usage_unit"]}' if r['usage_amount'] else '' + print(f' {r["service"][:29]:<30} {r["sku"][:39]:<40} {fmt_usd(net):>10} {usage:>15}') + + elif group_by == 'project': + rows = db.execute(''' + SELECT substr(date, 1, 7) as month, project_id, + SUM(cost) as cost, SUM(credits) as credits + FROM gcp_billing GROUP BY month, project_id ORDER BY month, cost DESC + ''').fetchall() + + current_month = None + for r in rows: + if r['month'] != current_month: + current_month = r['month'] + month_total = sum(row['cost'] + row['credits'] for row in rows if row['month'] == current_month) + print(f'\n {current_month} (net: {fmt_usd(month_total)})') + print(f' {"Project":<45} {"Net":>12}') + print(' ' + '-' * 59) + net = r['cost'] + r['credits'] + if abs(net) >= 0.01: + print(f' {r["project_id"]:<45} {fmt_usd(net):>12}') + + else: + # Default: just monthly totals + rows = db.execute(''' + SELECT substr(date, 1, 7) as month, + SUM(cost) as cost, SUM(credits) as credits, + COUNT(DISTINCT date) as days + FROM 
gcp_billing GROUP BY month ORDER BY month + ''').fetchall() + + print(f'\n {"Month":<10} {"Gross":>12} {"Credits":>12} {"Net":>12} {"Days":>6} {"Daily Avg":>12}') + print(' ' + '-' * 68) + grand_total = 0 + for r in rows: + net = r['cost'] + r['credits'] + daily = net / max(r['days'], 1) + grand_total += net + print(f' {r["month"]:<10} {fmt_usd(r["cost"]):>12} {fmt_usd(r["credits"]):>12} {fmt_usd(net):>12} {r["days"]:>6} {fmt_usd(daily):>12}') + print(' ' + '-' * 68) + print(f' {"TOTAL":<10} {"":>12} {"":>12} {fmt_usd(grand_total):>12}') + + +def cmd_daily(args): + """Show daily costs for a month.""" + db = get_db() + month = args.month + if not month: + row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone() + month = row['m'] if row else None + + if not month: + print('No data.') + return + + rows = db.execute(''' + SELECT date, SUM(cost) as cost, SUM(credits) as credits + FROM gcp_billing WHERE substr(date, 1, 7) = ? + GROUP BY date ORDER BY date + ''', (month,)).fetchall() + + total = 0 + print(f'\n {"Date":<12} {"Gross":>10} {"Credits":>10} {"Net":>10}') + print(' ' + '-' * 44) + for r in rows: + net = r['cost'] + r['credits'] + total += net + print(f' {r["date"]:<12} {fmt_usd(r["cost"]):>10} {fmt_usd(r["credits"]):>10} {fmt_usd(net):>10}') + print(' ' + '-' * 44) + print(f' {"TOTAL":<12} {"":>10} {"":>10} {fmt_usd(total):>10}') + + +def cmd_top(args): + """Show top cost items for a month.""" + db = get_db() + month = args.month + if not month: + row = db.execute('SELECT MAX(substr(date, 1, 7)) as m FROM gcp_billing').fetchone() + month = row['m'] if row else None + + if not month: + print('No data.') + return + + # Top services + services = db.execute(''' + SELECT service, SUM(cost + credits) as net, SUM(cost) as gross + FROM gcp_billing WHERE substr(date, 1, 7) = ? 
+ GROUP BY service ORDER BY net DESC LIMIT 15 + ''', (month,)).fetchall() + + total = sum(r['net'] for r in services) + print(f'\n Top services for {month} (total: {fmt_usd(total)})') + print(f' {"Service":<45} {"Net":>12} {"% of Total":>10}') + print(' ' + '-' * 69) + for r in services: + pct = 100 * r['net'] / max(total, 0.01) + if abs(r['net']) >= 0.01: + print(f' {r["service"]:<45} {fmt_usd(r["net"]):>12} {pct:>9.1f}%') + + # Top SKUs + skus = db.execute(''' + SELECT service, sku, SUM(cost + credits) as net + FROM gcp_billing WHERE substr(date, 1, 7) = ? + GROUP BY service, sku ORDER BY net DESC LIMIT 20 + ''', (month,)).fetchall() + + print(f'\n Top SKUs for {month}') + print(f' {"Service":<25} {"SKU":<40} {"Net":>12}') + print(' ' + '-' * 79) + for r in skus: + if abs(r['net']) >= 0.01: + print(f' {r["service"][:24]:<25} {r["sku"][:39]:<40} {fmt_usd(r["net"]):>12}') + + +def cmd_compare(args): + """Compare billing export data vs usage metering estimates.""" + db = get_db() + + # Get billing export monthly totals + billing_rows = db.execute(''' + SELECT substr(date, 1, 7) as month, SUM(cost + credits) as net + FROM gcp_billing GROUP BY month ORDER BY month + ''').fetchall() + + if not billing_rows: + print('No billing export data cached. 
Run "fetch" first.') + return + + # Get usage metering estimates + try: + from billing import gcp as _gcp_billing + _gcp_billing._ensure_cached() + metering_data = _gcp_billing._cache.get('data', []) + except Exception as e: + print(f'Could not load usage metering data: {e}') + metering_data = [] + + metering_monthly = {} + for entry in metering_data: + month = entry['date'][:7] + day_total = sum(ns.get('total', 0) for ns in entry.get('namespaces', {}).values()) + metering_monthly[month] = metering_monthly.get(month, 0) + day_total + + print(f'\n {"Month":<10} {"Billing Export":>15} {"Usage Metering":>15} {"Ratio":>8}') + print(' ' + '-' * 50) + for r in billing_rows: + billing = r['net'] + metering = metering_monthly.get(r['month'], 0) + ratio = f'{billing / metering:.2f}x' if metering > 0 else '--' + print(f' {r["month"]:<10} {fmt_usd(billing):>15} {fmt_usd(metering):>15} {ratio:>8}') + + +def cmd_status(args): + """Show what data we have cached.""" + db = get_db() + meta = {r['key']: r['value'] for r in db.execute('SELECT * FROM gcp_billing_meta').fetchall()} + billing_count = db.execute('SELECT COUNT(*) as c FROM gcp_billing').fetchone()['c'] + billing_range = db.execute('SELECT MIN(date) as mn, MAX(date) as mx FROM gcp_billing').fetchone() + + print(f'\n Billing export cache:') + print(f' DB path: {DB_PATH}') + print(f' Table: {meta.get("table", "(not set)")}') + print(f' Last fetch: {meta.get("last_fetch", "(never)")}') + print(f' Rows: {billing_count}') + if billing_count: + print(f' Date range: {billing_range["mn"]} to {billing_range["mx"]}') + + # Also check billing export table status + try: + from google.cloud import bigquery + client = bigquery.Client(project=args.project) + table_id = 'testnet-440309.testnet440309billing.gcp_billing_export_v1_01EA8B_291C89_753ABC' + t = client.get_table(table_id) + print(f'\n BigQuery billing export:') + print(f' Table: {table_id}') + print(f' Rows: {t.num_rows}') + print(f' Modified: {t.modified}') + if t.num_rows > 
0: + print(f' STATUS: Data available! Run "fetch --table {table_id}" to cache it.') + else: + print(f' STATUS: Not yet populated. GCP takes up to 24h after enabling export.') + except Exception as e: + print(f'\n BigQuery check failed: {e}') + + +def cmd_metering(args): + """Query both usage metering tables and compare with different approaches.""" + from google.cloud import bigquery + project = args.project + client = bigquery.Client(project=project) + months = args.months + + end_date = datetime.now(timezone.utc).date() + start_date = end_date - timedelta(days=months * 31) + + # Table names + usage_table = f'{project}.egress_consumption.gke_cluster_resource_usage' + consumption_table = f'{project}.egress_consumption.gke_cluster_resource_consumption' + + print(f'Date range: {start_date} to {end_date}') + + # 1. Current approach: usage table with our SKU pricing + print('\n=== Approach 1: gke_cluster_resource_usage (requests) with hardcoded SKU prices ===') + _query_metering_table(client, usage_table, start_date, end_date, 'REQUESTS') + + # 2. Consumption table with our SKU pricing + print('\n=== Approach 2: gke_cluster_resource_consumption (actual) with hardcoded SKU prices ===') + _query_metering_table(client, consumption_table, start_date, end_date, 'CONSUMPTION') + + # 3. Raw totals: what does each table report? 
+ print('\n=== Approach 3: Raw resource totals from both tables ===') + for tname, label in [(usage_table, 'REQUESTS'), (consumption_table, 'CONSUMPTION')]: + query = f""" + SELECT + FORMAT_DATE('%Y-%m', DATE(start_time)) AS month, + resource_name, + SUM(usage.amount) AS total_amount, + usage.unit + FROM `{tname}` + WHERE DATE(start_time) BETWEEN @start AND @end + GROUP BY month, resource_name, usage.unit + ORDER BY month, resource_name + """ + job_config = bigquery.QueryJobConfig(query_parameters=[ + bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()), + bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()), + ]) + rows = list(client.query(query, job_config=job_config).result()) + print(f'\n {label} table raw resources:') + print(f' {"Month":<10} {"Resource":<20} {"Amount":>20} {"Unit":<15}') + print(' ' + '-' * 67) + for r in rows: + print(f' {r.month:<10} {r.resource_name:<20} {r.total_amount:>20,.0f} {r.unit:<15}') + + # 4. Count distinct SKUs + print('\n=== Approach 4: Distinct SKUs in usage table ===') + query = f""" + SELECT sku_id, resource_name, COUNT(*) as row_count, + SUM(usage.amount) as total_amount, usage.unit + FROM `{usage_table}` + WHERE DATE(start_time) BETWEEN @start AND @end + GROUP BY sku_id, resource_name, usage.unit + ORDER BY total_amount DESC + """ + job_config = bigquery.QueryJobConfig(query_parameters=[ + bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()), + bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()), + ]) + rows = list(client.query(query, job_config=job_config).result()) + # Import pricing to check + from billing.gcp import _SKU_PRICING + print(f' {"SKU ID":<20} {"Resource":<20} {"Rows":>10} {"Amount":>18} {"Unit":<12} {"Known?"}') + print(' ' + '-' * 90) + for r in rows: + known = 'YES' if r.sku_id in _SKU_PRICING else 'MISSING' + print(f' {r.sku_id:<20} {r.resource_name:<20} {r.row_count:>10,} {r.total_amount:>18,.0f} {r.unit:<12} {known}') + + +def 
_query_metering_table(client, table, start_date, end_date, label): + """Query a metering table and compute costs using our SKU pricing.""" + from google.cloud import bigquery + from billing.gcp import _SKU_PRICING, _usage_to_cost + + query = f""" + SELECT + FORMAT_DATE('%Y-%m', DATE(start_time)) AS month, + namespace, + sku_id, + resource_name, + SUM(usage.amount) AS total_usage + FROM `{table}` + WHERE DATE(start_time) BETWEEN @start AND @end + GROUP BY month, namespace, sku_id, resource_name + ORDER BY month, namespace + """ + job_config = bigquery.QueryJobConfig(query_parameters=[ + bigquery.ScalarQueryParameter('start', 'DATE', start_date.isoformat()), + bigquery.ScalarQueryParameter('end', 'DATE', end_date.isoformat()), + ]) + rows = list(client.query(query, job_config=job_config).result()) + + monthly = {} + monthly_by_cat = {} + missing_skus = set() + for r in rows: + cost, category = _usage_to_cost(r.sku_id, r.resource_name, float(r.total_usage)) + if r.sku_id not in _SKU_PRICING: + missing_skus.add(r.sku_id) + month = r.month + monthly[month] = monthly.get(month, 0) + cost + key = (month, category) + monthly_by_cat[key] = monthly_by_cat.get(key, 0) + cost + + print(f' {"Month":<10} {"Total":>12} {"compute_spot":>14} {"compute_od":>14} {"network":>10} {"storage":>10}') + print(' ' + '-' * 74) + for month in sorted(monthly.keys()): + total = monthly[month] + spot = monthly_by_cat.get((month, 'compute_spot'), 0) + od = monthly_by_cat.get((month, 'compute_ondemand'), 0) + net = monthly_by_cat.get((month, 'network'), 0) + stor = monthly_by_cat.get((month, 'storage'), 0) + print(f' {month:<10} {fmt_usd(total):>12} {fmt_usd(spot):>14} {fmt_usd(od):>14} {fmt_usd(net):>10} {fmt_usd(stor):>10}') + + if missing_skus: + print(f'\n WARNING: {len(missing_skus)} unknown SKU IDs (not priced): {", ".join(sorted(missing_skus)[:5])}...') + + +# ---- Main ---- + +def main(): + parser = argparse.ArgumentParser(description='Explore GCP billing data') + 
parser.add_argument('--project', default='testnet-440309', help='GCP project ID') + parser.add_argument('--table', default='', help='BigQuery billing export table') + sub = parser.add_subparsers(dest='command') + + sub.add_parser('discover', help='Find billing export tables') + + fetch_p = sub.add_parser('fetch', help='Fetch billing data from BigQuery') + fetch_p.add_argument('--months', type=int, default=6, help='How many months back to fetch') + + monthly_p = sub.add_parser('monthly', help='Monthly totals') + monthly_p.add_argument('--by', choices=['service', 'sku', 'project'], default='', help='Group by') + monthly_p.add_argument('--month', default='', help='Filter to month (YYYY-MM)') + + daily_p = sub.add_parser('daily', help='Daily costs') + daily_p.add_argument('--month', default='', help='Month to show (YYYY-MM)') + + top_p = sub.add_parser('top', help='Top cost items') + top_p.add_argument('--month', default='', help='Month to show (YYYY-MM)') + + sub.add_parser('compare', help='Compare billing export vs usage metering') + sub.add_parser('status', help='Show data status (what we have cached)') + + meter_p = sub.add_parser('metering', help='Query both metering tables directly and compare') + meter_p.add_argument('--months', type=int, default=6, help='How many months back') + + args = parser.parse_args() + + if not args.command: + parser.print_help() + sys.exit(1) + + cmds = { + 'discover': cmd_discover, + 'fetch': cmd_fetch, + 'monthly': cmd_monthly, + 'daily': cmd_daily, + 'top': cmd_top, + 'compare': cmd_compare, + 'metering': cmd_metering, + 'status': cmd_status, + } + cmds[args.command](args) + + +if __name__ == '__main__': + main() diff --git a/ci3/ci-metrics/billing/fetch_billing.py b/ci3/ci-metrics/billing/fetch_billing.py new file mode 100644 index 000000000000..271a788fc6bd --- /dev/null +++ b/ci3/ci-metrics/billing/fetch_billing.py @@ -0,0 +1,262 @@ +#!/usr/bin/env python3 +"""Fetch namespace billing data from GKE resource consumption metering in 
BigQuery. + +Queries the GKE cluster resource consumption table which records CPU and memory +usage per namespace per pod. Actual GCP SKU prices (from the Cloud Billing +Catalog API) are applied to convert resource usage into dollar costs. + +Categories produced: + - compute_spot (Spot / Preemptible VM cores + RAM) + - compute_ondemand (On-demand VM cores + RAM) + +Usage: + # Fetch last 30 days + python fetch-billing.py + + # Specific range + python fetch-billing.py --from 2026-01-01 --to 2026-01-31 + + # Custom output directory + python fetch-billing.py --output-dir /tmp/billing + +Environment: + Requires Application Default Credentials or GOOGLE_APPLICATION_CREDENTIALS. + pip install google-cloud-bigquery +""" +import argparse +import json +import os +import sys +from datetime import datetime, timedelta + +from google.cloud import bigquery + +# ---- defaults ---- +DEFAULT_PROJECT = 'testnet-440309' +DEFAULT_DATASET = 'egress_consumption' +DEFAULT_TABLE_CONSUMPTION = 'gke_cluster_resource_consumption' +DEFAULT_TABLE_USAGE = 'gke_cluster_resource_usage' +DEFAULT_OUTPUT_DIR = os.path.join( + os.getenv('LOGS_DISK_PATH', '/logs-disk'), 'billing' +) + +# ---- SKU pricing ---- +# Prices sourced from GCP Cloud Billing Catalog API for us-west1. 
+SKU_PRICING = { + # Compute - Spot (per vCPU-hour / per GiB-hour) + 'E7FF-A0FB-FA82': {'price': 0.00497, 'resource': 'cpu', 'category': 'compute_spot'}, + '48AB-89F5-9112': {'price': 0.000668, 'resource': 'memory', 'category': 'compute_spot'}, + # Compute - On-demand T2D + 'EFE6-E23C-19CB': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + 'FB05-036A-8982': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2 + 'BB77-5FDA-69D9': {'price': 0.031611, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5B01-D157-A097': {'price': 0.004237, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2D + 'A03E-E620-7389': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5535-6D2D-4B50': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Network Egress (per GiB) + '0C3C-6B13-B1E8': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + '6B8F-E63D-832B': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '92CB-C25F-B1D1': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '984A-1F27-2D1F': {'price': 0.04, 'resource': 'networkEgress', 'category': 'network'}, + '9DE9-9092-B3BC': {'price': 0.20, 'resource': 'networkEgress', 'category': 'network'}, + 'C863-37DA-506E': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'C8EA-1A86-3D28': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'DE9E-AFBC-A15A': {'price': 0.01, 'resource': 'networkEgress', 'category': 'network'}, + 'DFA5-B5C6-36D6': {'price': 0.085, 'resource': 'networkEgress', 'category': 'network'}, + 'F274-1692-F213': {'price': 0.08, 'resource': 'networkEgress', 'category': 'network'}, + 'FDBC-6E3B-D4D8': {'price': 0.15, 'resource': 'networkEgress', 'category': 'network'}, + # Storage (per GiB-month) + 'D973-5D65-BAB2': {'price': 0.04, 'resource': 'storage', 'category': 
'storage'}, +} + + +def usage_to_cost(sku_id: str, resource_name: str, amount: float) -> tuple[float, str]: + """Convert raw usage amount to dollar cost. Returns (cost_usd, category).""" + info = SKU_PRICING.get(sku_id) + if not info: + return 0.0, 'other' + + price = info['price'] + if resource_name == 'cpu': + return (amount / 3600.0) * price, info['category'] + elif resource_name == 'memory': + return (amount / 3600.0 / (1024 ** 3)) * price, info['category'] + elif resource_name.startswith('networkEgress'): + return (amount / (1024 ** 3)) * price, info['category'] + elif resource_name == 'storage': + gib_months = amount / (1024 ** 3) / (730 * 3600) + return gib_months * price, info['category'] + return 0.0, info['category'] + + +# ---- BigQuery query ---- + +def fetch_usage_rows( + client: bigquery.Client, + project: str, + dataset: str, + date_from: str, + date_to: str, +) -> list[dict]: + """Query both metering tables for daily usage by namespace + SKU.""" + consumption = f'{project}.{dataset}.{DEFAULT_TABLE_CONSUMPTION}' + usage = f'{project}.{dataset}.{DEFAULT_TABLE_USAGE}' + query = f""" + SELECT date, namespace, sku_id, resource_name, SUM(total_usage) AS total_usage FROM ( + SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, SUM(usage.amount) AS total_usage + FROM `{consumption}` + WHERE DATE(start_time) BETWEEN @date_from AND @date_to + GROUP BY date, namespace, sku_id, resource_name + UNION ALL + SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, SUM(usage.amount) AS total_usage + FROM `{usage}` + WHERE DATE(start_time) BETWEEN @date_from AND @date_to + AND resource_name IN ('networkEgress', 'storage') + GROUP BY date, namespace, sku_id, resource_name + ) + GROUP BY date, namespace, sku_id, resource_name + ORDER BY date, namespace + """ + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter('date_from', 'DATE', date_from), + bigquery.ScalarQueryParameter('date_to', 'DATE', date_to), + 
] + ) + rows = client.query(query, job_config=job_config).result() + return [dict(row) for row in rows] + + +# ---- aggregate into daily JSON ---- + +def build_daily_files(rows: list[dict]) -> tuple[dict[str, dict], set[str]]: + """Convert raw usage rows into daily billing JSON structures. + + Returns (days_dict, unknown_skus). + """ + days: dict[str, dict] = {} + unknown_skus: set[str] = set() + + for row in rows: + date_str = ( + row['date'].isoformat() + if hasattr(row['date'], 'isoformat') + else str(row['date']) + ) + ns = row['namespace'] + sku_id = row['sku_id'] + resource_name = row['resource_name'] + amount = float(row['total_usage']) + + cost, category = usage_to_cost(sku_id, resource_name, amount) + + if sku_id not in SKU_PRICING: + unknown_skus.add(sku_id) + + if cost <= 0: + continue + + if date_str not in days: + days[date_str] = {'date': date_str, 'namespaces': {}} + if ns not in days[date_str]['namespaces']: + days[date_str]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + + entry = days[date_str]['namespaces'][ns] + entry['breakdown'][category] = ( + entry['breakdown'].get(category, 0) + cost + ) + entry['total'] += cost + + # Round + for day in days.values(): + for ns_data in day['namespaces'].values(): + ns_data['total'] = round(ns_data['total'], 4) + ns_data['breakdown'] = { + k: round(v, 4) for k, v in ns_data['breakdown'].items() + } + + return days, unknown_skus + + +def write_files(days: dict[str, dict], output_dir: str) -> int: + os.makedirs(output_dir, exist_ok=True) + count = 0 + for date_str, data in sorted(days.items()): + filepath = os.path.join(output_dir, f'{date_str}.json') + with open(filepath, 'w') as f: + json.dump(data, f, indent=2) + count += 1 + return count + + +# ---- CLI ---- + +def main(): + parser = argparse.ArgumentParser( + description='Fetch GKE namespace compute billing from resource consumption metering' + ) + today = datetime.utcnow().strftime('%Y-%m-%d') + default_from = (datetime.utcnow() - 
timedelta(days=30)).strftime('%Y-%m-%d') + + parser.add_argument('--from', dest='date_from', default=default_from, + help='Start date YYYY-MM-DD (default: 30 days ago)') + parser.add_argument('--to', dest='date_to', default=today, + help='End date YYYY-MM-DD (default: today)') + parser.add_argument('--project', default=DEFAULT_PROJECT, + help=f'GCP project ID (default: {DEFAULT_PROJECT})') + parser.add_argument('--dataset', default=DEFAULT_DATASET, + help=f'BigQuery dataset (default: {DEFAULT_DATASET})') + parser.add_argument('--output-dir', default=DEFAULT_OUTPUT_DIR, + help=f'Output directory (default: {DEFAULT_OUTPUT_DIR})') + args = parser.parse_args() + + print(f'Connecting to BigQuery ({args.project})...') + client = bigquery.Client(project=args.project) + + print(f'Fetching metering data {args.date_from} to {args.date_to}...') + print(f' consumption: {args.project}.{args.dataset}.{DEFAULT_TABLE_CONSUMPTION}') + print(f' usage: {args.project}.{args.dataset}.{DEFAULT_TABLE_USAGE}') + rows = fetch_usage_rows( + client, args.project, args.dataset, + args.date_from, args.date_to, + ) + print(f'Got {len(rows)} aggregated rows') + + if not rows: + print('No metering data found. Check that:') + print(' 1. GKE resource consumption metering is enabled') + print(' 2. 
The date range has data') + return + + days, unknown_skus = build_daily_files(rows) + count = write_files(days, args.output_dir) + print(f'Wrote {count} daily billing files to {args.output_dir}') + + if unknown_skus: + print(f'\nWARNING: {len(unknown_skus)} unknown SKU(s) had zero cost assigned:') + for s in sorted(unknown_skus): + print(f' {s}') + print('Add these to SKU_PRICING in fetch-billing.py with prices from') + print('the GCP Cloud Billing Catalog API.') + + # Summary + total = sum( + ns['total'] for day in days.values() + for ns in day['namespaces'].values() + ) + ns_set: set[str] = set() + cat_set: set[str] = set() + for day in days.values(): + for ns_name, ns_data in day['namespaces'].items(): + ns_set.add(ns_name) + cat_set.update(ns_data['breakdown'].keys()) + + print(f'\nTotal cost: ${total:,.2f}') + print(f'Namespaces ({len(ns_set)}): {sorted(ns_set)}') + print(f'Categories: {sorted(cat_set)}') + + +if __name__ == '__main__': + main() diff --git a/ci3/ci-metrics/billing/gcp.py b/ci3/ci-metrics/billing/gcp.py new file mode 100644 index 000000000000..5254e20bbbf0 --- /dev/null +++ b/ci3/ci-metrics/billing/gcp.py @@ -0,0 +1,289 @@ +"""Namespace billing helpers for rkapp. + +Fetches GKE namespace billing from BigQuery with in-memory cache. +Route definitions remain in rk.py; this module provides the logic. + +SKU pricing: Queries the Cloud Billing pricing export table in BigQuery +if available, otherwise falls back to hardcoded rates. To enable the +pricing export: + 1. Go to GCP Console > Billing > Billing export + 2. Enable "Detailed usage cost" and "Pricing" exports + 3. Set the dataset to the _BQ_DATASET below +""" +import threading +import time +from datetime import datetime, timedelta, timezone +from pathlib import Path + +# BigQuery defaults +_BQ_PROJECT = 'testnet-440309' +_BQ_DATASET = 'egress_consumption' +_BQ_TABLE_USAGE = 'gke_cluster_resource_usage' +_BQ_TABLE_PRICING = 'cloud_pricing_export' + +# Hardcoded fallback SKU pricing (us-west1). 
+# cpu: price per vCPU-hour, memory: price per GiB-hour +# network: price per GiB, storage: price per GiB-month +_HARDCODED_SKU_PRICING = { + # Compute - Spot + 'E7FF-A0FB-FA82': {'price': 0.00497, 'resource': 'cpu', 'category': 'compute_spot'}, + '48AB-89F5-9112': {'price': 0.000668, 'resource': 'memory', 'category': 'compute_spot'}, + # Compute - On-demand T2D + 'EFE6-E23C-19CB': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + 'FB05-036A-8982': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2 + 'BB77-5FDA-69D9': {'price': 0.031611, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5B01-D157-A097': {'price': 0.004237, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Compute - On-demand N2D + 'A03E-E620-7389': {'price': 0.027502, 'resource': 'cpu', 'category': 'compute_ondemand'}, + '5535-6D2D-4B50': {'price': 0.003686, 'resource': 'memory', 'category': 'compute_ondemand'}, + # Network Egress (price per GiB) + '0C3C-6B13-B1E8': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + '6B8F-E63D-832B': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '92CB-C25F-B1D1': {'price': 0.0, 'resource': 'networkEgress', 'category': 'network'}, + '984A-1F27-2D1F': {'price': 0.04, 'resource': 'networkEgress', 'category': 'network'}, + '9DE9-9092-B3BC': {'price': 0.20, 'resource': 'networkEgress', 'category': 'network'}, + 'C863-37DA-506E': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'C8EA-1A86-3D28': {'price': 0.02, 'resource': 'networkEgress', 'category': 'network'}, + 'DE9E-AFBC-A15A': {'price': 0.01, 'resource': 'networkEgress', 'category': 'network'}, + 'DFA5-B5C6-36D6': {'price': 0.085, 'resource': 'networkEgress', 'category': 'network'}, + 'F274-1692-F213': {'price': 0.08, 'resource': 'networkEgress', 'category': 'network'}, + 'FDBC-6E3B-D4D8': {'price': 0.15, 'resource': 'networkEgress', 'category': 'network'}, + # 
Storage (price per GiB-month) + 'D973-5D65-BAB2': {'price': 0.04, 'resource': 'storage', 'category': 'storage'}, +} + +# Resource name to category mapping for SKUs discovered from BigQuery +_RESOURCE_CATEGORIES = { + ('cpu', True): 'compute_spot', + ('cpu', False): 'compute_ondemand', + ('memory', True): 'compute_spot', + ('memory', False): 'compute_ondemand', +} + +# Active SKU pricing — updated from BigQuery if available +_SKU_PRICING = dict(_HARDCODED_SKU_PRICING) + +# In-memory caches +_cache = {'data': [], 'ts': 0} +_cache_lock = threading.Lock() +_CACHE_TTL = 6 * 3600 # 6 hours + +_pricing_cache = {'ts': 0} +_pricing_lock = threading.Lock() +_PRICING_CACHE_TTL = 24 * 3600 # 24 hours + + +def _refresh_sku_pricing(): + """Try to fetch SKU pricing from BigQuery pricing export table.""" + global _SKU_PRICING + now = time.time() + if _pricing_cache['ts'] and now - _pricing_cache['ts'] < _PRICING_CACHE_TTL: + return + if not _pricing_lock.acquire(blocking=False): + return + try: + if _pricing_cache['ts'] and time.time() - _pricing_cache['ts'] < _PRICING_CACHE_TTL: + return + from google.cloud import bigquery + client = bigquery.Client(project=_BQ_PROJECT) + table = f'{_BQ_PROJECT}.{_BQ_DATASET}.{_BQ_TABLE_PRICING}' + + # Get the known SKU IDs we need pricing for + sku_ids = list(_HARDCODED_SKU_PRICING.keys()) + placeholders = ', '.join(f"'{s}'" for s in sku_ids) + + query = f""" + SELECT sku.id AS sku_id, + pricing.effective_price AS price, + sku.description AS description + FROM `{table}` + WHERE sku.id IN ({placeholders}) + AND service.description = 'Compute Engine' + QUALIFY ROW_NUMBER() OVER (PARTITION BY sku.id ORDER BY export_time DESC) = 1 + """ + rows = list(client.query(query).result()) + if rows: + updated = dict(_HARDCODED_SKU_PRICING) + for row in rows: + sid = row.sku_id + if sid in updated: + updated[sid] = {**updated[sid], 'price': float(row.price)} + _SKU_PRICING = updated + _pricing_cache['ts'] = time.time() + print(f"[rk_billing] Updated 
{len(rows)} SKU prices from BigQuery") + else: + _pricing_cache['ts'] = time.time() + print("[rk_billing] No pricing rows returned, using hardcoded rates") + except Exception as e: + # Table probably doesn't exist yet — use hardcoded rates + _pricing_cache['ts'] = time.time() + print(f"[rk_billing] SKU pricing query failed (using hardcoded): {e}") + finally: + _pricing_lock.release() + + +# ---- BigQuery fetch ---- + +def _usage_to_cost(sku_id, resource_name, amount): + info = _SKU_PRICING.get(sku_id) + if not info: + return 0.0, 'other' + price = info['price'] + if resource_name == 'cpu': + # cpu-seconds -> hours + return (amount / 3600.0) * price, info['category'] + elif resource_name == 'memory': + # byte-seconds -> GiB-hours + return (amount / 3600.0 / (1024 ** 3)) * price, info['category'] + elif resource_name.startswith('networkEgress'): + # bytes -> GiB + return (amount / (1024 ** 3)) * price, info['category'] + elif resource_name == 'storage': + # byte-seconds -> GiB-months (730 hours/month) + gib_months = amount / (1024 ** 3) / (730 * 3600) + return gib_months * price, info['category'] + return 0.0, info['category'] + + +def _fetch_from_bigquery(date_from_str, date_to_str): + """Query BigQuery for usage data, return list of daily billing entries.""" + try: + from google.cloud import bigquery + except ImportError: + print("[rk_billing] google-cloud-bigquery not installed") + return [] + + try: + client = bigquery.Client(project=_BQ_PROJECT) + # Use the usage table for all resources (actual consumption, not just requests). + # The consumption table only records resource *requests* which can be far lower + # than actual usage (e.g. prove-n-tps-real: $2.87 requests vs $138.72 actual). 
+ usage = f'{_BQ_PROJECT}.{_BQ_DATASET}.{_BQ_TABLE_USAGE}' + query = f""" + SELECT DATE(start_time) AS date, namespace, sku_id, resource_name, + SUM(usage.amount) AS total_usage + FROM `{usage}` + WHERE DATE(start_time) BETWEEN @date_from AND @date_to + GROUP BY date, namespace, sku_id, resource_name + ORDER BY date, namespace + """ + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter('date_from', 'DATE', date_from_str), + bigquery.ScalarQueryParameter('date_to', 'DATE', date_to_str), + ] + ) + rows = list(client.query(query, job_config=job_config).result()) + except Exception as e: + print(f"[rk_billing] BigQuery fetch failed: {e}") + return [] + + # Build daily structures + days = {} + for row in rows: + date_str = row.date.isoformat() if hasattr(row.date, 'isoformat') else str(row.date) + ns = row.namespace + cost, category = _usage_to_cost(row.sku_id, row.resource_name, float(row.total_usage)) + if cost <= 0: + continue + if date_str not in days: + days[date_str] = {'date': date_str, 'namespaces': {}} + if ns not in days[date_str]['namespaces']: + days[date_str]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + entry = days[date_str]['namespaces'][ns] + entry['breakdown'][category] = entry['breakdown'].get(category, 0) + cost + entry['total'] += cost + + # Round values + for data in days.values(): + for ns_data in data['namespaces'].values(): + ns_data['total'] = round(ns_data['total'], 4) + ns_data['breakdown'] = {k: round(v, 4) for k, v in ns_data['breakdown'].items()} + + return sorted(days.values(), key=lambda x: x['date']) + + +def _ensure_cached(): + now = time.time() + if _cache['data'] and now - _cache['ts'] < _CACHE_TTL: + return + if not _cache_lock.acquire(blocking=False): + return + try: + yesterday = datetime.now(timezone.utc).date() - timedelta(days=1) + date_from = (yesterday - timedelta(days=365)).isoformat() + date_to = yesterday.isoformat() + print(f"[rk_billing] Fetching billing data from BigQuery 
({date_from} to {date_to})...") + data = _fetch_from_bigquery(date_from, date_to) + if data: + _cache['data'] = data + _cache['ts'] = now + print(f"[rk_billing] Cached {len(data)} days of billing data") + finally: + _cache_lock.release() + + +# ---- Public API ---- + +def get_billing_files_in_range(date_from, date_to): + """Return billing data for dates in range. Fetches from BigQuery with in-memory cache.""" + # Refresh SKU pricing from BigQuery (async, falls back to hardcoded) + threading.Thread(target=_refresh_sku_pricing, daemon=True).start() + + if not _cache['data']: + _ensure_cached() # block on first load so dashboard isn't empty + else: + threading.Thread(target=_ensure_cached, daemon=True).start() + + # Convert datetime args to date strings for filtering + from_str = date_from.strftime('%Y-%m-%d') if hasattr(date_from, 'strftime') else str(date_from) + to_str = date_to.strftime('%Y-%m-%d') if hasattr(date_to, 'strftime') else str(date_to) + + return [e for e in _cache['data'] if from_str <= e['date'] <= to_str] + + +def _merge_ns_billing(target, ns_data): + target['total'] += ns_data.get('total', 0) + for cat, val in ns_data.get('breakdown', {}).items(): + target['breakdown'][cat] = target['breakdown'].get(cat, 0) + val + + +def aggregate_billing_weekly(daily_data): + if not daily_data: + return [] + weeks = {} + for entry in daily_data: + d = datetime.strptime(entry['date'], '%Y-%m-%d') + week_start = d - timedelta(days=d.weekday()) + week_key = week_start.strftime('%Y-%m-%d') + if week_key not in weeks: + weeks[week_key] = {'date': week_key, 'namespaces': {}} + for ns, ns_data in entry.get('namespaces', {}).items(): + if ns not in weeks[week_key]['namespaces']: + weeks[week_key]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + _merge_ns_billing(weeks[week_key]['namespaces'][ns], ns_data) + return sorted(weeks.values(), key=lambda x: x['date']) + + +def aggregate_billing_monthly(daily_data): + if not daily_data: + return [] + months = {} + for entry 
in daily_data: + month_key = entry['date'][:7] + '-01' + if month_key not in months: + months[month_key] = {'date': month_key, 'namespaces': {}} + for ns, ns_data in entry.get('namespaces', {}).items(): + if ns not in months[month_key]['namespaces']: + months[month_key]['namespaces'][ns] = {'total': 0, 'breakdown': {}} + _merge_ns_billing(months[month_key]['namespaces'][ns], ns_data) + return sorted(months.values(), key=lambda x: x['date']) + + +def serve_billing_dashboard(): + billing_html_path = Path(__file__).parent / 'billing-dashboard.html' + if billing_html_path.exists(): + with billing_html_path.open('r') as f: + return f.read() + return None diff --git a/ci3/ci-metrics/ci-run-seed.json.gz b/ci3/ci-metrics/ci-run-seed.json.gz new file mode 100644 index 000000000000..a971ad10d38b Binary files /dev/null and b/ci3/ci-metrics/ci-run-seed.json.gz differ diff --git a/ci3/ci-metrics/db.py b/ci3/ci-metrics/db.py new file mode 100644 index 000000000000..93e970fe3a56 --- /dev/null +++ b/ci3/ci-metrics/db.py @@ -0,0 +1,107 @@ +"""SQLite database for CI metrics storage. + +Stores test events (from Redis pub/sub) and merge queue daily stats +(backfilled from GitHub API). 
+""" +import os +import sqlite3 +import threading + +_DB_PATH = os.path.join(os.getenv('LOGS_DISK_PATH', '/logs-disk'), 'metrics.db') +_local = threading.local() + +SCHEMA = """ +PRAGMA journal_mode=WAL; + +CREATE TABLE IF NOT EXISTS test_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + status TEXT NOT NULL, + test_cmd TEXT NOT NULL, + log_url TEXT, + ref_name TEXT NOT NULL, + commit_hash TEXT, + commit_author TEXT, + commit_msg TEXT, + exit_code INTEGER, + duration_secs REAL, + is_scenario INTEGER DEFAULT 0, + owners TEXT, + flake_group_id TEXT, + dashboard TEXT NOT NULL DEFAULT '', + timestamp TEXT NOT NULL +); +CREATE INDEX IF NOT EXISTS idx_test_events_status ON test_events(status); +CREATE INDEX IF NOT EXISTS idx_test_events_ts ON test_events(timestamp); +CREATE INDEX IF NOT EXISTS idx_test_events_cmd ON test_events(test_cmd); +CREATE INDEX IF NOT EXISTS idx_test_events_dashboard ON test_events(dashboard); + +CREATE TABLE IF NOT EXISTS merge_queue_daily ( + date TEXT PRIMARY KEY, + total INTEGER NOT NULL DEFAULT 0, + success INTEGER NOT NULL DEFAULT 0, + failure INTEGER NOT NULL DEFAULT 0, + cancelled INTEGER NOT NULL DEFAULT 0, + in_progress INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS ci_runs ( + dashboard TEXT NOT NULL, + name TEXT NOT NULL DEFAULT '', + timestamp_ms INTEGER NOT NULL, + complete_ms INTEGER, + status TEXT, + author TEXT, + pr_number INTEGER, + instance_type TEXT, + instance_vcpus INTEGER, + spot INTEGER DEFAULT 0, + cost_usd REAL, + job_id TEXT DEFAULT '', + arch TEXT DEFAULT '', + synced_at TEXT NOT NULL, + PRIMARY KEY (dashboard, timestamp_ms, name) +); +CREATE INDEX IF NOT EXISTS idx_ci_runs_ts ON ci_runs(timestamp_ms); +CREATE INDEX IF NOT EXISTS idx_ci_runs_name ON ci_runs(name); +CREATE INDEX IF NOT EXISTS idx_ci_runs_dashboard ON ci_runs(dashboard); +""" + + +_MIGRATIONS = [ + # Add columns introduced after initial schema + "ALTER TABLE ci_runs ADD COLUMN instance_vcpus INTEGER", + "ALTER TABLE ci_runs ADD COLUMN 
job_id TEXT DEFAULT ''", + "ALTER TABLE ci_runs ADD COLUMN arch TEXT DEFAULT ''", + "CREATE INDEX IF NOT EXISTS idx_ci_runs_dashboard ON ci_runs(dashboard)", +] + + +def get_db() -> sqlite3.Connection: + conn = getattr(_local, 'conn', None) + if conn is None: + os.makedirs(os.path.dirname(_DB_PATH), exist_ok=True) + conn = sqlite3.connect(_DB_PATH) + conn.execute('PRAGMA busy_timeout = 5000') + conn.row_factory = sqlite3.Row + conn.executescript(SCHEMA) + # Run migrations (ignore "duplicate column" errors for idempotency) + for sql in _MIGRATIONS: + try: + conn.execute(sql) + except sqlite3.OperationalError: + pass + conn.commit() + _local.conn = conn + return conn + + +def query(sql: str, params=()) -> list[dict]: + conn = get_db() + rows = conn.execute(sql, params).fetchall() + return [dict(r) for r in rows] + + +def execute(sql: str, params=()): + conn = get_db() + conn.execute(sql, params) + conn.commit() diff --git a/ci3/ci-metrics/ec2_pricing.py b/ci3/ci-metrics/ec2_pricing.py new file mode 100644 index 000000000000..ace55ea4f40a --- /dev/null +++ b/ci3/ci-metrics/ec2_pricing.py @@ -0,0 +1,232 @@ +"""EC2 instance pricing: live on-demand + spot rates with TTL cache. + +Queries the AWS Pricing API (on-demand) and EC2 describe_spot_price_history +(spot) for us-east-2 instance rates. Caches results for 24 hours and falls +back to hardcoded values if the APIs are unavailable. 
+ +Exports: + get_instance_rate(instance_type, is_spot) -> float + get_fallback_vcpu_rate(is_spot) -> float +""" +import json +import threading +import time +from datetime import datetime, timezone + +# ---- Hardcoded fallback rates (us-east-2, USD/hr) ---- + +_HARDCODED_RATES = { + ('m6a.48xlarge', True): 8.31, # spot + ('m6a.48xlarge', False): 16.56, # on-demand + ('m6a.32xlarge', True): 5.54, + ('m6a.32xlarge', False): 11.04, + ('m6a.16xlarge', True): 2.77, + ('m6a.16xlarge', False): 5.52, + ('m7a.48xlarge', True): 8.31, + ('m7a.48xlarge', False): 16.56, + ('m7a.16xlarge', True): 2.77, + ('m7a.16xlarge', False): 5.52, + ('m7i.48xlarge', True): 8.31, + ('m7i.48xlarge', False): 16.56, + ('r7g.16xlarge', True): 1.97, + ('r7g.16xlarge', False): 3.94, +} +_FALLBACK_VCPU_HOUR = {True: 0.0433, False: 0.0864} + +# ---- Cache state ---- + +_REGION = 'us-east-2' +_LOCATION = 'US East (Ohio)' # Pricing API uses location names, not codes +_CACHE_TTL = 24 * 3600 # 24 hours + +_cache = { + 'ondemand': {}, # instance_type -> USD/hr + 'spot': {}, # instance_type -> USD/hr + 'ts': 0, # last successful fetch time +} +_cache_lock = threading.Lock() + + +# ---- On-demand pricing (AWS Pricing API) ---- + +def _fetch_ondemand_rate(pricing_client, instance_type: str) -> float | None: + """Fetch on-demand hourly rate for a single instance type from AWS Pricing API. + + The Pricing API is only available in us-east-1 and ap-south-1. 
+ """ + try: + response = pricing_client.get_products( + ServiceCode='AmazonEC2', + Filters=[ + {'Type': 'TERM_MATCH', 'Field': 'instanceType', 'Value': instance_type}, + {'Type': 'TERM_MATCH', 'Field': 'location', 'Value': _LOCATION}, + {'Type': 'TERM_MATCH', 'Field': 'operatingSystem', 'Value': 'Linux'}, + {'Type': 'TERM_MATCH', 'Field': 'preInstalledSw', 'Value': 'NA'}, + {'Type': 'TERM_MATCH', 'Field': 'tenancy', 'Value': 'Shared'}, + {'Type': 'TERM_MATCH', 'Field': 'capacitystatus', 'Value': 'Used'}, + ], + MaxResults=10, + ) + for price_item in response.get('PriceList', []): + product = json.loads(price_item) if isinstance(price_item, str) else price_item + on_demand = product.get('terms', {}).get('OnDemand', {}) + for term in on_demand.values(): + for dim in term.get('priceDimensions', {}).values(): + price = dim.get('pricePerUnit', {}).get('USD') + if price and float(price) > 0: + return float(price) + except Exception as e: + print(f"[ec2_pricing] on-demand fetch error for {instance_type}: {e}") + return None + + +def _fetch_all_ondemand(instance_types: list[str]) -> dict[str, float]: + """Fetch on-demand rates for all instance types. Returns {type: rate}.""" + try: + import boto3 + except ImportError: + print("[ec2_pricing] boto3 not installed, skipping on-demand fetch") + return {} + + results = {} + try: + # Pricing API is only in us-east-1 and ap-south-1 + pricing = boto3.client('pricing', region_name='us-east-1') + for itype in instance_types: + rate = _fetch_ondemand_rate(pricing, itype) + if rate is not None: + results[itype] = rate + except Exception as e: + print(f"[ec2_pricing] on-demand client error: {e}") + return results + + +# ---- Spot pricing (EC2 describe_spot_price_history) ---- + +def _fetch_all_spot(instance_types: list[str]) -> dict[str, float]: + """Fetch current spot prices for all instance types. Returns {type: rate}. + + Uses describe_spot_price_history with StartTime=now to get the most recent + price. 
Takes the minimum across availability zones. + """ + try: + import boto3 + except ImportError: + print("[ec2_pricing] boto3 not installed, skipping spot fetch") + return {} + + results = {} + try: + ec2 = boto3.client('ec2', region_name=_REGION) + for itype in instance_types: + try: + response = ec2.describe_spot_price_history( + InstanceTypes=[itype], + ProductDescriptions=['Linux/UNIX'], + StartTime=datetime.now(timezone.utc), + MaxResults=10, + ) + prices = [] + for entry in response.get('SpotPriceHistory', []): + try: + prices.append(float(entry['SpotPrice'])) + except (KeyError, ValueError): + continue + if prices: + # Use the minimum AZ price (what our fleet would target) + results[itype] = min(prices) + except Exception as e: + print(f"[ec2_pricing] spot fetch error for {itype}: {e}") + except Exception as e: + print(f"[ec2_pricing] spot client error: {e}") + return results + + +# ---- Cache refresh ---- + +def _get_known_instance_types() -> list[str]: + """Return the set of instance types we need pricing for.""" + return sorted({itype for itype, _ in _HARDCODED_RATES}) + + +def _refresh_cache(): + """Fetch fresh pricing data and update the cache. 
Thread-safe.""" + now = time.time() + if _cache['ts'] and now - _cache['ts'] < _CACHE_TTL: + return + if not _cache_lock.acquire(blocking=False): + return # another thread is already refreshing + try: + # Double-check after acquiring lock + if _cache['ts'] and time.time() - _cache['ts'] < _CACHE_TTL: + return + + instance_types = _get_known_instance_types() + ondemand = _fetch_all_ondemand(instance_types) + spot = _fetch_all_spot(instance_types) + + # Only update cache if we got at least some data + if ondemand or spot: + if ondemand: + _cache['ondemand'] = ondemand + if spot: + _cache['spot'] = spot + _cache['ts'] = time.time() + print(f"[ec2_pricing] Cache refreshed: {len(ondemand)} on-demand, {len(spot)} spot rates") + else: + print("[ec2_pricing] No pricing data returned, keeping existing cache/fallbacks") + except Exception as e: + print(f"[ec2_pricing] Cache refresh error: {e}") + finally: + _cache_lock.release() + + +def _ensure_cached(): + """Ensure cache is populated. Blocks on first call, async refresh after.""" + if not _cache['ts']: + _refresh_cache() # block on first load + else: + threading.Thread(target=_refresh_cache, daemon=True).start() + + +# ---- Public API ---- + +def get_instance_rate(instance_type: str, is_spot: bool) -> float: + """Get the hourly rate for an EC2 instance type. + + Tries live pricing cache first, falls back to hardcoded rates. + + Args: + instance_type: EC2 instance type (e.g. 'm6a.48xlarge') + is_spot: True for spot pricing, False for on-demand + + Returns: + Hourly rate in USD. 
+ """ + _ensure_cached() + + # Try live cache + cache_key = 'spot' if is_spot else 'ondemand' + rate = _cache[cache_key].get(instance_type) + if rate is not None: + return rate + + # Fall back to hardcoded + rate = _HARDCODED_RATES.get((instance_type, is_spot)) + if rate is not None: + return rate + + # Unknown instance type -- return 0 (caller should use vCPU fallback) + return 0.0 + + +def get_fallback_vcpu_rate(is_spot: bool) -> float: + """Get the per-vCPU hourly rate for unknown instance types. + + Args: + is_spot: True for spot, False for on-demand + + Returns: + Per-vCPU hourly rate in USD. + """ + return _FALLBACK_VCPU_HOUR[is_spot] diff --git a/ci3/ci-metrics/github_data.py b/ci3/ci-metrics/github_data.py new file mode 100644 index 000000000000..8824d187cb81 --- /dev/null +++ b/ci3/ci-metrics/github_data.py @@ -0,0 +1,666 @@ +"""GitHub API polling with in-memory cache. + +Fetches PR lifecycle, deployment runs, branch lag, and merge queue stats via `gh` CLI. +Most data cached in memory with TTL. Merge queue stats persisted to SQLite daily. 
+""" +import json +import subprocess +import threading +import time +from datetime import datetime, timedelta, timezone + +REPO = 'AztecProtocol/aztec-packages' + +BRANCH_PAIRS = [ + ('next', 'staging-public'), + ('next', 'testnet'), + ('staging-public', 'testnet'), +] + +DEPLOY_WORKFLOWS = [ + 'deploy-staging-networks.yml', + 'deploy-network.yml', + 'deploy-next-net.yml', +] + +_CACHE_TTL = 3600 # 1 hour +_pr_cache = {'data': [], 'ts': 0} +_deploy_cache = {'data': [], 'ts': 0} +_lag_cache = {'data': [], 'ts': 0} +_pr_author_cache = {} # {pr_number: {'author': str, 'title': str, 'branch': str}} +_pr_lock = threading.Lock() +_deploy_lock = threading.Lock() +_lag_lock = threading.Lock() + + +def _gh(args: list[str]) -> str | None: + try: + result = subprocess.run( + ['gh'] + args, + capture_output=True, text=True, timeout=30 + ) + if result.returncode == 0: + return result.stdout.strip() + except (FileNotFoundError, subprocess.TimeoutExpired) as e: + print(f"[rk_github] gh error: {e}") + return None + + +# ---- PR lifecycle ---- + +def _fetch_and_process_prs() -> list[dict]: + out = _gh([ + 'pr', 'list', '--repo', REPO, '--state', 'merged', + '--limit', '500', + '--json', 'number,author,title,createdAt,mergedAt,closedAt,baseRefName,' + 'headRefName,additions,deletions,changedFiles,isDraft,reviewDecision,labels' + ]) + if not out: + return [] + try: + prs = json.loads(out) + except json.JSONDecodeError: + return [] + + for pr in prs: + author = pr.get('author', {}) + if isinstance(author, dict): + pr['author'] = author.get('login', 'unknown') + # Extract label names from label objects + labels = pr.get('labels', []) + if labels and isinstance(labels[0], dict): + pr['labels'] = [l.get('name', '') for l in labels] + created = pr.get('createdAt', '') + merged = pr.get('mergedAt') + if created and merged: + try: + c = datetime.fromisoformat(created.replace('Z', '+00:00')) + m = datetime.fromisoformat(merged.replace('Z', '+00:00')) + pr['merge_time_hrs'] = round((m - 
c).total_seconds() / 3600, 2) + except (ValueError, TypeError): + pr['merge_time_hrs'] = None + else: + pr['merge_time_hrs'] = None + pr['merged_date'] = merged[:10] if merged else None + pr['size'] = (pr.get('additions', 0) or 0) + (pr.get('deletions', 0) or 0) + return prs + + +def _ensure_prs(): + now = time.time() + if _pr_cache['data'] and now - _pr_cache['ts'] < _CACHE_TTL: + return + if not _pr_lock.acquire(blocking=False): + return + try: + prs = _fetch_and_process_prs() + if prs: + _pr_cache['data'] = prs + _pr_cache['ts'] = now + finally: + _pr_lock.release() + + +# ---- Deployments ---- + +def _fetch_all_deploys() -> list[dict]: + all_runs = [] + for workflow in DEPLOY_WORKFLOWS: + out = _gh([ + 'run', 'list', '--repo', REPO, + '--workflow', workflow, '--limit', '50', + '--json', 'databaseId,status,conclusion,createdAt,updatedAt,headBranch,name' + ]) + if not out: + continue + try: + runs = json.loads(out) + except json.JSONDecodeError: + continue + for run in runs: + started = run.get('createdAt', '') + completed = run.get('updatedAt') + duration = None + if started and completed: + try: + s = datetime.fromisoformat(started.replace('Z', '+00:00')) + c = datetime.fromisoformat(completed.replace('Z', '+00:00')) + duration = round((c - s).total_seconds(), 1) + except (ValueError, TypeError): + pass + all_runs.append({ + 'run_id': str(run.get('databaseId', '')), + 'workflow_name': workflow.replace('.yml', ''), + 'ref_name': run.get('headBranch', ''), + 'status': run.get('conclusion', run.get('status', 'unknown')), + 'started_at': started, + 'completed_at': completed, + 'duration_secs': duration, + 'started_date': started[:10] if started else None, + }) + return all_runs + + +def _ensure_deploys(): + now = time.time() + if _deploy_cache['data'] and now - _deploy_cache['ts'] < _CACHE_TTL: + return + if not _deploy_lock.acquire(blocking=False): + return + try: + deploys = _fetch_all_deploys() + if deploys: + _deploy_cache['data'] = deploys + 
_deploy_cache['ts'] = now + finally: + _deploy_lock.release() + + +# ---- Branch lag ---- + +def _fetch_branch_lag() -> list[dict]: + results = [] + today = datetime.now(timezone.utc).date().isoformat() + for source, target in BRANCH_PAIRS: + out = _gh([ + 'api', f'repos/{REPO}/compare/{target}...{source}', + '--jq', '.ahead_by' + ]) + if not out: + continue + try: + commits_behind = int(out) + except (ValueError, TypeError): + continue + + days_behind = None + out2 = _gh([ + 'api', f'repos/{REPO}/compare/{target}...{source}', + '--jq', '.commits[0].commit.committer.date' + ]) + if out2: + try: + oldest = datetime.fromisoformat(out2.replace('Z', '+00:00')) + days_behind = round((datetime.now(timezone.utc) - oldest).total_seconds() / 86400, 1) + except (ValueError, TypeError): + pass + + results.append({ + 'date': today, + 'source': source, + 'target': target, + 'commits_behind': commits_behind, + 'days_behind': days_behind, + }) + return results + + +def _ensure_lag(): + now = time.time() + if _lag_cache['data'] and now - _lag_cache['ts'] < _CACHE_TTL: + return + if not _lag_lock.acquire(blocking=False): + return + try: + lag = _fetch_branch_lag() + if lag: + _lag_cache['data'] = lag + _lag_cache['ts'] = now + finally: + _lag_lock.release() + + +# ---- Query functions for API endpoints ---- + +def get_deployment_speed(date_from: str, date_to: str, workflow: str = '') -> dict: + if not _deploy_cache['data']: + _ensure_deploys() + else: + threading.Thread(target=_ensure_deploys, daemon=True).start() + deploys = [d for d in _deploy_cache['data'] + if d.get('started_date') and date_from <= d['started_date'] <= date_to] + if workflow: + deploys = [d for d in deploys if d['workflow_name'] == workflow] + + # Group by date + by_date_map = {} + for d in deploys: + date = d['started_date'] + if date not in by_date_map: + by_date_map[date] = {'durations': [], 'success': 0, 'failure': 0, 'count': 0} + by_date_map[date]['count'] += 1 + if d['duration_secs'] is not None: + 
by_date_map[date]['durations'].append(d['duration_secs'] / 60.0) + if d['status'] == 'success': + by_date_map[date]['success'] += 1 + elif d['status'] == 'failure': + by_date_map[date]['failure'] += 1 + + by_date = [] + for date in sorted(by_date_map): + b = by_date_map[date] + durs = sorted(b['durations']) + by_date.append({ + 'date': date, + 'median_mins': round(durs[len(durs)//2], 1) if durs else None, + 'p95_mins': round(durs[int(len(durs)*0.95)], 1) if durs else None, + 'count': b['count'], + 'success': b['success'], + 'failure': b['failure'], + }) + + all_durs = sorted([d['duration_secs']/60.0 for d in deploys if d['duration_secs'] is not None]) + total = len(deploys) + success = sum(1 for d in deploys if d['status'] == 'success') + + recent = [{'run_id': d['run_id'], 'workflow_name': d['workflow_name'], + 'status': d['status'], 'duration_mins': round(d['duration_secs']/60.0, 1) if d['duration_secs'] else None, + 'started_at': d['started_at'], 'ref_name': d['ref_name']} + for d in sorted(deploys, key=lambda x: x['started_at'], reverse=True)[:50]] + + return { + 'by_date': by_date, + 'summary': { + 'median_mins': round(all_durs[len(all_durs)//2], 1) if all_durs else None, + 'p95_mins': round(all_durs[int(len(all_durs)*0.95)], 1) if all_durs else None, + 'success_rate': round(100.0 * success / max(total, 1), 1), + 'total': total, + }, + 'recent': recent, + } + + +def get_branch_lag(date_from: str, date_to: str) -> dict: + if not _lag_cache['data']: + _ensure_lag() + else: + threading.Thread(target=_ensure_lag, daemon=True).start() + pairs = [] + for source, target in BRANCH_PAIRS: + matching = [l for l in _lag_cache['data'] + if l['source'] == source and l['target'] == target] + current = matching[-1] if matching else {'commits_behind': 0, 'days_behind': 0} + pairs.append({ + 'source': source, + 'target': target, + 'current': {'commits_behind': current.get('commits_behind', 0), + 'days_behind': current.get('days_behind', 0)}, + 'history': [{'date': l['date'], 
'commits_behind': l['commits_behind'], + 'days_behind': l['days_behind']} for l in matching], + }) + return {'pairs': pairs} + + +def get_pr_author(pr_number) -> dict | None: + """Look up PR author/title by number. Results are cached permanently (PR data doesn't change).""" + pr_number = int(pr_number) if pr_number else None + if not pr_number: + return None + if pr_number in _pr_author_cache: + return _pr_author_cache[pr_number] + + # Check merged PR cache first (already fetched) + for pr in _pr_cache.get('data', []): + if pr.get('number') == pr_number: + info = {'author': pr.get('author', 'unknown'), 'title': pr.get('title', ''), + 'branch': pr.get('headRefName', ''), + 'additions': pr.get('additions', 0), 'deletions': pr.get('deletions', 0)} + _pr_author_cache[pr_number] = info + return info + + # Fetch from GitHub API + out = _gh(['pr', 'view', str(pr_number), '--repo', REPO, + '--json', 'author,title,headRefName,additions,deletions']) + if out: + try: + data = json.loads(out) + author = data.get('author', {}) + if isinstance(author, dict): + author = author.get('login', 'unknown') + info = {'author': author, 'title': data.get('title', ''), + 'branch': data.get('headRefName', ''), + 'additions': data.get('additions', 0), 'deletions': data.get('deletions', 0)} + _pr_author_cache[pr_number] = info + return info + except (json.JSONDecodeError, KeyError): + pass + return None + + +def batch_get_pr_authors(pr_numbers: set) -> dict: + """Fetch authors for multiple PR numbers, using cache. 
Returns {pr_number: info}.""" + result = {} + to_fetch = [] + for prn in pr_numbers: + if not prn: + continue + prn = int(prn) + if prn in _pr_author_cache: + result[prn] = _pr_author_cache[prn] + else: + to_fetch.append(prn) + + # Check merged PR cache first + for pr in _pr_cache.get('data', []): + num = pr.get('number') + if num in to_fetch: + info = {'author': pr.get('author', 'unknown'), 'title': pr.get('title', ''), + 'branch': pr.get('headRefName', ''), + 'additions': pr.get('additions', 0), 'deletions': pr.get('deletions', 0)} + _pr_author_cache[num] = info + result[num] = info + to_fetch.remove(num) + + # Fetch remaining individually (with a cap to avoid API abuse) + for prn in to_fetch[:50]: + info = get_pr_author(prn) + if info: + result[prn] = info + + return result + + +def get_branch_pr_map() -> dict: + """Return {branch_name: pr_number} from the PR cache. Call _ensure_prs first.""" + if not _pr_cache['data']: + _ensure_prs() + else: + threading.Thread(target=_ensure_prs, daemon=True).start() + return {pr['headRefName']: pr['number'] + for pr in _pr_cache.get('data', []) + if pr.get('headRefName')} + + +def get_pr_metrics(date_from: str, date_to: str, author: str = '', + ci_runs: list = None) -> dict: + """Get PR metrics. 
ci_runs should be passed from the caller (read from Redis).""" + if not _pr_cache['data']: + _ensure_prs() + else: + threading.Thread(target=_ensure_prs, daemon=True).start() + + prs = [p for p in _pr_cache['data'] + if p.get('merged_date') and date_from <= p['merged_date'] <= date_to] + if author: + prs = [p for p in prs if p.get('author') == author] + + # Compute per-PR CI cost and duration from ci_runs + pr_costs = {} + pr_run_counts = {} + pr_ci_time = {} # total CI compute hours per PR + if ci_runs: + for run in ci_runs: + prn = run.get('pr_number') + if not prn: + continue + if run.get('cost_usd') is not None: + pr_costs[prn] = pr_costs.get(prn, 0) + run['cost_usd'] + pr_run_counts[prn] = pr_run_counts.get(prn, 0) + 1 + c = run.get('complete') + t = run.get('timestamp') + if c and t: + pr_ci_time[prn] = pr_ci_time.get(prn, 0) + (c - t) / 3_600_000 + + for pr in prs: + prn = pr.get('number') + pr['ci_cost_usd'] = round(pr_costs.get(prn, 0), 2) + pr['ci_runs_count'] = pr_run_counts.get(prn, 0) + pr['ci_time_hrs'] = round(pr_ci_time.get(prn, 0), 2) + + # Group by date + by_date_map = {} + for pr in prs: + date = pr['merged_date'] + if date not in by_date_map: + by_date_map[date] = {'costs': [], 'merge_times': [], 'ci_times': [], + 'run_counts': [], 'count': 0} + by_date_map[date]['count'] += 1 + by_date_map[date]['costs'].append(pr['ci_cost_usd']) + by_date_map[date]['ci_times'].append(pr.get('ci_time_hrs', 0)) + by_date_map[date]['run_counts'].append(pr.get('ci_runs_count', 0)) + if pr.get('merge_time_hrs') is not None: + by_date_map[date]['merge_times'].append(pr['merge_time_hrs']) + + def _median(vals): + s = sorted(vals) + n = len(s) + if n == 0: + return None + if n % 2 == 1: + return s[n // 2] + return (s[n // 2 - 1] + s[n // 2]) / 2 + + by_date = [] + for d, v in sorted(by_date_map.items()): + by_date.append({ + 'date': d, + 'pr_count': v['count'], + 'avg_cost': round(sum(v['costs']) / max(len(v['costs']), 1), 2), + 'median_merge_time_hrs': 
round(_median(v['merge_times']), 1) if v['merge_times'] else None, + 'avg_ci_time_hrs': round(sum(v['ci_times']) / max(len(v['ci_times']), 1), 2), + 'avg_runs': round(sum(v['run_counts']) / max(len(v['run_counts']), 1), 1), + }) + + # By author (all PRs in range, not filtered by author) + all_prs_in_range = [p for p in _pr_cache['data'] + if p.get('merged_date') and date_from <= p['merged_date'] <= date_to] + + author_map = {} + for pr in all_prs_in_range: + prn = pr.get('number') + a = pr.get('author', 'unknown') + if a not in author_map: + author_map[a] = {'total_cost': 0, 'pr_count': 0, 'merge_times': [], + 'total_ci_time': 0, 'total_runs': 0} + author_map[a]['total_cost'] += round(pr_costs.get(prn, 0), 2) + author_map[a]['pr_count'] += 1 + author_map[a]['total_ci_time'] += round(pr_ci_time.get(prn, 0), 2) + author_map[a]['total_runs'] += pr_run_counts.get(prn, 0) + if pr.get('merge_time_hrs') is not None: + author_map[a]['merge_times'].append(pr['merge_time_hrs']) + + by_author = [] + for a, v in sorted(author_map.items(), key=lambda x: -x[1]['total_cost'])[:20]: + by_author.append({ + 'author': a, + 'total_cost': round(v['total_cost'], 2), + 'pr_count': v['pr_count'], + 'avg_merge_time_hrs': round(_median(v['merge_times']), 1) if v['merge_times'] else None, + 'avg_ci_time_hrs': round(v['total_ci_time'] / max(v['pr_count'], 1), 2), + 'avg_runs_per_pr': round(v['total_runs'] / max(v['pr_count'], 1), 1), + }) + + all_costs = [p.get('ci_cost_usd', 0) for p in prs] + all_merge = [p['merge_time_hrs'] for p in prs if p.get('merge_time_hrs') is not None] + all_run_counts = [p.get('ci_runs_count', 0) for p in prs] + all_ci_times = [p.get('ci_time_hrs', 0) for p in prs] + + return { + 'by_date': by_date, + 'by_author': by_author, + 'summary': { + 'avg_cost_per_pr': round(sum(all_costs)/max(len(all_costs),1), 2) if all_costs else 0, + 'median_merge_time_hrs': round(_median(all_merge), 1) if all_merge else None, + 'total_prs': len(prs), + 'total_cost': 
round(sum(all_costs), 2), + 'avg_ci_runs_per_pr': round(sum(all_run_counts)/max(len(all_run_counts),1), 1) if all_run_counts else 0, + 'avg_ci_time_hrs': round(sum(all_ci_times)/max(len(all_ci_times),1), 2) if all_ci_times else 0, + }, + } + + +# ---- Merge queue failure rate ---- + +CI3_WORKFLOW = 'ci3.yml' + +def _fetch_merge_queue_runs(date_str: str) -> dict: + """Fetch merge_group workflow runs for a single date. Returns daily summary.""" + out = _gh([ + 'api', '--paginate', + f'repos/{REPO}/actions/workflows/{CI3_WORKFLOW}/runs' + f'?event=merge_group&created={date_str}&per_page=100', + '--jq', '.workflow_runs[] | [.conclusion, .status] | @tsv', + ]) + summary = {'date': date_str, 'total': 0, 'success': 0, 'failure': 0, + 'cancelled': 0, 'in_progress': 0} + if not out: + return summary + for line in out.strip().split('\n'): + if not line.strip(): + continue + parts = line.split('\t') + conclusion = parts[0] if parts[0] else '' + status = parts[1] if len(parts) > 1 else '' + summary['total'] += 1 + if conclusion == 'success': + summary['success'] += 1 + elif conclusion == 'failure': + summary['failure'] += 1 + elif conclusion == 'cancelled': + summary['cancelled'] += 1 + elif status in ('in_progress', 'queued', 'waiting'): + summary['in_progress'] += 1 + else: + summary['failure'] += 1 # treat unknown conclusions as failures + return summary + + +def _load_backfill_json(): + """Load seed data from merge-queue-backfill.json if SQLite is empty.""" + import db + from pathlib import Path + conn = db.get_db() + + count = conn.execute('SELECT COUNT(*) as c FROM merge_queue_daily').fetchone()['c'] + if count > 0: + return + + seed = Path(__file__).parent / 'merge-queue-backfill.json' + if not seed.exists(): + return + + import json + with seed.open() as f: + data = json.load(f) + + print(f"[rk_github] Loading {len(data)} days from merge-queue-backfill.json...") + for ds, summary in data.items(): + conn.execute( + 'INSERT OR REPLACE INTO merge_queue_daily (date, total, 
success, failure, cancelled, in_progress) ' + 'VALUES (?, ?, ?, ?, ?, ?)', + (ds, summary['total'], summary['success'], summary['failure'], + summary['cancelled'], summary['in_progress'])) + conn.commit() + + +def _backfill_merge_queue(): + """Backfill missing merge queue daily stats into SQLite.""" + import db + conn = db.get_db() + + # Load seed data on first run + _load_backfill_json() + + # Find which dates we already have + existing = {row['date'] for row in + conn.execute('SELECT date FROM merge_queue_daily').fetchall()} + + yesterday = (datetime.now(timezone.utc) - timedelta(days=1)).date() + # Backfill up to 365 days + start = yesterday - timedelta(days=365) + current = start + + missing = [] + while current <= yesterday: + ds = current.isoformat() + if ds not in existing: + missing.append(ds) + current += timedelta(days=1) + + if not missing: + return + + print(f"[rk_github] Backfilling {len(missing)} days of merge queue stats...") + for ds in missing: + summary = _fetch_merge_queue_runs(ds) + if summary['total'] == 0: + conn.execute( + 'INSERT OR REPLACE INTO merge_queue_daily (date, total, success, failure, cancelled, in_progress) ' + 'VALUES (?, 0, 0, 0, 0, 0)', (ds,)) + else: + conn.execute( + 'INSERT OR REPLACE INTO merge_queue_daily (date, total, success, failure, cancelled, in_progress) ' + 'VALUES (?, ?, ?, ?, ?, ?)', + (ds, summary['total'], summary['success'], summary['failure'], + summary['cancelled'], summary['in_progress'])) + conn.commit() + + +def refresh_merge_queue_today(): + """Refresh today's (and yesterday's) merge queue stats. 
Called periodically.""" + import db + conn = db.get_db() + today = datetime.now(timezone.utc).date().isoformat() + yesterday = (datetime.now(timezone.utc) - timedelta(days=1)).date().isoformat() + + for ds in [yesterday, today]: + summary = _fetch_merge_queue_runs(ds) + conn.execute( + 'INSERT OR REPLACE INTO merge_queue_daily (date, total, success, failure, cancelled, in_progress) ' + 'VALUES (?, ?, ?, ?, ?, ?)', + (ds, summary['total'], summary['success'], summary['failure'], + summary['cancelled'], summary['in_progress'])) + conn.commit() + + +_mq_backfill_lock = threading.Lock() +_mq_last_refresh = 0 +_MQ_REFRESH_TTL = 3600 # refresh today's data every hour + + +def ensure_merge_queue_data(): + """Ensure merge queue data is backfilled and today is fresh.""" + global _mq_last_refresh + now = time.time() + if now - _mq_last_refresh < _MQ_REFRESH_TTL: + return + if not _mq_backfill_lock.acquire(blocking=False): + return + try: + _backfill_merge_queue() + refresh_merge_queue_today() + _mq_last_refresh = now + finally: + _mq_backfill_lock.release() + + +def get_merge_queue_stats(date_from: str, date_to: str) -> dict: + """Get merge queue failure rate by day. Triggers backfill if needed.""" + # Ensure data is populated (async after first load) + import db + conn = db.get_db() + count = conn.execute('SELECT COUNT(*) as c FROM merge_queue_daily').fetchone()['c'] + if count == 0: + ensure_merge_queue_data() # block on first load + else: + threading.Thread(target=ensure_merge_queue_data, daemon=True).start() + + rows = db.query( + 'SELECT date, total, success, failure, cancelled, in_progress ' + 'FROM merge_queue_daily WHERE date >= ? AND date <= ? 
ORDER BY date', + (date_from, date_to)) + + total_runs = sum(r['total'] for r in rows) + total_fail = sum(r['failure'] for r in rows) + total_success = sum(r['success'] for r in rows) + + return { + 'by_date': rows, + 'summary': { + 'total_runs': total_runs, + 'total_success': total_success, + 'total_failure': total_fail, + 'failure_rate': round(total_fail / max(total_runs, 1) * 100, 1), + 'days': len([r for r in rows if r['total'] > 0]), + }, + } diff --git a/ci3/ci-metrics/merge-queue-backfill.json b/ci3/ci-metrics/merge-queue-backfill.json new file mode 100644 index 000000000000..079077590581 --- /dev/null +++ b/ci3/ci-metrics/merge-queue-backfill.json @@ -0,0 +1,2564 @@ +{ + "2025-02-10": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-11": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-12": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-13": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-14": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-15": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-16": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-17": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-18": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-19": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-20": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-21": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-22": { + "total": 0, + 
"success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-23": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-24": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-25": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-26": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-27": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-02-28": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-01": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-02": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-03": { + "total": 1, + "success": 0, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-04": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-05": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-06": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-07": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-08": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-09": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-10": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-11": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-12": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-13": { + 
"total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-14": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-15": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-16": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-17": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-18": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-19": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-20": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-21": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-22": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-23": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-24": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-25": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-26": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-27": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-28": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-29": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-30": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-03-31": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + 
"2025-04-01": { + "total": 3, + "success": 2, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-02": { + "total": 31, + "success": 19, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-03": { + "total": 113, + "success": 58, + "failure": 55, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-04": { + "total": 69, + "success": 50, + "failure": 19, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-05": { + "total": 4, + "success": 4, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-06": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-07": { + "total": 42, + "success": 32, + "failure": 10, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-08": { + "total": 27, + "success": 19, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-09": { + "total": 29, + "success": 26, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-10": { + "total": 42, + "success": 35, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-11": { + "total": 51, + "success": 36, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-12": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-13": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-14": { + "total": 24, + "success": 19, + "failure": 4, + "cancelled": 1, + "in_progress": 0 + }, + "2025-04-15": { + "total": 41, + "success": 22, + "failure": 19, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-16": { + "total": 26, + "success": 21, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-17": { + "total": 29, + "success": 28, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-18": { + "total": 10, + "success": 10, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-19": { + "total": 4, + "success": 4, + "failure": 
0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-20": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-21": { + "total": 5, + "success": 5, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-22": { + "total": 49, + "success": 33, + "failure": 15, + "cancelled": 1, + "in_progress": 0 + }, + "2025-04-23": { + "total": 32, + "success": 28, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-24": { + "total": 29, + "success": 26, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-25": { + "total": 28, + "success": 26, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-26": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-27": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-28": { + "total": 26, + "success": 20, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-29": { + "total": 60, + "success": 26, + "failure": 34, + "cancelled": 0, + "in_progress": 0 + }, + "2025-04-30": { + "total": 47, + "success": 33, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-01": { + "total": 31, + "success": 27, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-02": { + "total": 8, + "success": 8, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-03": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-04": { + "total": 7, + "success": 7, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-05": { + "total": 14, + "success": 11, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-06": { + "total": 18, + "success": 16, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-07": { + "total": 22, + "success": 20, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-08": { + 
"total": 18, + "success": 15, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-09": { + "total": 36, + "success": 27, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-10": { + "total": 2, + "success": 1, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-11": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-12": { + "total": 47, + "success": 30, + "failure": 17, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-13": { + "total": 134, + "success": 65, + "failure": 69, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-14": { + "total": 51, + "success": 34, + "failure": 17, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-15": { + "total": 22, + "success": 9, + "failure": 12, + "cancelled": 1, + "in_progress": 0 + }, + "2025-05-16": { + "total": 21, + "success": 15, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-17": { + "total": 2, + "success": 1, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-18": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-19": { + "total": 10, + "success": 9, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-20": { + "total": 30, + "success": 15, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-21": { + "total": 26, + "success": 12, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-22": { + "total": 51, + "success": 21, + "failure": 30, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-23": { + "total": 67, + "success": 13, + "failure": 53, + "cancelled": 1, + "in_progress": 0 + }, + "2025-05-24": { + "total": 5, + "success": 2, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-25": { + "total": 5, + "success": 0, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-26": { + "total": 10, + "success": 7, + "failure": 3, + "cancelled": 
0, + "in_progress": 0 + }, + "2025-05-27": { + "total": 61, + "success": 12, + "failure": 49, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-28": { + "total": 56, + "success": 15, + "failure": 41, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-29": { + "total": 77, + "success": 24, + "failure": 52, + "cancelled": 1, + "in_progress": 0 + }, + "2025-05-30": { + "total": 25, + "success": 15, + "failure": 10, + "cancelled": 0, + "in_progress": 0 + }, + "2025-05-31": { + "total": 6, + "success": 3, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-01": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-02": { + "total": 50, + "success": 20, + "failure": 29, + "cancelled": 1, + "in_progress": 0 + }, + "2025-06-03": { + "total": 57, + "success": 22, + "failure": 35, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-04": { + "total": 219, + "success": 22, + "failure": 196, + "cancelled": 1, + "in_progress": 0 + }, + "2025-06-05": { + "total": 166, + "success": 19, + "failure": 147, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-06": { + "total": 73, + "success": 27, + "failure": 45, + "cancelled": 1, + "in_progress": 0 + }, + "2025-06-07": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-08": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-09": { + "total": 124, + "success": 31, + "failure": 93, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-10": { + "total": 44, + "success": 29, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-11": { + "total": 19, + "success": 16, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-12": { + "total": 26, + "success": 14, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-13": { + "total": 29, + "success": 24, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-14": { + 
"total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-15": { + "total": 1, + "success": 0, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-16": { + "total": 44, + "success": 21, + "failure": 23, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-17": { + "total": 29, + "success": 15, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-18": { + "total": 38, + "success": 25, + "failure": 13, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-19": { + "total": 15, + "success": 11, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-20": { + "total": 27, + "success": 21, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-21": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-22": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-23": { + "total": 30, + "success": 14, + "failure": 16, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-24": { + "total": 26, + "success": 17, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-25": { + "total": 26, + "success": 20, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-26": { + "total": 44, + "success": 21, + "failure": 22, + "cancelled": 1, + "in_progress": 0 + }, + "2025-06-27": { + "total": 18, + "success": 13, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-28": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-29": { + "total": 3, + "success": 3, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-06-30": { + "total": 27, + "success": 17, + "failure": 10, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-01": { + "total": 26, + "success": 12, + "failure": 13, + "cancelled": 1, + "in_progress": 0 + }, + "2025-07-02": { + "total": 42, + "success": 25, + "failure": 17, + 
"cancelled": 0, + "in_progress": 0 + }, + "2025-07-03": { + "total": 17, + "success": 12, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-04": { + "total": 15, + "success": 12, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-05": { + "total": 4, + "success": 3, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-06": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-07": { + "total": 20, + "success": 14, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-08": { + "total": 33, + "success": 19, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-09": { + "total": 19, + "success": 13, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-10": { + "total": 22, + "success": 14, + "failure": 7, + "cancelled": 1, + "in_progress": 0 + }, + "2025-07-11": { + "total": 6, + "success": 6, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-12": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-13": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-14": { + "total": 29, + "success": 21, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-15": { + "total": 49, + "success": 22, + "failure": 27, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-16": { + "total": 47, + "success": 21, + "failure": 26, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-17": { + "total": 18, + "success": 10, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-18": { + "total": 13, + "success": 12, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-19": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-20": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-21": { + 
"total": 26, + "success": 22, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-22": { + "total": 25, + "success": 19, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-23": { + "total": 33, + "success": 16, + "failure": 15, + "cancelled": 2, + "in_progress": 0 + }, + "2025-07-24": { + "total": 61, + "success": 26, + "failure": 35, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-25": { + "total": 35, + "success": 17, + "failure": 16, + "cancelled": 2, + "in_progress": 0 + }, + "2025-07-26": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-27": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-28": { + "total": 23, + "success": 22, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-29": { + "total": 52, + "success": 21, + "failure": 31, + "cancelled": 0, + "in_progress": 0 + }, + "2025-07-30": { + "total": 30, + "success": 15, + "failure": 14, + "cancelled": 1, + "in_progress": 0 + }, + "2025-07-31": { + "total": 35, + "success": 23, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-01": { + "total": 13, + "success": 13, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-02": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-03": { + "total": 4, + "success": 4, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-04": { + "total": 16, + "success": 15, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-05": { + "total": 14, + "success": 10, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-06": { + "total": 23, + "success": 16, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-07": { + "total": 19, + "success": 7, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-08": { + "total": 24, + "success": 15, + "failure": 9, + 
"cancelled": 0, + "in_progress": 0 + }, + "2025-08-09": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-10": { + "total": 4, + "success": 2, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-11": { + "total": 13, + "success": 12, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-12": { + "total": 9, + "success": 9, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-13": { + "total": 14, + "success": 12, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-14": { + "total": 18, + "success": 16, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-15": { + "total": 38, + "success": 30, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-16": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-17": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-18": { + "total": 19, + "success": 12, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-19": { + "total": 11, + "success": 7, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-20": { + "total": 11, + "success": 9, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-21": { + "total": 19, + "success": 15, + "failure": 3, + "cancelled": 1, + "in_progress": 0 + }, + "2025-08-22": { + "total": 32, + "success": 24, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-23": { + "total": 6, + "success": 5, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-24": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-25": { + "total": 13, + "success": 11, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-26": { + "total": 17, + "success": 10, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-27": { + "total": 
20, + "success": 11, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-28": { + "total": 36, + "success": 18, + "failure": 17, + "cancelled": 1, + "in_progress": 0 + }, + "2025-08-29": { + "total": 39, + "success": 28, + "failure": 11, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-30": { + "total": 4, + "success": 2, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-08-31": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-01": { + "total": 20, + "success": 15, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-02": { + "total": 25, + "success": 16, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-03": { + "total": 30, + "success": 19, + "failure": 11, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-04": { + "total": 29, + "success": 15, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-05": { + "total": 32, + "success": 14, + "failure": 18, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-06": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-07": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-08": { + "total": 18, + "success": 12, + "failure": 5, + "cancelled": 1, + "in_progress": 0 + }, + "2025-09-09": { + "total": 25, + "success": 14, + "failure": 11, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-10": { + "total": 38, + "success": 23, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-11": { + "total": 39, + "success": 18, + "failure": 21, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-12": { + "total": 34, + "success": 21, + "failure": 13, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-13": { + "total": 1, + "success": 0, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-14": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + 
"in_progress": 0 + }, + "2025-09-15": { + "total": 22, + "success": 11, + "failure": 11, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-16": { + "total": 25, + "success": 15, + "failure": 10, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-17": { + "total": 24, + "success": 17, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-18": { + "total": 24, + "success": 17, + "failure": 6, + "cancelled": 1, + "in_progress": 0 + }, + "2025-09-19": { + "total": 16, + "success": 9, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-20": { + "total": 8, + "success": 3, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-21": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-22": { + "total": 45, + "success": 19, + "failure": 26, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-23": { + "total": 23, + "success": 17, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-24": { + "total": 17, + "success": 13, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-25": { + "total": 47, + "success": 26, + "failure": 21, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-26": { + "total": 22, + "success": 21, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-27": { + "total": 4, + "success": 3, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-28": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-29": { + "total": 20, + "success": 12, + "failure": 8, + "cancelled": 0, + "in_progress": 0 + }, + "2025-09-30": { + "total": 46, + "success": 21, + "failure": 25, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-01": { + "total": 23, + "success": 16, + "failure": 6, + "cancelled": 1, + "in_progress": 0 + }, + "2025-10-02": { + "total": 30, + "success": 17, + "failure": 13, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-03": { + "total": 10, + 
"success": 9, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-04": { + "total": 4, + "success": 4, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-05": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-06": { + "total": 25, + "success": 9, + "failure": 15, + "cancelled": 1, + "in_progress": 0 + }, + "2025-10-07": { + "total": 42, + "success": 12, + "failure": 29, + "cancelled": 1, + "in_progress": 0 + }, + "2025-10-08": { + "total": 21, + "success": 11, + "failure": 10, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-09": { + "total": 61, + "success": 2, + "failure": 59, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-10": { + "total": 47, + "success": 13, + "failure": 34, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-11": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-12": { + "total": 1, + "success": 0, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-13": { + "total": 32, + "success": 18, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-14": { + "total": 31, + "success": 16, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-15": { + "total": 33, + "success": 22, + "failure": 11, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-16": { + "total": 19, + "success": 12, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-17": { + "total": 20, + "success": 12, + "failure": 7, + "cancelled": 1, + "in_progress": 0 + }, + "2025-10-18": { + "total": 1, + "success": 0, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-19": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-20": { + "total": 37, + "success": 14, + "failure": 23, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-21": { + "total": 21, + "success": 12, + "failure": 9, + "cancelled": 0, + 
"in_progress": 0 + }, + "2025-10-22": { + "total": 24, + "success": 11, + "failure": 13, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-23": { + "total": 61, + "success": 17, + "failure": 44, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-24": { + "total": 30, + "success": 18, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-25": { + "total": 3, + "success": 3, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-26": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-27": { + "total": 9, + "success": 9, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-28": { + "total": 18, + "success": 16, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-29": { + "total": 19, + "success": 14, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-30": { + "total": 17, + "success": 16, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-10-31": { + "total": 15, + "success": 14, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-01": { + "total": 4, + "success": 1, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-02": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-03": { + "total": 14, + "success": 13, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-04": { + "total": 19, + "success": 16, + "failure": 1, + "cancelled": 2, + "in_progress": 0 + }, + "2025-11-05": { + "total": 13, + "success": 10, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-06": { + "total": 24, + "success": 11, + "failure": 13, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-07": { + "total": 19, + "success": 14, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-08": { + "total": 3, + "success": 2, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-09": { + "total": 2, + 
"success": 1, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-10": { + "total": 47, + "success": 13, + "failure": 33, + "cancelled": 1, + "in_progress": 0 + }, + "2025-11-11": { + "total": 15, + "success": 11, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-12": { + "total": 42, + "success": 22, + "failure": 20, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-13": { + "total": 17, + "success": 12, + "failure": 4, + "cancelled": 1, + "in_progress": 0 + }, + "2025-11-14": { + "total": 22, + "success": 15, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-15": { + "total": 3, + "success": 3, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-16": { + "total": 3, + "success": 3, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-17": { + "total": 9, + "success": 7, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-18": { + "total": 19, + "success": 12, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-19": { + "total": 18, + "success": 13, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-20": { + "total": 9, + "success": 8, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-21": { + "total": 16, + "success": 12, + "failure": 3, + "cancelled": 1, + "in_progress": 0 + }, + "2025-11-22": { + "total": 5, + "success": 2, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-23": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-24": { + "total": 8, + "success": 7, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-25": { + "total": 11, + "success": 10, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-26": { + "total": 17, + "success": 16, + "failure": 0, + "cancelled": 1, + "in_progress": 0 + }, + "2025-11-27": { + "total": 17, + "success": 15, + "failure": 2, + "cancelled": 0, + "in_progress": 0 
+ }, + "2025-11-28": { + "total": 11, + "success": 6, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-29": { + "total": 2, + "success": 2, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-11-30": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-01": { + "total": 13, + "success": 12, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-02": { + "total": 8, + "success": 8, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-03": { + "total": 17, + "success": 10, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-04": { + "total": 11, + "success": 8, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-05": { + "total": 12, + "success": 11, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-06": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-07": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-08": { + "total": 17, + "success": 14, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-09": { + "total": 23, + "success": 14, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-10": { + "total": 43, + "success": 21, + "failure": 20, + "cancelled": 2, + "in_progress": 0 + }, + "2025-12-11": { + "total": 28, + "success": 19, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-12": { + "total": 14, + "success": 12, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-13": { + "total": 2, + "success": 0, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-14": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-15": { + "total": 41, + "success": 15, + "failure": 26, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-16": { + "total": 25, + "success": 21, + "failure": 
4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-17": { + "total": 10, + "success": 8, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-18": { + "total": 20, + "success": 14, + "failure": 5, + "cancelled": 1, + "in_progress": 0 + }, + "2025-12-19": { + "total": 13, + "success": 11, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-20": { + "total": 7, + "success": 3, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-21": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-22": { + "total": 20, + "success": 16, + "failure": 3, + "cancelled": 1, + "in_progress": 0 + }, + "2025-12-23": { + "total": 28, + "success": 19, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-24": { + "total": 13, + "success": 8, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-25": { + "total": 3, + "success": 1, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-26": { + "total": 6, + "success": 3, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-27": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-28": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-29": { + "total": 4, + "success": 2, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-30": { + "total": 3, + "success": 1, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2025-12-31": { + "total": 2, + "success": 1, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-01": { + "total": 2, + "success": 1, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-02": { + "total": 12, + "success": 8, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-03": { + "total": 3, + "success": 1, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-04": { + "total": 3, + 
"success": 3, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-05": { + "total": 34, + "success": 27, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-06": { + "total": 45, + "success": 25, + "failure": 20, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-07": { + "total": 17, + "success": 13, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-08": { + "total": 36, + "success": 24, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-09": { + "total": 25, + "success": 17, + "failure": 7, + "cancelled": 1, + "in_progress": 0 + }, + "2026-01-10": { + "total": 5, + "success": 2, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-11": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-12": { + "total": 32, + "success": 17, + "failure": 15, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-13": { + "total": 44, + "success": 22, + "failure": 22, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-14": { + "total": 114, + "success": 32, + "failure": 82, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-15": { + "total": 54, + "success": 22, + "failure": 31, + "cancelled": 1, + "in_progress": 0 + }, + "2026-01-16": { + "total": 70, + "success": 27, + "failure": 40, + "cancelled": 3, + "in_progress": 0 + }, + "2026-01-17": { + "total": 6, + "success": 4, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-18": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-19": { + "total": 28, + "success": 25, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-20": { + "total": 42, + "success": 30, + "failure": 12, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-21": { + "total": 51, + "success": 31, + "failure": 20, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-22": { + "total": 32, + "success": 25, + "failure": 5, + "cancelled": 2, + 
"in_progress": 0 + }, + "2026-01-23": { + "total": 28, + "success": 25, + "failure": 3, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-24": { + "total": 6, + "success": 4, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-25": { + "total": 3, + "success": 2, + "failure": 1, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-26": { + "total": 89, + "success": 33, + "failure": 56, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-27": { + "total": 24, + "success": 21, + "failure": 2, + "cancelled": 1, + "in_progress": 0 + }, + "2026-01-28": { + "total": 48, + "success": 28, + "failure": 20, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-29": { + "total": 24, + "success": 18, + "failure": 6, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-30": { + "total": 31, + "success": 24, + "failure": 7, + "cancelled": 0, + "in_progress": 0 + }, + "2026-01-31": { + "total": 1, + "success": 1, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-01": { + "total": 0, + "success": 0, + "failure": 0, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-02": { + "total": 14, + "success": 12, + "failure": 2, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-03": { + "total": 27, + "success": 18, + "failure": 9, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-04": { + "total": 30, + "success": 16, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-05": { + "total": 33, + "success": 19, + "failure": 14, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-06": { + "total": 20, + "success": 15, + "failure": 5, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-07": { + "total": 8, + "success": 4, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-08": { + "total": 5, + "success": 2, + "failure": 2, + "cancelled": 1, + "in_progress": 0 + }, + "2026-02-09": { + "total": 15, + "success": 11, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + }, + "2026-02-10": { + "total": 24, + 
"success": 20, + "failure": 4, + "cancelled": 0, + "in_progress": 0 + } +} \ No newline at end of file diff --git a/ci3/ci-metrics/metrics.py b/ci3/ci-metrics/metrics.py new file mode 100644 index 000000000000..5c0d1610e06b --- /dev/null +++ b/ci3/ci-metrics/metrics.py @@ -0,0 +1,602 @@ +"""CI metrics: direct Redis reads + test event listener. + +Reads CI run data directly from Redis sorted sets on each request. +Test events stored in SQLite since they only arrive via pub/sub. +CI runs periodically synced from Redis to SQLite for flake correlation. +""" +import json +import re +import time +import threading +from datetime import datetime, timedelta, timezone + +import db +import github_data +import ec2_pricing + +SECTIONS = ['next', 'prs', 'master', 'staging', 'releases', 'nightly', 'network', 'deflake', 'local'] + +_PR_RE = re.compile(r'(?:pr-|#)(\d+)', re.IGNORECASE) +_ANSI_RE = re.compile(r'\x1b\[[^m]*m|\x1b\]8;;[^\x07]*\x07') +_URL_PR_RE = re.compile(r'/pull/(\d+)') + + +def compute_run_cost(data: dict) -> float | None: + complete = data.get('complete') + ts = data.get('timestamp') + if not complete or not ts: + return None + hours = (complete - ts) / 3_600_000 + instance_type = data.get('instance_type', 'unknown') + is_spot = bool(data.get('spot')) + rate = ec2_pricing.get_instance_rate(instance_type, is_spot) + if not rate: + vcpus = data.get('instance_vcpus', 192) + rate = vcpus * ec2_pricing.get_fallback_vcpu_rate(is_spot) + return round(hours * rate, 4) + + +def extract_pr_number(name: str) -> int | None: + m = _PR_RE.search(name) + if m: + return int(m.group(1)) + # Try matching GitHub PR URL in ANSI-encoded strings + m = _URL_PR_RE.search(name) + if m: + return int(m.group(1)) + # Strip ANSI codes and retry + clean = _ANSI_RE.sub('', name) + m = _PR_RE.search(clean) + return int(m.group(1)) if m else None + + +def _get_ci_runs_from_redis(redis_conn, date_from_ms=None, date_to_ms=None): + """Read CI runs from Redis sorted sets.""" + branch_pr_map = 
github_data.get_branch_pr_map() + + runs = [] + for section in SECTIONS: + key = f'ci-run-{section}' + try: + if date_from_ms is not None or date_to_ms is not None: + lo = date_from_ms if date_from_ms is not None else '-inf' + hi = date_to_ms if date_to_ms is not None else '+inf' + entries = redis_conn.zrangebyscore(key, lo, hi, withscores=True) + else: + entries = redis_conn.zrange(key, 0, -1, withscores=True) + for entry_bytes, score in entries: + try: + raw = entry_bytes.decode() if isinstance(entry_bytes, bytes) else entry_bytes + data = json.loads(raw) + data.setdefault('dashboard', section) + data['cost_usd'] = compute_run_cost(data) + data['pr_number'] = ( + extract_pr_number(data.get('name', '')) + or extract_pr_number(data.get('msg', '')) + or (int(data['pr_number']) if data.get('pr_number') else None) + or branch_pr_map.get(data.get('name')) + ) + runs.append(data) + except Exception: + continue + except Exception as e: + print(f"[rk_metrics] Error reading {key}: {e}") + return runs + + +def _get_ci_runs_from_sqlite(date_from_ms=None, date_to_ms=None): + """Read CI runs from SQLite (persistent store).""" + conditions = [] + params = [] + if date_from_ms is not None: + conditions.append('timestamp_ms >= ?') + params.append(date_from_ms) + if date_to_ms is not None: + conditions.append('timestamp_ms <= ?') + params.append(date_to_ms) + where = ('WHERE ' + ' AND '.join(conditions)) if conditions else '' + rows = db.query(f'SELECT * FROM ci_runs {where} ORDER BY timestamp_ms', params) + runs = [] + for row in rows: + runs.append({ + 'dashboard': row['dashboard'], + 'name': row['name'], + 'timestamp': row['timestamp_ms'], + 'complete': row['complete_ms'], + 'status': row['status'], + 'author': row['author'], + 'pr_number': row['pr_number'], + 'instance_type': row['instance_type'], + 'instance_vcpus': row.get('instance_vcpus'), + 'spot': bool(row['spot']), + 'cost_usd': row['cost_usd'], + 'job_id': row.get('job_id', ''), + 'arch': row.get('arch', ''), + }) + 
return runs + + +def get_ci_runs(redis_conn, date_from_ms=None, date_to_ms=None): + """Read CI runs from Redis, backfilled with SQLite for data that Redis has flushed.""" + redis_runs = _get_ci_runs_from_redis(redis_conn, date_from_ms, date_to_ms) + + # Find the earliest timestamp in Redis to know what SQLite needs to fill + redis_keys = set() + redis_min_ts = float('inf') + for run in redis_runs: + ts = run.get('timestamp', 0) + redis_keys.add((run.get('dashboard', ''), ts, run.get('name', ''))) + if ts < redis_min_ts: + redis_min_ts = ts + + # If requesting data older than what Redis has, backfill from SQLite + sqlite_runs = [] + need_sqlite = (date_from_ms is not None and date_from_ms < redis_min_ts) or not redis_runs + if need_sqlite: + sqlite_to = int(redis_min_ts) if redis_runs else date_to_ms + sqlite_runs = _get_ci_runs_from_sqlite(date_from_ms, sqlite_to) + # Deduplicate: only include SQLite runs not already in Redis + sqlite_runs = [r for r in sqlite_runs + if (r.get('dashboard', ''), r.get('timestamp', 0), r.get('name', '')) + not in redis_keys] + + return sqlite_runs + redis_runs + + +def _ts_to_date(ts_ms): + return datetime.fromtimestamp(ts_ms / 1000, tz=timezone.utc).strftime('%Y-%m-%d') + + +# ---- Test event handling (only thing needing SQLite) ---- + +def _handle_test_event(channel: str, data: dict): + status = channel.split(':')[-1] + # Handle field name mismatches: run_test_cmd publishes 'cmd' for failed/flaked + # but 'test_cmd' for started events. Same for 'log_key' vs 'log_url'. 
+ test_cmd = data.get('test_cmd') or data.get('cmd', '') + log_url = data.get('log_url') or data.get('log_key') + if log_url and not log_url.startswith('http'): + log_url = f'http://ci.aztec-labs.com/{log_url}' + db.execute(''' + INSERT INTO test_events + (status, test_cmd, log_url, ref_name, commit_hash, commit_author, + commit_msg, exit_code, duration_secs, is_scenario, owners, + flake_group_id, dashboard, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ''', ( + status, + test_cmd, + log_url, + data.get('ref_name', ''), + data.get('commit_hash'), + data.get('commit_author'), + data.get('commit_msg'), + data.get('exit_code'), + data.get('duration_seconds'), + 1 if data.get('is_scenario_test') else 0, + json.dumps(data['owners']) if data.get('owners') else None, + data.get('flake_group_id'), + data.get('dashboard', ''), + data.get('timestamp', datetime.now(timezone.utc).isoformat()), + )) + + +def start_test_listener(redis_conn): + """Subscribe to test event channels only. 
Reconnects on failure.""" + channels = [b'ci:test:started', b'ci:test:passed', b'ci:test:failed', b'ci:test:flaked'] + + def listener(): + backoff = 1 + while True: + try: + pubsub = redis_conn.pubsub() + pubsub.subscribe(*channels) + backoff = 1 # reset on successful connection + for message in pubsub.listen(): + if message['type'] != 'message': + continue + channel = message['channel'] + if isinstance(channel, bytes): + channel = channel.decode() + try: + payload = message['data'] + if isinstance(payload, bytes): + payload = payload.decode() + _handle_test_event(channel, json.loads(payload)) + except Exception as e: + print(f"[rk_metrics] Error parsing test event: {e}") + except Exception as e: + print(f"[rk_metrics] Test listener error (reconnecting in {backoff}s): {e}") + time.sleep(backoff) + backoff = min(backoff * 2, 60) + + t = threading.Thread(target=listener, daemon=True, name='test-listener') + t.start() + return t + + +# ---- Sync failed_tests_{section} lists from Redis into SQLite ---- + +_ANSI_STRIP = re.compile(r'\x1b\[[^m]*m|\x1b\]8;;[^\x07]*\x07') +_GRIND_CMD_RE = re.compile(r'/grind\?cmd=([^&\x07"]+)') +_LOG_KEY_RE = re.compile(r'ci\.aztec-labs\.com/([a-f0-9]{16})') +_INLINE_CMD_RE = re.compile(r'(?:grind\)|[0-9a-f]{16}\)):?\s+(.+?)\s+\(\d+s\)') +_DURATION_RE = re.compile(r'\((\d+)s\)') +_AUTHOR_MSG_RE = re.compile(r'\(code: \d+\)\s+\((.+?): (.+?)\)\s*$') +_FLAKE_GROUP_RE = re.compile(r'group:(\S+)') + +_failed_tests_sync_ts = 0 +_FAILED_TESTS_SYNC_TTL = 3600 # 1 hour + + +def _parse_failed_test_entry(raw: str, section: str) -> dict | None: + """Parse an ANSI-formatted failed_tests_{section} entry into structured data.""" + from urllib.parse import unquote + clean = _ANSI_STRIP.sub('', raw) + + # Status + if 'FLAKED' in clean: + status = 'flaked' + elif 'FAILED' in clean: + status = 'failed' + else: + return None + + # Timestamp: "02-11 15:11:00: ..." 
+ ts_match = re.match(r'(\d{2}-\d{2} \d{2}:\d{2}:\d{2})', clean) + if not ts_match: + return None + # Assume current year for MM-DD HH:MM:SS; handle year rollover + now = datetime.now(timezone.utc) + year = now.year + ts_str = f'{year}-{ts_match.group(1)}' + try: + parsed_dt = datetime.strptime(ts_str, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc) + # If parsed date is in the future, it's from the previous year + if parsed_dt > now + timedelta(days=1): + parsed_dt = parsed_dt.replace(year=year - 1) + timestamp = parsed_dt.isoformat() + except ValueError: + return None + + # Log key + log_key = None + m = _LOG_KEY_RE.search(raw) + if m: + log_key = m.group(1) + + # Test command: try grind link first, then inline text + test_cmd = '' + m = _GRIND_CMD_RE.search(raw) + if m: + cmd_raw = unquote(m.group(1)) + # Format: "hash:KEY=VAL:KEY=VAL actual_command" + # Strip the hash:KEY=VAL prefix to get the actual test command + parts = cmd_raw.split(' ', 1) + if len(parts) == 2 and ':' in parts[0]: + test_cmd = parts[1].strip() + else: + test_cmd = cmd_raw + else: + # Fallback: extract from inline text after log key + m = _INLINE_CMD_RE.search(clean) + if m: + test_cmd = m.group(1).strip() + + # Duration + duration = None + m = _DURATION_RE.search(clean) + if m: + duration = float(m.group(1)) + + # Author and commit message + author, msg = None, None + m = _AUTHOR_MSG_RE.search(clean) + if m: + author = m.group(1) + msg = m.group(2) + + # Flake group + flake_group = None + m = _FLAKE_GROUP_RE.search(clean) + if m: + flake_group = m.group(1) + + return { + 'status': status, + 'test_cmd': test_cmd, + 'log_url': f'http://ci.aztec-labs.com/{log_key}' if log_key else None, + 'log_key': log_key, + 'ref_name': section, # section is the best ref we have from these lists + 'commit_author': author, + 'commit_msg': msg, + 'duration_secs': duration, + 'flake_group_id': flake_group, + 'timestamp': timestamp, + 'dashboard': section, + } + + +def sync_failed_tests_to_sqlite(redis_conn): 
+ """Read failed_tests_{section} lists from Redis and insert into test_events.""" + global _failed_tests_sync_ts + now = time.time() + if now - _failed_tests_sync_ts < _FAILED_TESTS_SYNC_TTL: + return + _failed_tests_sync_ts = now + + conn = db.get_db() + # Track existing entries to avoid duplicates: log_url for entries that have one, + # (test_cmd, timestamp, dashboard) composite key for entries without log_url + existing_urls = {row['log_url'] for row in conn.execute( + "SELECT DISTINCT log_url FROM test_events WHERE log_url IS NOT NULL" + ).fetchall()} + existing_keys = {(row['test_cmd'], row['timestamp'], row['dashboard']) for row in conn.execute( + "SELECT test_cmd, timestamp, dashboard FROM test_events WHERE log_url IS NULL" + ).fetchall()} + + total = 0 + for section in SECTIONS: + key = f'failed_tests_{section}' + try: + entries = redis_conn.lrange(key, 0, -1) + except Exception as e: + print(f"[rk_metrics] Error reading {key}: {e}") + continue + + for entry_bytes in entries: + raw = entry_bytes.decode() if isinstance(entry_bytes, bytes) else entry_bytes + parsed = _parse_failed_test_entry(raw, section) + if not parsed: + continue + if parsed['log_url']: + if parsed['log_url'] in existing_urls: + continue + existing_urls.add(parsed['log_url']) + else: + composite = (parsed['test_cmd'], parsed['timestamp'], parsed['dashboard']) + if composite in existing_keys: + continue + existing_keys.add(composite) + try: + conn.execute(''' + INSERT INTO test_events + (status, test_cmd, log_url, ref_name, commit_author, + commit_msg, duration_secs, flake_group_id, dashboard, + timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ''', ( + parsed['status'], parsed['test_cmd'], parsed['log_url'], + parsed['ref_name'], parsed['commit_author'], + parsed['commit_msg'], parsed['duration_secs'], + parsed['flake_group_id'], parsed['dashboard'], + parsed['timestamp'], + )) + total += 1 + except Exception as e: + print(f"[rk_metrics] Error inserting test event: {e}") + conn.commit() + if total: + print(f"[rk_metrics] Synced {total} test events from Redis lists") + + +# ---- Seed loading ---- + +def _load_seed_data(): + """Load CI runs and test events from ci-run-seed.json.gz if SQLite is empty.""" + import gzip + from pathlib import Path + + conn = db.get_db() + ci_count = conn.execute('SELECT COUNT(*) as c FROM ci_runs').fetchone()['c'] + te_count = conn.execute('SELECT COUNT(*) as c FROM test_events').fetchone()['c'] + if ci_count > 0 and te_count > 0: + return + + seed = Path(__file__).parent / 'ci-run-seed.json.gz' + if not seed.exists(): + return + + with gzip.open(seed, 'rt') as f: + data = json.load(f) + + now_iso = datetime.now(timezone.utc).isoformat() + + if ci_count == 0 and data.get('ci_runs'): + runs = data['ci_runs'] + for run in runs: + try: + conn.execute(''' + INSERT OR IGNORE INTO ci_runs + (dashboard, name, timestamp_ms, complete_ms, status, author, + pr_number, instance_type, instance_vcpus, spot, cost_usd, + job_id, arch, synced_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ''', ( + run.get('dashboard', ''), + run.get('name', ''), + run.get('timestamp', 0), + run.get('complete'), + run.get('status'), + run.get('author'), + run.get('pr_number'), + run.get('instance_type'), + run.get('instance_vcpus'), + 1 if run.get('spot') else 0, + run.get('cost_usd'), + run.get('job_id', ''), + run.get('arch', ''), + now_iso, + )) + except Exception: + continue + conn.commit() + print(f"[rk_metrics] Loaded {len(runs)} CI runs from seed") + + if te_count == 0 and data.get('test_events'): + events = data['test_events'] + for ev in events: + try: + conn.execute(''' + INSERT OR IGNORE INTO test_events + (status, test_cmd, log_url, ref_name, commit_hash, commit_author, + commit_msg, exit_code, duration_secs, is_scenario, owners, + flake_group_id, dashboard, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ''', ( + ev.get('status', ''), + ev.get('test_cmd', ''), + ev.get('log_url'), + ev.get('ref_name', ''), + ev.get('commit_hash'), + ev.get('commit_author'), + ev.get('commit_msg'), + ev.get('exit_code'), + ev.get('duration_secs'), + ev.get('is_scenario', 0), + ev.get('owners'), + ev.get('flake_group_id'), + ev.get('dashboard', ''), + ev.get('timestamp', ''), + )) + except Exception: + continue + conn.commit() + print(f"[rk_metrics] Loaded {len(events)} test events from seed") + + +# ---- CI run sync (Redis → SQLite) for flake correlation ---- + +_ci_sync_ts = 0 +_CI_SYNC_TTL = 3600 # 1 hour + + +def sync_ci_runs_to_sqlite(redis_conn): + """Sync all CI runs from Redis into SQLite for persistence.""" + global _ci_sync_ts + now = time.time() + if now - _ci_sync_ts < _CI_SYNC_TTL: + return + _ci_sync_ts = now + + # Sync everything Redis has (not just 30 days) + runs = _get_ci_runs_from_redis(redis_conn) + + now_iso = datetime.now(timezone.utc).isoformat() + conn = db.get_db() + count = 0 + for run in runs: + try: + conn.execute(''' + INSERT OR REPLACE INTO ci_runs + (dashboard, name, timestamp_ms, complete_ms, status, author, + pr_number, 
instance_type, instance_vcpus, spot, cost_usd, + job_id, arch, synced_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ''', ( + run.get('dashboard', ''), + run.get('name', ''), + run.get('timestamp', 0), + run.get('complete'), + run.get('status'), + run.get('author'), + run.get('pr_number'), + run.get('instance_type'), + run.get('instance_vcpus'), + 1 if run.get('spot') else 0, + run.get('cost_usd'), + run.get('job_id', ''), + run.get('arch', ''), + now_iso, + )) + count += 1 + except Exception as e: + print(f"[rk_metrics] Error syncing run: {e}") + conn.commit() + print(f"[rk_metrics] Synced {count} CI runs to SQLite") + + +def start_ci_run_sync(redis_conn): + """Start periodic CI run + test event sync thread.""" + _load_seed_data() + + def loop(): + while True: + try: + sync_ci_runs_to_sqlite(redis_conn) + sync_failed_tests_to_sqlite(redis_conn) + except Exception as e: + print(f"[rk_metrics] sync error: {e}") + time.sleep(600) # check every 10 min (TTL gates actual work) + + t = threading.Thread(target=loop, daemon=True, name='ci-run-sync') + t.start() + return t + + +def get_flakes_by_command(date_from, date_to, dashboard=''): + """Get flake stats grouped by CI command type (dashboard/section).""" + if dashboard: + rows = db.query(''' + SELECT dashboard, test_cmd, COUNT(*) as count + FROM test_events + WHERE status = 'flaked' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY dashboard, test_cmd + ORDER BY count DESC + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + rows = db.query(''' + SELECT dashboard, test_cmd, COUNT(*) as count + FROM test_events + WHERE status = 'flaked' AND dashboard != '' + AND timestamp >= ? AND timestamp < ? 
+ GROUP BY dashboard, test_cmd + ORDER BY count DESC + ''', (date_from, date_to + 'T23:59:59')) + + by_command = {} + total_flakes = 0 + for row in rows: + cmd = row['dashboard'] + if cmd not in by_command: + by_command[cmd] = {'total': 0, 'tests': {}} + by_command[cmd]['total'] += row['count'] + by_command[cmd]['tests'][row['test_cmd']] = row['count'] + total_flakes += row['count'] + + if dashboard: + failure_rows = db.query(''' + SELECT dashboard, COUNT(*) as count + FROM test_events + WHERE status = 'failed' AND dashboard = ? + AND timestamp >= ? AND timestamp < ? + GROUP BY dashboard + ''', (dashboard, date_from, date_to + 'T23:59:59')) + else: + failure_rows = db.query(''' + SELECT dashboard, COUNT(*) as count + FROM test_events + WHERE status = 'failed' AND dashboard != '' + AND timestamp >= ? AND timestamp < ? + GROUP BY dashboard + ''', (date_from, date_to + 'T23:59:59')) + failures_by_command = {r['dashboard']: r['count'] for r in failure_rows} + + result_list = [] + for cmd, data in sorted(by_command.items(), key=lambda x: -x[1]['total']): + top_tests = sorted(data['tests'].items(), key=lambda x: -x[1])[:10] + result_list.append({ + 'command': cmd, + 'total_flakes': data['total'], + 'total_failures': failures_by_command.get(cmd, 0), + 'top_tests': [{'test_cmd': t, 'count': c} for t, c in top_tests], + }) + + return { + 'by_command': result_list, + 'summary': { + 'total_flakes': total_flakes, + 'total_failures': sum(failures_by_command.values()), + }, + } diff --git a/ci3/ci-metrics/requirements.txt b/ci3/ci-metrics/requirements.txt new file mode 100644 index 000000000000..d6516263133f --- /dev/null +++ b/ci3/ci-metrics/requirements.txt @@ -0,0 +1,8 @@ +flask +gunicorn +redis +Flask-Compress +Flask-HTTPAuth +requests +google-cloud-bigquery +boto3 diff --git a/ci3/ci-metrics/sync_to_sqlite.py b/ci3/ci-metrics/sync_to_sqlite.py new file mode 100755 index 000000000000..5dd6faae6172 --- /dev/null +++ b/ci3/ci-metrics/sync_to_sqlite.py @@ -0,0 +1,60 @@ 
+#!/usr/bin/env python3 +"""Sync ephemeral Redis CI data to persistent SQLite. + +Normally run automatically by the ci-metrics server's background sync thread. +Can also be run standalone for a one-off manual sync: + + cd ci3/ci-metrics && python3 sync_to_sqlite.py + +Connects to Redis, reads all CI runs and failed test lists, writes to SQLite. +""" +import os +import sys +import time + +# Ensure this script can import sibling modules +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +import redis as redis_lib +import db +import metrics + +REDIS_HOST = os.getenv('REDIS_HOST', 'localhost') +REDIS_PORT = int(os.getenv('REDIS_PORT', '6379')) + + +def main(): + start = time.time() + r = redis_lib.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=False) + + try: + r.ping() + except Exception as e: + print(f"[sync] Cannot connect to Redis at {REDIS_HOST}:{REDIS_PORT}: {e}") + sys.exit(1) + + # Ensure DB schema is up to date + db.get_db() + + # Force sync by resetting the TTL gates + metrics._ci_sync_ts = 0 + metrics._failed_tests_sync_ts = 0 + + # Sync CI runs + print("[sync] Syncing CI runs from Redis to SQLite...") + metrics.sync_ci_runs_to_sqlite(r) + + # Sync failed/flaked test events from Redis lists + print("[sync] Syncing test events from Redis to SQLite...") + metrics.sync_failed_tests_to_sqlite(r) + + # Report + conn = db.get_db() + ci_count = conn.execute('SELECT COUNT(*) as c FROM ci_runs').fetchone()['c'] + te_count = conn.execute('SELECT COUNT(*) as c FROM test_events').fetchone()['c'] + elapsed = time.time() - start + print(f"[sync] Done in {elapsed:.1f}s. SQLite: {ci_count} CI runs, {te_count} test events.") + + +if __name__ == '__main__': + main() diff --git a/ci3/ci-metrics/views/ci-insights.html b/ci3/ci-metrics/views/ci-insights.html new file mode 100644 index 000000000000..533b6bfb62cd --- /dev/null +++ b/ci3/ci-metrics/views/ci-insights.html @@ -0,0 +1,658 @@ + + + + + ACI - CI Insights + + + + + +

ci insights

+ +
+ + + + | + + + | + + + + | + +
+ +
+ + + +
+
daily ci spend
--
+
cost / merge
--
+
mq success rate
--
+
flakes / day
--
+
prs merged / day
--
+
+ + +
+
+

daily ci cost + 7-day rolling cost per merge

+
+
+
+

merge queue: daily outcomes + success rate

+
+
+
+

flakes + test failures per day

+
+
+
+ + +
flakes by pipeline
+
+ + + +
+
+ + +
author ci profile
+
+ + + +
+
+ + + + + diff --git a/ci3/ci-metrics/views/cost-overview.html b/ci3/ci-metrics/views/cost-overview.html new file mode 100644 index 000000000000..53424a2d2d70 --- /dev/null +++ b/ci3/ci-metrics/views/cost-overview.html @@ -0,0 +1,905 @@ + + + + + ACI - Cost Overview + + + + + +

cost overview

+ +
+ + + + | + + + | + + + + | + +
+ +
+ +
+
Overview
+
Resource Details
+
CI Attribution
+
+ +
+
+ +
+
+

combined daily spend

+
+
+
+

service category breakdown

+
+
+
+

aws vs gcp split

+
+
+
+ + + + +
+
+ +
+
+
+
+ + + +
+
+
+ +
+
+ +
+
+
+

ci cost by run type (time series)

+
+
+
+

cost by user (AWS + GCP)

+
+
+
+

cost by run type

+
+
+
+

instances

+
+ + + +
+
+
+ + + + + diff --git a/ci3/ci-metrics/views/test-timings.html b/ci3/ci-metrics/views/test-timings.html new file mode 100644 index 000000000000..0bf6c7213bd6 --- /dev/null +++ b/ci3/ci-metrics/views/test-timings.html @@ -0,0 +1,289 @@ + + + + + ACI - Test Timings + + + + + +

test timings

+ +
+ + + + + | + + + | + + + | + + +
+ +
loading...
+ +
+ +
+
+

avg duration by day

+
+
+
+

test run count by day

+
+
+
+ +

tests by duration

+
+ + + + + + + + + + + + + + + + +
test commandrunsavg (s)min (s)max (s)total (h)pass %passedfailedflaked
+
+ +

slowest individual runs

+
+ + + + + + + + + + + + + +
test commandduration (s)statusdateauthorpipelinelog
+
+ + + + + diff --git a/ci3/dashboard/Dockerfile b/ci3/dashboard/Dockerfile index 2ca190fd9753..2da7805ffa83 100644 --- a/ci3/dashboard/Dockerfile +++ b/ci3/dashboard/Dockerfile @@ -16,7 +16,12 @@ RUN apt update && apt install -y \ WORKDIR /app COPY requirements.txt requirements.txt RUN pip install --no-cache-dir -r requirements.txt gunicorn + +# Install ci-metrics dependencies (ci-metrics runs as subprocess) +COPY ci-metrics/requirements.txt ci-metrics/requirements.txt +RUN pip install --no-cache-dir -r ci-metrics/requirements.txt + RUN git config --global --add safe.directory /aztec-packages COPY . . -EXPOSE 8080 +EXPOSE 8080 8081 CMD ["gunicorn", "-w", "100", "-b", "0.0.0.0:8080", "rk:app"] diff --git a/ci3/dashboard/deploy.sh b/ci3/dashboard/deploy.sh index cc417006d072..1d9e930e95a1 100755 --- a/ci3/dashboard/deploy.sh +++ b/ci3/dashboard/deploy.sh @@ -1,7 +1,13 @@ #!/bin/bash set -euo pipefail -rsync -avz --exclude='deploy.sh' -e "ssh -i ~/.ssh/build_instance_key" * ubuntu@ci.aztec-labs.com:rk +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Sync dashboard (rkapp) files +rsync -avz --exclude='deploy.sh' -e "ssh -i ~/.ssh/build_instance_key" "$SCRIPT_DIR"/* ubuntu@ci.aztec-labs.com:rk + +# Sync ci-metrics server (started as subprocess by rkapp) +rsync -avz -e "ssh -i ~/.ssh/build_instance_key" "$SCRIPT_DIR/../ci-metrics/" ubuntu@ci.aztec-labs.com:rk/ci-metrics/ ssh -i ~/.ssh/build_instance_key ubuntu@ci.aztec-labs.com " cd rk diff --git a/ci3/dashboard/rk.py b/ci3/dashboard/rk.py index 4e194cbc3a10..aedf35a824e2 100644 --- a/ci3/dashboard/rk.py +++ b/ci3/dashboard/rk.py @@ -18,13 +18,40 @@ YELLOW, BLUE, GREEN, RED, PURPLE, BOLD, RESET, hyperlink, r, get_section_data, get_list_as_string ) - LOGS_DISK_PATH = os.getenv('LOGS_DISK_PATH', '/logs-disk') DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', 'password') +CI_METRICS_PORT = int(os.getenv('CI_METRICS_PORT', '8081')) +CI_METRICS_URL = os.getenv('CI_METRICS_URL', 
f'http://localhost:{CI_METRICS_PORT}') + app = Flask(__name__) Compress(app) auth = HTTPBasicAuth() +# Start the ci-metrics server as a subprocess +# Check sibling dir (repo layout) then subdirectory (Docker layout) +_ci_metrics_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'ci-metrics') +if not os.path.isdir(_ci_metrics_dir): + _ci_metrics_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'ci-metrics') +if os.path.isdir(_ci_metrics_dir): + # Kill any stale process on the port (e.g. leftover from previous reload) + import signal + try: + out = subprocess.check_output( + ['lsof', '-ti', f':{CI_METRICS_PORT}'], stderr=subprocess.DEVNULL, text=True) + for pid in out.strip().split('\n'): + if pid: + os.kill(int(pid), signal.SIGTERM) + import time; time.sleep(0.5) + except (subprocess.CalledProcessError, OSError): + pass + _ci_metrics_env = {**os.environ, 'CI_METRICS_PORT': str(CI_METRICS_PORT)} + subprocess.Popen( + ['gunicorn', '-w', '4', '-b', f'0.0.0.0:{CI_METRICS_PORT}', '--timeout', '120', 'app:app'], + cwd=_ci_metrics_dir, + env=_ci_metrics_env, + ) + print(f"[rk.py] ci-metrics server started on port {CI_METRICS_PORT}") + def read_from_disk(key): """Read log from disk as fallback when Redis key not found.""" try: @@ -145,6 +172,14 @@ def root() -> str: f"{hyperlink('https://aztecprotocol.github.io/benchmark-page-data/bench?branch=next', 'next')}\n" f"{hyperlink('/chonk-breakdowns', 'chonk breakdowns')}\n" f"{RESET}" + f"\n" + f"CI Metrics:\n" + f"\n{YELLOW}" + f"{hyperlink('/cost-overview', 'cost overview (AWS + GCP)')}\n" + f"{hyperlink('/namespace-billing', 'namespace billing')}\n" + f"{hyperlink('/ci-insights', 'ci insights')}\n" + f"{hyperlink('/test-timings', 'test timings')}\n" + f"{RESET}" ) def section_view(section: str) -> str: @@ -487,6 +522,57 @@ def make_options(param_name, options, current_value, suffix=''): # Redirect to log view. 
return redirect(f'/{run_id}') + +# ---- Reverse proxy to ci-metrics server ---- + +_proxy_session = requests.Session() +_HOP_BY_HOP = frozenset([ + 'connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', + 'te', 'trailers', 'transfer-encoding', 'upgrade', 'content-length', + # `requests` auto-decompresses gzip responses, so Content-Encoding is + # stale — strip it so the browser doesn't try to decompress plain content. + # Flask-Compress on rkapp handles browser compression. + 'content-encoding', +]) +# Don't forward Accept-Encoding — let `requests` negotiate with ci-metrics +# (it adds its own and auto-decompresses). +_STRIP_REQUEST_HEADERS = frozenset(['host', 'accept-encoding']) + +def _proxy(path): + """Forward request to ci-metrics, streaming the response back.""" + url = f'{CI_METRICS_URL}/{path.lstrip("/")}' + try: + resp = _proxy_session.request( + method=request.method, + url=url, + params=request.args, + data=request.get_data(), + headers={k: v for k, v in request.headers if k.lower() not in _STRIP_REQUEST_HEADERS}, + stream=True, + timeout=60, + ) + # Strip hop-by-hop headers + headers = {k: v for k, v in resp.headers.items() if k.lower() not in _HOP_BY_HOP} + return Response(resp.iter_content(chunk_size=8192), + status=resp.status_code, headers=headers) + except Exception as e: + return Response(json.dumps({'error': f'ci-metrics unavailable: {e}'}), + mimetype='application/json', status=502) + +@app.route('/namespace-billing') +@app.route('/ci-health') +@app.route('/ci-insights') +@app.route('/cost-overview') +@app.route('/test-timings') +@auth.login_required +def proxy_dashboard(): + return _proxy(request.path) + +@app.route('/api/', methods=['GET', 'POST', 'PUT', 'DELETE']) +@auth.login_required +def proxy_api(path): + return _proxy(f'/api/{path}') + @app.route('/') @auth.login_required def get_value(key): diff --git a/ci3/find_ports b/ci3/find_ports deleted file mode 100755 index d7da0afe53bf..000000000000 --- a/ci3/find_ports +++ 
/dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -source $(git rev-parse --show-toplevel)/ci3/source -# Find 'num_ports' free ports between 9000 and 10000 -# Read first arg, default to 1 port -num_ports="${1:-1}" -echo $(comm -23 <(seq 9000 10000 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n "$num_ports") diff --git a/ci3/log_ci_run b/ci3/log_ci_run index 5c9567ae91dd..b52b93256edc 100755 --- a/ci3/log_ci_run +++ b/ci3/log_ci_run @@ -35,6 +35,14 @@ if [ -z "$key" ]; then author="$(git log -1 --pretty=format:"%an")" name=$REF_NAME [ "$(aws_get_meta_data instance-life-cycle)" == "spot" ] && spot=true || spot=false + instance_type=$(aws_get_meta_data instance-type 2>/dev/null || echo "unknown") + instance_vcpus=$(nproc 2>/dev/null || echo 0) + + # Extract PR number from branch name or merge queue ref + pr_number="" + if [[ "$REF_NAME" =~ [Pp][Rr]-?([0-9]+) ]]; then + pr_number="${BASH_REMATCH[1]}" + fi # If this is github merge queue, just keep the queue name. 
if [[ "$name" =~ ^gh-readonly-queue/([^/]+)/ ]]; then @@ -42,6 +50,7 @@ if [ -z "$key" ]; then fi msg=$(pr_link "$msg") + dashboard="${range_key#ci-run-}" json=$(jq -c -j -n \ --argjson timestamp "$key" \ @@ -53,7 +62,12 @@ if [ -z "$key" ]; then --arg author "$author" \ --arg arch "$(arch)" \ --argjson spot "$spot" \ - '{timestamp: $timestamp, run_id: $run_id, job_id: $job_id, status: $status, msg: $msg, name: $name, author: $author, arch: $arch, spot: $spot}') + --arg instance_type "$instance_type" \ + --argjson instance_vcpus "$instance_vcpus" \ + --arg pr_number "$pr_number" \ + --arg dashboard "$dashboard" \ + --arg github_actor "${GITHUB_ACTOR:-}" \ + '{timestamp: $timestamp, run_id: $run_id, job_id: $job_id, status: $status, msg: $msg, name: $name, author: $author, github_actor: $github_actor, arch: $arch, spot: $spot, instance_type: $instance_type, instance_vcpus: $instance_vcpus, pr_number: $pr_number, dashboard: $dashboard}') # echo "$json" >&2 redis_cli ZADD $range_key $key "$json" &>/dev/null redis_cli SETEX hb-$key 60 1 &>/dev/null diff --git a/ci3/merge_train_failure_slack_notify b/ci3/merge_train_failure_slack_notify index 29e04d234525..3a10948cbb6f 100755 --- a/ci3/merge_train_failure_slack_notify +++ b/ci3/merge_train_failure_slack_notify @@ -2,11 +2,15 @@ set -eu # Parse arguments -DEQUEUED=false +MODE="" while [[ $# -gt 0 ]]; do case $1 in --dequeued) - DEQUEUED=true + MODE="dequeued" + shift + ;; + --merged) + MODE="merged" shift ;; *) @@ -52,8 +56,10 @@ EOF set -x -if [[ "$DEQUEUED" == "true" ]]; then +if [[ "$MODE" == "dequeued" ]]; then send_slack_message "PR was removed from the merge queue: <$PR_URL|View PR>" +elif [[ "$MODE" == "merged" ]]; then + send_slack_message "PR merged: <$PR_URL|$commit_title> by $commit_author" else send_slack_message "There was a failure (http://ci.aztec-labs.com/$ci_log_id) in the merge-train:\\n$commit_title by $commit_author" fi diff --git a/ci3/run_test_cmd b/ci3/run_test_cmd index 66334e535f27..a8fd10836497 
100755 --- a/ci3/run_test_cmd +++ b/ci3/run_test_cmd @@ -119,6 +119,21 @@ function publish_log_final { cat $tmp_file 2>/dev/null | cache_persistent $log_key $expire } +# Finalize the current log and start a fresh one with a new unique key. +function rotate_log { + if [ "$CI_REDIS_AVAILABLE" -eq 1 ]; then + publish_log_final "$@" + fi + log_key=$(uuid) + log_info=" ($(ci_term_link $log_key))" + > $tmp_file + if [ "$live_logging" -eq 1 ]; then + kill ${publish_pid:-} &>/dev/null + live_publish_log & + publish_pid=$! + fi +} + function live_publish_log { # Not replacing previous trap as we run this function in the background. trap 'kill $sleep_pid &>/dev/null; exit' SIGTERM SIGINT @@ -160,7 +175,8 @@ if [ "$publish" -eq 1 ]; then --arg commit_hash "$COMMIT_HASH" \ --arg commit_author "$COMMIT_AUTHOR" \ --arg commit_msg "$COMMIT_MSG" \ - '{status: $status, test_cmd: $test_cmd, log_id: $log_id, log_url: $log_url, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, timestamp: now | todate}') + --arg dashboard "${CI_DASHBOARD:-}" \ + '{status: $status, test_cmd: $test_cmd, log_id: $log_id, log_url: $log_url, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, dashboard: $dashboard, timestamp: now | todate}') redis_publish "ci:test:started" "$start_redis_data" fi @@ -228,15 +244,16 @@ function track_test_failed { function publish_redis { local redis_data=$(jq -n \ --arg status "$1" \ - --arg cmd "$cmd" \ - --arg log_key "$log_key" \ - --arg ref_name "$REF_NAME" \ + --arg test_cmd "$cmd" \ + --arg log_url "http://ci.aztec-labs.com/$log_key" \ + --arg ref_name "${TARGET_BRANCH:-$REF_NAME}" \ --arg commit_hash "$COMMIT_HASH" \ --arg commit_author "$COMMIT_AUTHOR" \ --arg commit_msg "$COMMIT_MSG" \ --argjson code "$code" \ --argjson duration "$SECONDS" \ - '{status: $status, cmd: $cmd, log_key: $log_key, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: 
$commit_author, commit_msg: $commit_msg, exit_code: $code, duration_seconds: $duration, timestamp: now | todate}') + --arg dashboard "${CI_DASHBOARD:-}" \ + '{status: $status, test_cmd: $test_cmd, log_url: $log_url, ref_name: $ref_name, commit_hash: $commit_hash, commit_author: $commit_author, commit_msg: $commit_msg, exit_code: $code, duration_seconds: $duration, dashboard: $dashboard, timestamp: now | todate}') redis_publish "ci:test:$1" "$redis_data" } @@ -247,6 +264,8 @@ function pass { local line="${green}PASSED${reset}${log_info:-}: $test_cmd (${SECONDS}s)" echo -e "$line" + [ "$publish" -eq 1 ] && publish_redis "passed" + if [ "$track_test_history" -eq 1 ]; then local track_line="${green}PASSED${reset}${log_info:-} ${fail_links}: $test_cmd (${SECONDS}s) (${purple}$COMMIT_AUTHOR${reset}: $COMMIT_MSG)" track_test_history "$track_line" @@ -332,12 +351,23 @@ flake_group_id=$(echo "$test_entries" | jq -r '.flake_group_id // empty' | head if [ -z "$owners" ]; then fail else - echo -e "${yellow}RETRYING${reset}${log_info:-}: $test_cmd" + failure_log_key=$log_key + failure_log_info=$log_info + rotate_log $((60 * 60 * 24 * 7 * 12)) + + echo -e "${yellow}RETRYING${reset}${log_info}: $test_cmd" run_test - # Test passed. Signal it as a flake, but pass. - [ $code -eq 0 ] && flake + if [ $code -eq 0 ]; then + # Publish the retry's log, then point back at the failure for flake reporting. + if [ "$CI_REDIS_AVAILABLE" -eq 1 ]; then + publish_log_final + fi + log_key=$failure_log_key + log_info=$failure_log_info + flake + fi # Otherwise we failed twice in a row, so hard fail. 
fail diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/aztec_js_reference.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/aztec_js_reference.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/aztec_js_reference.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/aztec_js_reference.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_connect_to_local_network.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_connect_to_local_network.md similarity index 92% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_connect_to_local_network.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_connect_to_local_network.md index d3c34a9078a8..010c85504291 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_connect_to_local_network.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_connect_to_local_network.md @@ -16,7 +16,7 @@ This guide shows you how to connect your application to the Aztec local network ## Install dependencies ```bash -yarn add @aztec/aztec.js@4.0.0-nightly.20260217 @aztec/wallets@4.0.0-nightly.20260217 +yarn add @aztec/aztec.js@4.0.0-nightly.20260218 @aztec/wallets@4.0.0-nightly.20260218 ``` ## Connect to the network @@ 
-37,7 +37,7 @@ await waitForNode(node); // Create an EmbeddedWallet connected to the node const wallet = await EmbeddedWallet.create(node); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L1-L14 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L1-L14 :::note About EmbeddedWallet @@ -57,7 +57,7 @@ const nodeInfo = await node.getNodeInfo(); console.log("Connected to local network version:", nodeInfo.nodeVersion); console.log("Chain ID:", nodeInfo.l1ChainId); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L16-L20 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L16-L20 ### Load pre-funded accounts @@ -75,7 +75,7 @@ const [aliceAddress, bobAddress] = await Promise.all( console.log(`Alice's address: ${aliceAddress.toString()}`); console.log(`Bob's address: ${bobAddress.toString()}`); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L22-L32 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L22-L32 These accounts are pre-funded with fee juice (the native gas token) at genesis, so you can immediately send transactions without needing to bridge funds from L1. 
@@ -90,7 +90,7 @@ import { getFeeJuiceBalance } from "@aztec/aztec.js/utils"; const aliceBalance = await getFeeJuiceBalance(aliceAddress, node); console.log(`Alice's fee juice balance: ${aliceBalance}`); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L34-L39 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L34-L39 ## Next steps diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_create_account.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_create_account.md similarity index 93% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_create_account.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_create_account.md index 626a82ad921e..08b7ce8f7a73 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_create_account.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_create_account.md @@ -15,7 +15,7 @@ This guide shows you how to create and deploy a new account on Aztec. ## Install dependencies ```bash -yarn add @aztec/aztec.js@4.0.0-nightly.20260217 @aztec/wallets@4.0.0-nightly.20260217 +yarn add @aztec/aztec.js@4.0.0-nightly.20260218 @aztec/wallets@4.0.0-nightly.20260218 ``` ## Create a new account @@ -30,7 +30,7 @@ const salt = Fr.random(); const newAccount = await wallet.createSchnorrAccount(secret, salt); console.log("New account address:", newAccount.address.toString()); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L41-L48 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L41-L48 The secret is used to derive the account's encryption keys, and the salt ensures address uniqueness. The signing key is automatically derived from the secret. 
@@ -74,7 +74,7 @@ await deployMethod.send({ fee: { paymentMethod: sponsoredPaymentMethod }, }); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L50-L76 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L50-L76 :::info @@ -92,7 +92,7 @@ await deployMethodFeeJuice.send({ from: AztecAddress.ZERO, }); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L78-L84 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L78-L84 The `from: AztecAddress.ZERO` is required because there's no existing account to send from—the transaction itself creates the account. @@ -105,7 +105,7 @@ Confirm the account was deployed successfully: const metadata = await wallet.getContractMetadata(newAccount.address); console.log("Account deployed:", metadata.isContractInitialized); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L86-L89 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L86-L89 ## Next steps diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_deploy_contract.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_deploy_contract.md similarity index 94% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_deploy_contract.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_deploy_contract.md index f3972921bd73..aba8bc4c79e8 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_deploy_contract.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_deploy_contract.md @@ -60,7 +60,7 @@ const token = await TokenContract.deploy( 18, ).send({ from: aliceAddress }); // alice has fee juice and is registered in the wallet ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L30-L40 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L30-L40 On testnet, your account 
likely won't have Fee Juice. Instead, pay fees using the [Sponsored Fee Payment Contract method](./how_to_pay_fees.md): @@ -83,7 +83,7 @@ const sponsoredContract = await TokenContract.deploy( 18, ).send({ from: aliceAddress, fee: { paymentMethod: sponsoredPaymentMethod } }); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L42-L59 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L42-L59 Here's a complete example from the test suite: @@ -91,7 +91,7 @@ Here's a complete example from the test suite: ```typescript title="deploy_basic" showLineNumbers const contract = await StatefulTestContract.deploy(wallet, owner, 42).send({ from: defaultAccountAddress }); ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L42-L44 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L42-L44 ## Use deployment options @@ -115,7 +115,7 @@ const saltedContract = await TokenContract.deploy( contractAddressSalt: customSalt, }); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L61-L75 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L61-L75 ### Deploy universally @@ -126,7 +126,7 @@ Deploy to the same address across networks by setting `universalDeploy: true`: const opts = { universalDeploy: true, from: defaultAccountAddress }; const contract = await StatefulTestContract.deploy(wallet, owner, 42).send(opts); ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L59-L62 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L59-L62 :::info @@ -159,7 +159,7 @@ await delayedToken.methods console.log("Contract initialized"); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L250-L271 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L250-L271 ### Deploy with a specific initializer @@ -175,7 +175,7 @@ const contract = await StatefulTestContract.deployWithOpts( from: defaultAccountAddress, 
}); ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L81-L89 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L81-L89 The `deployWithOpts` method accepts an options object as its first argument: @@ -210,7 +210,7 @@ const predictedAddress = instance.address; console.log(`Contract will deploy at: ${predictedAddress}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L77-L92 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L77-L92 :::warning @@ -242,7 +242,7 @@ console.log(`Deployment tx: ${txHash}`); const receipt = await waitForTx(node, txHash); console.log(`Deployed in block ${receipt.blockNumber}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L129-L147 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L129-L147 For most use cases, simply await the deployment to get the contract directly: @@ -260,7 +260,7 @@ const token = await TokenContract.deploy( console.log(`Token deployed at: ${token.address.toString()}`); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L91-L103 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L91-L103 ## Deploy multiple contracts @@ -274,7 +274,7 @@ const token = await TokenContract.deploy(wallet, owner, 'TOKEN', 'TKN', 18).send from: defaultAccountAddress, }); ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L70-L74 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L70-L74 ### Deploy contracts with dependencies @@ -303,7 +303,7 @@ const derivedToken = await TokenContract.deploy( console.log(`Base token at: ${baseToken.address.toString()}`); console.log(`Derived token at: ${derivedToken.address.toString()}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L208-L229 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L208-L229 ### Deploy contracts in parallel @@ -326,7 +326,7 @@ 
console.log(`Contract 1 at: ${contracts[0].address}`); console.log(`Contract 2 at: ${contracts[1].address}`); console.log(`Contract 3 at: ${contracts[2].address}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L231-L248 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L231-L248 :::tip[Parallel deployment considerations] @@ -350,7 +350,7 @@ const contract = await deployMethod.register(); const publicCall = contract.methods.increment_public_value(owner, 84); await new BatchCall(wallet, [deployMethod, publicCall]).send({ from: defaultAccountAddress }); ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L127-L135 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L127-L135 ## Verify deployment @@ -378,7 +378,7 @@ const metadata = await wallet.getContractMetadata(contract.address); const classMetadata = await wallet.getContractClassMetadata(metadata.instance!.currentContractClassId); const isPublished = classMetadata.isContractClassPubliclyRegistered; ``` -> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L49-L53 +> Source code: yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts#L49-L53 ### What the PXE checks automatically @@ -411,7 +411,7 @@ try { console.error("Contract not accessible:", (error as Error).message); } ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L94-L105 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L94-L105 ## Register deployed contracts @@ -437,7 +437,7 @@ await wallet.registerContract(metadata.instance!, TokenContract.artifact); // Now you can interact with the contract const externalContract = await TokenContract.at(contractAddress, wallet); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L107-L123 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L107-L123 :::warning @@ -463,7 +463,7 @@ console.log( `Reconstructed contract address: 
${reconstructedInstance.address.toString()}`, ); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L173-L192 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L173-L192 ::: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_pay_fees.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_pay_fees.md similarity index 98% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_pay_fees.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_pay_fees.md index 3fe3439bb5bc..62997ebf5312 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_pay_fees.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_pay_fees.md @@ -136,7 +136,7 @@ const sponsoredContract = await TokenContract.deploy( 18, ).send({ from: aliceAddress, fee: { paymentMethod: sponsoredPaymentMethod } }); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L42-L59 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L42-L59 Here's a simpler example from the test suite: @@ -151,7 +151,7 @@ const tx = await bananaCoin.methods.transfer_in_public(aliceAddress, bobAddress, }, }); ``` -> Source code: yarn-project/end-to-end/src/e2e_fees/sponsored_payments.test.ts#L57-L66 +> Source code: yarn-project/end-to-end/src/e2e_fees/sponsored_payments.test.ts#L57-L66 ### Use other Fee Paying Contracts @@ -180,7 +180,7 @@ const receiptForAlice = await bananaCoin.methods .transfer(bob, amountTransferToBob) .send({ from: alice, fee: { paymentMethod } }); ``` -> Source code: yarn-project/end-to-end/src/composed/e2e_local_network_example.test.ts#L185-L194 +> Source code: yarn-project/end-to-end/src/composed/e2e_local_network_example.test.ts#L185-L194 Public FPCs can be used in the same way: diff --git 
a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_read_data.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_read_data.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_read_data.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_read_data.md index 21945880c8d9..a812ae432d1e 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_read_data.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_read_data.md @@ -23,7 +23,7 @@ const balance = await token.methods console.log(`Alice's token balance: ${balance}`); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L114-L120 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L114-L120 The `from` option specifies which address context to use for the simulation. This is required for all simulations, though it only affects private function execution (public functions ignore this value). @@ -34,7 +34,7 @@ The `from` option specifies which address context to use for the simulation. 
Thi const balance = await contract.methods.balance_of_public(newAccountAddress).simulate({ from: newAccountAddress }); expect(balance).toEqual(1n); ``` -> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 +> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 ### Handling return values @@ -151,7 +151,7 @@ const collectedEvent1s = await getPublicEvents( publicEventFilter, ); ``` -> Source code: yarn-project/end-to-end/src/e2e_event_logs.test.ts#L137-L154 +> Source code: yarn-project/end-to-end/src/e2e_event_logs.test.ts#L137-L154 The function parameters are: @@ -197,7 +197,7 @@ const collectedEvent1s = await wallet.getPrivateEvents( eventFilter, ); ``` -> Source code: yarn-project/end-to-end/src/e2e_event_logs.test.ts#L68-L87 +> Source code: yarn-project/end-to-end/src/e2e_event_logs.test.ts#L68-L87 The `PrivateEventFilter` includes: @@ -249,7 +249,7 @@ async function pollForTransferEvents() { // Example: poll once (in production, use setInterval) await pollForTransferEvents(); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L273-L302 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L273-L302 For private events, use the same pattern with `wallet.getPrivateEvents()` and update the `fromBlock` in your filter accordingly. 
diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_send_transaction.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_send_transaction.md similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_send_transaction.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_send_transaction.md index 1339e1734350..64afbdbb9989 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_send_transaction.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_send_transaction.md @@ -58,7 +58,7 @@ console.log(`Transaction sent: ${transferTxHash.toString()}`); const transferReceipt = await waitForTx(node, transferTxHash); console.log(`Transaction mined in block ${transferReceipt.blockNumber}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L149-L160 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L149-L160 ## Send batch transactions @@ -75,7 +75,7 @@ const batch = new BatchCall(wallet, [ const batchReceipt = await batch.send({ from: aliceAddress }); console.log(`Batch executed in block ${batchReceipt.blockNumber}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L162-L171 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L162-L171 :::warning @@ -99,7 +99,7 @@ console.log(`Status: ${txReceipt.status}`); console.log(`Block number: ${txReceipt.blockNumber}`); console.log(`Transaction fee: ${txReceipt.transactionFee}`); ``` -> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L194-L206 +> Source code: docs/examples/ts/aztecjs_advanced/index.ts#L194-L206 The receipt includes: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_test.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_test.md 
similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_test.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_test.md index de5afc95afac..0f57c5d484ad 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_test.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_test.md @@ -30,7 +30,7 @@ const nodeInfo = await node.getNodeInfo(); logger.info(format('Aztec Local Network Info ', nodeInfo)); ``` -> Source code: yarn-project/end-to-end/src/composed/e2e_local_network_example.test.ts#L37-L50 +> Source code: yarn-project/end-to-end/src/composed/e2e_local_network_example.test.ts#L37-L50 The `EmbeddedWallet` manages accounts, tracks deployed contracts, and handles transaction proving. It connects to the Aztec node which provides access to both the Private eXecution Environment (PXE) and the network. @@ -71,7 +71,7 @@ Use `.simulate()` to read contract state without creating a transaction: const balance = await contract.methods.balance_of_public(newAccountAddress).simulate({ from: newAccountAddress }); expect(balance).toEqual(1n); ``` -> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 +> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 Simulations are free (no gas cost) and return the function's result directly. Use them for: @@ -92,7 +92,7 @@ const receipt = await token.methods console.log(`Transaction mined in block ${receipt.blockNumber}`); console.log(`Transaction fee: ${receipt.transactionFee}`); ``` -> Source code: docs/examples/ts/aztecjs_connection/index.ts#L105-L112 +> Source code: docs/examples/ts/aztecjs_connection/index.ts#L105-L112 The `send()` method returns when the transaction is included in a block. 
@@ -206,7 +206,7 @@ async function runTests() { runTests().catch(console.error); ``` -> Source code: docs/examples/ts/aztecjs_testing/index.ts#L1-L105 +> Source code: docs/examples/ts/aztecjs_testing/index.ts#L1-L105 ## Testing failure cases diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_use_authwit.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_use_authwit.md similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_use_authwit.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_use_authwit.md index dc208ec836ec..7711d0953017 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/how_to_use_authwit.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/how_to_use_authwit.md @@ -55,7 +55,7 @@ const privateWitness = await wallet.createAuthWit(aliceAddress, { // Bob executes the transfer, providing the authwit await privateAction.send({ from: bobAddress, authWitnesses: [privateWitness] }); ``` -> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L43-L63 +> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L43-L63 :::tip @@ -90,7 +90,7 @@ await authwit.send(); // Now Bob can execute the transfer await publicAction.send({ from: bobAddress }); ``` -> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L65-L88 +> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L65-L88 ## Create arbitrary message authwits @@ -116,7 +116,7 @@ const intent = { const arbitraryWitness = await wallet.createAuthWit(aliceAddress, intent); console.log("Arbitrary authwit created:", arbitraryWitness); ``` -> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L90-L108 +> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L90-L108 The `consumer` is the contract address that will verify this authwit. 
@@ -153,7 +153,7 @@ const revokeInteraction = await SetPublicAuthwitContractInteraction.create( ); await revokeInteraction.send(); ``` -> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L110-L137 +> Source code: docs/examples/ts/aztecjs_authwit/index.ts#L110-L137 ## Next steps diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/index.md similarity index 89% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/index.md index dcd03b0a2cff..f552611f1e2c 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/index.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/index.md @@ -12,7 +12,7 @@ Aztec.js is a library that provides APIs for managing accounts and interacting w ## Installing ```bash -npm install @aztec/aztec.js@4.0.0-nightly.20260217 +npm install @aztec/aztec.js@4.0.0-nightly.20260218 ``` ## Common Dependencies @@ -20,10 +20,10 @@ npm install @aztec/aztec.js@4.0.0-nightly.20260217 Most applications will need additional packages alongside `@aztec/aztec.js`, e.g.: ```bash -npm install @aztec/aztec.js@4.0.0-nightly.20260217 \ - @aztec/accounts@4.0.0-nightly.20260217 \ - @aztec/wallets@4.0.0-nightly.20260217 \ - @aztec/noir-contracts.js@4.0.0-nightly.20260217 +npm install @aztec/aztec.js@4.0.0-nightly.20260218 \ + @aztec/accounts@4.0.0-nightly.20260218 \ + @aztec/wallets@4.0.0-nightly.20260218 \ + @aztec/noir-contracts.js@4.0.0-nightly.20260218 ``` | Package | Description | diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/typescript_api_reference.mdx b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/typescript_api_reference.mdx similarity index 100% rename from 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-js/typescript_api_reference.mdx rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-js/typescript_api_reference.mdx diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/api.mdx b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/api.mdx similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/api.mdx rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/api.mdx diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/compiling_contracts.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/compiling_contracts.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/compiling_contracts.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/compiling_contracts.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/contract_readiness_states.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/contract_readiness_states.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/contract_readiness_states.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/contract_readiness_states.md diff --git 
a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/debugging.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/debugging.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/debugging.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/debugging.md index 15890de58779..c5783b1da147 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/debugging.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/debugging.md @@ -90,7 +90,7 @@ LOG_LEVEL="info;debug:simulator:client_execution_context;debug:simulator:client_ | Error | Solution | | -------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `Aztec dependency not found` | Add to Nargo.toml: `aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/aztec" }` | +| `Aztec dependency not found` | Add to Nargo.toml: `aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/aztec" }` | | `Public state writes only supported in public functions` | Move state writes to public functions | | `Unknown contract 0x0` | Call `wallet.registerContract(...)` to register contract | | `No public key registered for address` | Call `wallet.registerSender(...)` | diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/_category_.json similarity index 100% rename from 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_profile_transactions.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_profile_transactions.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_profile_transactions.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_profile_transactions.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md index 
ab221f88353b..9828f2d054a0 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_prove_history.md @@ -34,7 +34,7 @@ Import the function: ```rust title="history_import" showLineNumbers use aztec::history::note::assert_note_existed_by; ``` -> Source code: noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr#L5-L7 +> Source code: noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr#L5-L7 Prove a note exists in the note hash tree: @@ -43,7 +43,7 @@ Prove a note exists in the note hash tree: let header = self.context.get_anchor_block_header(); let _ = assert_note_existed_by(header, hinted_note); ``` -> Source code: noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr#L49-L52 +> Source code: noir-projects/noir-contracts/contracts/app/claim_contract/src/main.nr#L49-L52 ## Prove note validity diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md index 4f0e49a1ea5c..bf4bcaa3b34d 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md +++ 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_retrieve_filter_notes.md @@ -52,7 +52,7 @@ let notes = nfts.at(from).pop_notes(NoteGetterOptions::new() .set_limit(1)); assert(notes.len() == 1, "NFT not found when transferring"); ``` -> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L249-L254 +> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L249-L254 ## Filter notes by properties @@ -122,7 +122,7 @@ pub fn filter_notes_min_sum( selected } ``` -> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/filter.nr#L9-L27 +> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/filter.nr#L9-L27 Then use it with `NoteGetterOptions`: @@ -182,7 +182,7 @@ unconstrained fn get_private_nfts( (owned_nft_ids, page_limit_reached) } ``` -> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L294-L314 +> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L294-L314 :::tip Viewer vs Getter diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_use_capsules.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_use_capsules.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/how_to_use_capsules.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/how_to_use_capsules.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/partial_notes.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/partial_notes.md 
similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/partial_notes.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/partial_notes.md index 56d1143a08d4..ad6354594432 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/partial_notes.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/partial_notes.md @@ -22,7 +22,7 @@ pub struct UintNote { pub value: u128, } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 The `UintNote` struct itself only contains the `value` field. Additional fields including `owner`, `randomness`, and `storage_slot` are passed as parameters during note hash computation. @@ -55,7 +55,7 @@ pub struct UintNote { pub value: u128, } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 ### Two-Phase Commitment Process @@ -72,7 +72,7 @@ fn compute_partial_commitment(owner: AztecAddress, storage_slot: Field, randomne ) } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L156-L163 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L156-L163 This creates a partial note commitment: @@ -92,7 +92,7 @@ fn compute_complete_note_hash(self, value: u128) -> Field { poseidon2_hash_with_separator([self.commitment, value.to_field()], DOM_SEP__NOTE_HASH) } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L270-L276 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L270-L276 The resulting structure is a nested commitment: @@ -129,7 +129,7 @@ fn compute_note_hash(self, owner: AztecAddress, storage_slot: Field, 
randomness: partial_note.compute_complete_note_hash(self.value) } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L37-L54 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L37-L54 This two-step process ensures that notes with identical field values produce identical note hashes, regardless of whether they were created as partial notes or complete notes. diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/protocol_oracles.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/protocol_oracles.md similarity index 90% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/protocol_oracles.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/protocol_oracles.md index 97e972bc08d3..c73d5742859d 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/protocol_oracles.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/protocol_oracles.md @@ -25,17 +25,17 @@ Oracles introduce **non-determinism** into a circuit, and thus are `unconstraine ```rust title="oracles-module" showLineNumbers /// Oracles module ``` -> Source code: noir-projects/aztec-nr/aztec/src/oracle/mod.nr#L3-L5 +> Source code: noir-projects/aztec-nr/aztec/src/oracle/mod.nr#L3-L5 ## Inbuilt oracles -- [`debug_log`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr) - Provides debug functions that can be used to log information to the console. Read more about debugging [here](../../debugging.md). 
-- [`auth_witness`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr) - Provides a way to fetch the authentication witness for a given address. This is useful when building account contracts to support approve-like functionality. -- [`get_l1_to_l2_membership_witness`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/oracle/get_l1_to_l2_membership_witness.nr) - Returns the leaf index and sibling path for an L1 to L2 message, used to prove message existence in cross-chain applications like token bridges. -- [`notes`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/oracle/notes.nr) - Provides functions related to notes, such as fetching notes from storage, used behind the scenes for value notes and other pre-built note implementations. -- [`logs`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/oracle/logs.nr) - Provides functions to log encrypted and unencrypted data. +- [`debug_log`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr) - Provides debug functions that can be used to log information to the console. Read more about debugging [here](../../debugging.md). +- [`auth_witness`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr) - Provides a way to fetch the authentication witness for a given address. This is useful when building account contracts to support approve-like functionality. 
+- [`get_l1_to_l2_membership_witness`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/oracle/get_l1_to_l2_membership_witness.nr) - Returns the leaf index and sibling path for an L1 to L2 message, used to prove message existence in cross-chain applications like token bridges. +- [`notes`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/oracle/notes.nr) - Provides functions related to notes, such as fetching notes from storage, used behind the scenes for value notes and other pre-built note implementations. +- [`logs`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/oracle/logs.nr) - Provides functions to log encrypted and unencrypted data. -Find a full list [on GitHub](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/oracle). +Find a full list [on GitHub](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/oracle). Please note that it is **not** possible to write a custom oracle for your dapp. Oracles are implemented in the PXE, so all users of your dapp would have to use a PXE with your custom oracle included. If you want to inject some arbitrary data that does not have a dedicated oracle, you can use [capsules](./how_to_use_capsules.md). 
diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/writing_efficient_contracts.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/writing_efficient_contracts.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/advanced/writing_efficient_contracts.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/advanced/writing_efficient_contracts.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/authentication_witnesses.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/authentication_witnesses.md similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/authentication_witnesses.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/authentication_witnesses.md index e12759d0672f..b9c48e7178ea 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/authentication_witnesses.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/authentication_witnesses.md @@ -45,7 +45,7 @@ fn transfer_in_private( self.storage.balances.at(to).add(amount).deliver(MessageDelivery.ONCHAIN_CONSTRAINED); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L306-L320 +> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L306-L320 ### Public function example @@ -65,7 +65,7 @@ fn transfer_in_public( self.storage.public_balances.at(to).write(to_balance); } ``` -> Source code: 
noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L166-L180 +> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L166-L180 The macro parameters specify: @@ -117,7 +117,7 @@ fn _approve_bridge_and_exit_input_asset_to_L1( )); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/uniswap_contract/src/main.nr#L180-L219 +> Source code: noir-projects/noir-contracts/contracts/app/uniswap_contract/src/main.nr#L180-L219 Key steps: @@ -140,7 +140,7 @@ fn cancel_authwit(inner_hash: Field) { self.context.push_nullifier(nullifier); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L297-L304 +> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L297-L304 :::note diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/calling_contracts.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/calling_contracts.md similarity index 94% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/calling_contracts.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/calling_contracts.md index 1d8d4b1a4c38..6607daeb5848 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/calling_contracts.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/calling_contracts.md @@ -14,7 +14,7 @@ Add the contract you want to call to your `Nargo.toml` dependencies: ```toml [dependencies] -token = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/noir-contracts/contracts/app/token_contract" } +token = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", 
directory="noir-projects/noir-contracts/contracts/app/token_contract" } ``` Then import the contract interface at the top of your contract file: @@ -41,7 +41,7 @@ The pattern is: ```rust title="private_call" showLineNumbers let _ = self.call(Token::at(stable_coin).burn_private(from, amount, authwit_nonce)); ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L255-L257 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L255-L257 ### Public-to-public calls diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_artifact.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_artifact.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_artifact.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_artifact.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_structure.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_structure.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_structure.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_structure.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_upgrades.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_upgrades.md similarity index 100% rename from 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/contract_upgrades.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/contract_upgrades.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/custom_notes.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/custom_notes.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/custom_notes.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/custom_notes.md index f5a6416aa046..763ea37366fd 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/custom_notes.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/custom_notes.md @@ -29,21 +29,21 @@ Aztec.nr provides pre-built note types for common use cases: ```toml # In Nargo.toml -uint_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/uint-note" } +uint_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/uint-note" } ``` **FieldNote** - For storing single Field values: ```toml # In Nargo.toml -field_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/field-note" } +field_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/field-note" } ``` **AddressNote** - For storing Aztec addresses: ```toml # In Nargo.toml -address_note = { git="https://github.com/AztecProtocol/aztec-packages/", 
tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/address-note" } +address_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/address-note" } ``` ::: @@ -61,7 +61,7 @@ pub struct NFTNote { pub token_id: Field, } ``` -> Source code: docs/examples/contracts/nft/src/nft.nr#L1-L9 +> Source code: docs/examples/contracts/nft/src/nft.nr#L1-L9 The `#[note]` macro generates the following for your struct: @@ -125,7 +125,7 @@ fn mint(to: AztecAddress, token_id: Field) { self.enqueue_self._mark_nft_exists(token_id, true); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L50-L65 +> Source code: docs/examples/contracts/nft/src/main.nr#L50-L65 ### Reading and removing notes @@ -150,7 +150,7 @@ fn burn(from: AztecAddress, token_id: Field) { self.enqueue_self._mark_nft_exists(token_id, false); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L75-L92 +> Source code: docs/examples/contracts/nft/src/main.nr#L75-L92 :::warning @@ -255,7 +255,7 @@ unconstrained fn get_private_nfts( (owned_nft_ids, page_limit_reached) } ``` -> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L294-L314 +> Source code: noir-projects/noir-contracts/contracts/app/nft_contract/src/main.nr#L294-L314 ## Further reading diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/dependencies.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/dependencies.md similarity index 80% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/dependencies.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/dependencies.md index d3e102b4dbe1..60470cd94553 100644 --- 
a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/dependencies.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/dependencies.md @@ -9,7 +9,7 @@ This page lists the available Aztec.nr libraries. Add dependencies to the `[depe ```toml [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="aztec" } # Add other libraries as needed ``` @@ -18,7 +18,7 @@ aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly. ### Aztec (required) ```toml -aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/aztec" } ``` The core Aztec library required for every Aztec.nr smart contract. @@ -26,7 +26,7 @@ The core Aztec library required for every Aztec.nr smart contract. ### Protocol Types ```toml -protocol = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/noir-protocol-circuits/crates/types"} +protocol = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/noir-protocol-circuits/crates/types"} ``` Contains types used in the Aztec protocol (addresses, constants, hashes, etc.). @@ -36,7 +36,7 @@ Contains types used in the Aztec protocol (addresses, constants, hashes, etc.). 
### Address Note ```toml -address_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/address-note" } +address_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/address-note" } ``` Provides `AddressNote`, a note type for storing `AztecAddress` values. @@ -44,7 +44,7 @@ Provides `AddressNote`, a note type for storing `AztecAddress` values. ### Field Note ```toml -field_note = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="field-note" } +field_note = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="field-note" } ``` Provides `FieldNote`, a note type for storing a single `Field` value. @@ -52,7 +52,7 @@ Provides `FieldNote`, a note type for storing a single `Field` value. ### Uint Note ```toml -uint_note = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="uint-note" } +uint_note = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="uint-note" } ``` Provides `UintNote`, a note type for storing `u128` values. Also includes `PartialUintNote` for partial note workflows where the value is completed in public execution. @@ -62,7 +62,7 @@ Provides `UintNote`, a note type for storing `u128` values. Also includes `Parti ### Balance Set ```toml -balance_set = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="balance-set" } +balance_set = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="balance-set" } ``` Provides `BalanceSet`, a state variable for managing private balances. Includes helper functions for adding, subtracting, and querying balances. @@ -72,7 +72,7 @@ Provides `BalanceSet`, a state variable for managing private balances. 
Includes ### Compressed String ```toml -compressed_string = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="compressed-string" } +compressed_string = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="compressed-string" } ``` Provides `CompressedString` and `FieldCompressedString` utilities for working with compressed string data. diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md index 0ad0574a9084..305670310e95 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/ethereum_aztec_messaging.md @@ -41,7 +41,7 @@ function depositToAztecPublic(bytes32 _to, uint256 _amount, bytes32 _secretHash) external returns (bytes32, uint256) ``` -> Source code: l1-contracts/test/portals/TokenPortal.sol#L48-L60 +> Source code: l1-contracts/test/portals/TokenPortal.sol#L48-L60 :::note Message availability @@ -52,7 +52,7 @@ L1 to L2 messages are not available immediately. The proposer batches messages f Call `consume_l1_to_l2_message` on the context. The `content` must match the hash sent from L1, and the `secret` must be the pre-image of the `secretHash`. Consuming a message emits a nullifier to prevent double-spending. -The content hash must be computed identically on both L1 and L2. 
Create a shared library for your content hash functions—see [`token_portal_content_hash_lib`](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts/app/token_portal_content_hash_lib) for an example. +The content hash must be computed identically on both L1 and L2. Create a shared library for your content hash functions—see [`token_portal_content_hash_lib`](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts/app/token_portal_content_hash_lib) for an example. ```rust title="claim_public" showLineNumbers // Consumes a L1->L2 message and calls the token contract to mint the appropriate amount publicly @@ -74,7 +74,7 @@ fn claim_public(to: AztecAddress, amount: u128, secret: Field, message_leaf_inde self.call(Token::at(config.token).mint_to_public(to, amount)); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr#L50-L69 +> Source code: noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr#L50-L69 This function works in both public and private contexts. @@ -105,7 +105,7 @@ fn exit_to_l1_public( self.call(Token::at(config.token).burn_public(self.msg_sender(), amount, authwit_nonce)); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr#L71-L90 +> Source code: noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr#L71-L90 This function works in both public and private contexts. 
@@ -155,7 +155,7 @@ function withdraw( underlying.safeTransfer(_recipient, _amount); } ``` -> Source code: l1-contracts/test/portals/TokenPortal.sol#L114-L150 +> Source code: l1-contracts/test/portals/TokenPortal.sol#L114-L150 :::info Getting the membership witness @@ -187,8 +187,8 @@ const witness = await computeL2ToL1MembershipWitness( ## Example implementations -- [Token Portal (L1)](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/test/portals/TokenPortal.sol) -- [Token Bridge (L2)](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr) +- [Token Portal (L1)](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/test/portals/TokenPortal.sol) +- [Token Bridge (L2)](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts/app/token_bridge_contract/src/main.nr) ## Next steps diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/events_and_logs.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/events_and_logs.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/events_and_logs.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/events_and_logs.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/attributes.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/attributes.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/attributes.md rename to 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/attributes.md index f768bd8545b2..f8a161223424 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/attributes.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/attributes.md @@ -66,7 +66,7 @@ unconstrained fn balance_of_private(owner: AztecAddress) -> u128 { self.storage.balances.at(owner).balance_of() } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L518-L523 +> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L518-L523 :::info @@ -90,7 +90,7 @@ fn set_minter(minter: AztecAddress, approve: bool) { self.storage.minters.at(minter).write(approve); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L149-L155 +> Source code: noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr#L149-L155 Under the hood, the macro: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/context.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/context.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/context.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/context.md index d9becb054447..9666de496cd8 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/context.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/context.md @@ -50,7 +50,7 @@ pub expiration_timestamp: u64, pub(crate) 
note_hash_read_requests: BoundedVec>, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL>, pub(crate) nullifier_read_requests: BoundedVec>, MAX_NULLIFIER_READ_REQUESTS_PER_CALL>, -key_validation_requests_and_generators: BoundedVec, +key_validation_requests_and_separators: BoundedVec, pub note_hashes: BoundedVec, MAX_NOTE_HASHES_PER_CALL>, pub nullifiers: BoundedVec, MAX_NULLIFIERS_PER_CALL>, @@ -60,7 +60,7 @@ pub public_call_requests: BoundedVec, MAX_ENQUEUED_CA pub public_teardown_call_request: PublicCallRequest, pub l2_to_l1_msgs: BoundedVec, MAX_L2_TO_L1_MSGS_PER_CALL>, ``` -> Source code: noir-projects/aztec-nr/aztec/src/context/private_context.nr#L141-L164 +> Source code: noir-projects/aztec-nr/aztec/src/context/private_context.nr#L141-L164 ### Private Context Broken Down @@ -78,7 +78,7 @@ pub struct PrivateContextInputs { pub start_side_effect_counter: u32, } ``` -> Source code: noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr#L7-L15 +> Source code: noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr#L7-L15 As shown in the snippet, the application context is made up of 4 main structures. The call context, the block header, and the private global variables. 
@@ -98,7 +98,7 @@ pub struct CallContext { pub is_static_call: bool, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/call_context.nr#L8-L20 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/call_context.nr#L8-L20 The call context contains information about the current call being made: @@ -141,7 +141,7 @@ pub struct BlockHeader { pub total_mana_used: Field, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/block_header.nr#L12-L29 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/block_header.nr#L12-L29 ### Transaction Context @@ -159,7 +159,7 @@ pub struct TxContext { pub gas_settings: GasSettings, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/transaction/tx_context.nr#L8-L18 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/transaction/tx_context.nr#L8-L18 ### Args Hash @@ -182,7 +182,7 @@ Some data structures impose time constraints, e.g. they may make it so that a va ```rust title="expiration-timestamp" showLineNumbers pub fn set_expiration_timestamp(&mut self, expiration_timestamp: u64) { ``` -> Source code: noir-projects/aztec-nr/aztec/src/context/private_context.nr#L618-L620 +> Source code: noir-projects/aztec-nr/aztec/src/context/private_context.nr#L618-L620 A transaction that sets this value will never be included in a block with a timestamp larger than the requested value, since it would be considered invalid. This can also be used to make transactions automatically expire after some time if not included. 
@@ -238,5 +238,5 @@ pub struct GlobalVariables { pub gas_fees: GasFees, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/global_variables.nr#L7-L19 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/global_variables.nr#L7-L19 diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/function_transforms.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/function_transforms.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/function_transforms.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/function_transforms.md index e1ab2f9ce4f9..aecef4663f93 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/function_transforms.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/function_transforms.md @@ -170,4 +170,4 @@ Contract artifacts enable: ## Further reading - [Function attributes and macros](./attributes.md) -- [Aztec.nr macro source code](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/macros) - for those who want to see the actual transformation implementation +- [Aztec.nr macro source code](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/macros) - for those who want to see the actual transformation implementation diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/how_to_define_functions.md 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/how_to_define_functions.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/how_to_define_functions.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/how_to_define_functions.md index 06848aa8cbcc..b1c58d5944aa 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/how_to_define_functions.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/how_to_define_functions.md @@ -40,7 +40,7 @@ fn increment(owner: AztecAddress) { self.storage.counters.at(owner).add(1).deliver(MessageDelivery.ONCHAIN_CONSTRAINED); } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L36-L42 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L36-L42 Private functions run in a private context, can access private state, and can read certain public values through storage types like [`DelayedPublicMutable`](../state_variables.md#delayedpublicmutable). @@ -60,7 +60,7 @@ fn mint_public(employee: AztecAddress, amount: u64) { self.storage.public_balances.at(employee).write(current_balance + amount); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L41-L51 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L41-L51 Public functions operate on public state, similar to EVM contracts. They can write to private storage, but any data written from a public function is publicly visible. 
@@ -77,7 +77,7 @@ unconstrained fn get_counter(owner: AztecAddress) -> pub u128 { self.storage.counters.at(owner).balance_of() } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L44-L49 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L44-L49 Use `aztec.js` `simulate` to execute utility functions and read their return values. For details, see [Call Types](../../../foundational-topics/call_types.md#simulate). @@ -108,7 +108,7 @@ fn _assert_is_owner(address: AztecAddress) { assert_eq(address, self.storage.owner.read(), "Only Giggle can mint BOB tokens"); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L131-L137 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L131-L137 Only-self functions are only callable by the same contract, which is useful when a private function enqueues a public call that should only be callable internally. @@ -127,7 +127,7 @@ fn initialize(headstart: u64, owner: AztecAddress) { ); } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L25-L34 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L25-L34 ### Use multiple initializers diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/index.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/visibility.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/visibility.md similarity 
index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/functions/visibility.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/functions/visibility.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/globals.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/globals.md similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/globals.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/globals.md index 3baee75c669e..1579d7d2132c 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/globals.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/globals.md @@ -26,7 +26,7 @@ pub struct TxContext { pub gas_settings: GasSettings, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/transaction/tx_context.nr#L8-L18 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/transaction/tx_context.nr#L8-L18 The following fields are accessible via `context` methods: @@ -72,7 +72,7 @@ pub struct GlobalVariables { pub gas_fees: GasFees, } ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/global_variables.nr#L7-L19 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/abis/global_variables.nr#L7-L19 :::note diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/macros.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/macros.md similarity index 100% rename from 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/macros.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/macros.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/note_delivery.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/note_delivery.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/note_delivery.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/note_delivery.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/state_variables.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/state_variables.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/state_variables.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/state_variables.md index 5a0896063bfd..df5378f7c326 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/framework-description/state_variables.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/framework-description/state_variables.md @@ -125,7 +125,7 @@ For example, storing the address of the collateral asset in a lending contract: ```rust title="public_mutable" showLineNumbers collateral_asset: PublicMutable, ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L34-L36 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L34-L36 #### `read` @@ -139,7 +139,7 @@ fn get_assets() -> pub 
[AztecAddress; 2] { [self.storage.collateral_asset.read(), self.storage.stable_coin.read()] } ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L303-L309 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L303-L309 #### `write` @@ -149,7 +149,7 @@ The `write` method on `PublicMutable` variables takes the value to write as an i ```rust title="public_mutable_write" showLineNumbers self.storage.collateral_asset.write(collateral_asset); ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L76-L78 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L76-L78 ### PublicImmutable @@ -167,7 +167,7 @@ symbol: PublicImmutable, name: PublicImmutable, decimals: PublicImmutable, ``` -> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L45-L49 +> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L45-L49 #### `initialize` @@ -179,7 +179,7 @@ self.storage.name.initialize(FieldCompressedString::from_string(name)); self.storage.symbol.initialize(FieldCompressedString::from_string(symbol)); self.storage.decimals.initialize(decimals); ``` -> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L55-L59 +> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L55-L59 :::warning @@ -197,7 +197,7 @@ fn public_get_name() -> FieldCompressedString { self.storage.name.read() } ``` -> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L62-L68 +> Source code: noir-projects/noir-contracts/contracts/app/simple_token_contract/src/main.nr#L62-L68 ### DelayedPublicMutable @@ -223,7 +223,7 @@ struct Storage { authorized: DelayedPublicMutable, } ``` -> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L15-L25 +> Source code: 
noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L15-L25 #### `schedule_value_change` @@ -237,7 +237,7 @@ fn set_authorized(authorized: AztecAddress) { self.storage.authorized.schedule_value_change(authorized); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L34-L40 +> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L34-L40 #### `get_current_value` @@ -251,7 +251,7 @@ fn get_authorized() -> AztecAddress { self.storage.authorized.get_current_value() } ``` -> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L42-L48 +> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L42-L48 :::warning Privacy Consideration @@ -269,7 +269,7 @@ fn get_scheduled_authorized() -> (AztecAddress, u64) { self.storage.authorized.get_scheduled_value() } ``` -> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L50-L56 +> Source code: noir-projects/noir-contracts/contracts/app/auth_contract/src/main.nr#L50-L56 ## Private State Variables @@ -371,7 +371,7 @@ fn mint(amount: u128, recipient: AztecAddress) { self.storage.balances.at(recipient).add(amount).deliver(MessageDelivery.ONCHAIN_CONSTRAINED); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/private_token_contract/src/main.nr#L55-L78 +> Source code: noir-projects/noir-contracts/contracts/app/private_token_contract/src/main.nr#L55-L78 Methods that return `NoteMessage` include `initialize()`, `get_note()`, and `replace()` on `PrivateMutable`, `initialize()` on `PrivateImmutable`, and `insert()` on `PrivateSet` (more on these methods and private state variable types shortly). 
@@ -457,7 +457,7 @@ Access the underlying state variable for a specific owner using `.at(owner)` ```rust title="owned_private_mutable" showLineNumbers subscriptions: Owned, Context>, ``` -> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L60-L62 +> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L60-L62 #### `is_initialized` @@ -470,7 +470,7 @@ unconstrained fn is_initialized(subscriber: AztecAddress) -> bool { self.storage.subscriptions.at(subscriber).is_initialized() } ``` -> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L172-L177 +> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L172-L177 #### `initialize` and `initialize_or_replace` @@ -487,7 +487,7 @@ self }) .deliver(MessageDelivery.ONCHAIN_CONSTRAINED); ``` -> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L160-L169 +> Source code: noir-projects/noir-contracts/contracts/app/app_subscription_contract/src/main.nr#L160-L169 #### `get_note` @@ -531,7 +531,7 @@ fn transfer_admin(new_admin: AztecAddress) { .deliver(MessageDelivery.ONCHAIN_CONSTRAINED); } ``` -> Source code: noir-projects/noir-contracts/contracts/app/private_token_contract/src/main.nr#L81-L99 +> Source code: noir-projects/noir-contracts/contracts/app/private_token_contract/src/main.nr#L81-L99 ### PrivateImmutable @@ -545,7 +545,7 @@ Unlike `PrivateMutable`, the `get_note` function for a `PrivateImmutable` doesn' ```rust title="private_immutable" showLineNumbers note_in_private_immutable: Owned, Context>, ``` -> Source code: noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr#L85-L87 +> Source code: noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr#L85-L87 `PrivateImmutable` variables also have the `initialize` and `get_note` functions on them but no `initialize_or_replace` since 
they cannot be modified. @@ -569,7 +569,7 @@ struct Storage { balances: Owned, Context>, } ``` -> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L27-L32 +> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L27-L32 #### `insert` @@ -579,7 +579,7 @@ Allows us to modify the storage by inserting a note into the `PrivateSet`: ```rust title="private_set_insert" showLineNumbers owner_balance.insert(note).deliver(MessageDelivery.ONCHAIN_CONSTRAINED); ``` -> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L51-L53 +> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L51-L53 Note: The `Owned` wrapper requires calling `.at(owner)` to access the underlying `PrivateSet` for a specific owner. This binds the owner to the state variable instance. @@ -593,7 +593,7 @@ let options = NoteGetterOptions::with_filter(filter_notes_min_sum, amount); // get note (note inserted at bottom of function shouldn't exist yet) let notes = owner_balance.get_notes(options); ``` -> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L70-L74 +> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L70-L74 #### `pop_notes` @@ -604,7 +604,7 @@ This function pops (gets, removes and returns) the notes the account has access let options = NoteGetterOptions::new().set_limit(1); let note = owner_balance.pop_notes(options).get(0); ``` -> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L137-L140 +> Source code: noir-projects/noir-contracts/contracts/test/pending_note_hashes_contract/src/main.nr#L137-L140 #### `remove` diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/index.md 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/index.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/index.md index fe69995a6ef7..e6925959c535 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/index.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/index.md @@ -48,7 +48,7 @@ storage.votes.insert(new_vote).deliver(vote_counter); // the vote counter accoun ```toml # Nargo.toml [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="aztec" } ``` Update your `main.nr` contract file to use the Aztec.nr macros for writing contracts. @@ -59,7 +59,7 @@ use aztec::macros::aztec; #[aztec] pub contract Counter { ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L1-L6 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L1-L6 and import dependencies from the Aztec.nr library. @@ -74,12 +74,12 @@ use aztec::{ }; use balance_set::BalanceSet; ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L7-L16 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L7-L16 :::info -You can see a complete example of a simple counter contract written with Aztec.nr [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/docs/examples/contracts/counter_contract/src/main.nr). +You can see a complete example of a simple counter contract written with Aztec.nr [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/docs/examples/contracts/counter_contract/src/main.nr). 
::: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/installation.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/installation.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/installation.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/installation.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/testing_contracts.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/testing_contracts.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/testing_contracts.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/testing_contracts.md index 7faa7422e434..8d61ac2551cd 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/aztec-nr/testing_contracts.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/aztec-nr/testing_contracts.md @@ -67,7 +67,7 @@ unconstrained fn test_basic_flow() { - Tests run in parallel by default - Use `unconstrained` functions for faster execution -- See all `TestEnvironment` methods [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr) +- See all `TestEnvironment` methods [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr) ::: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/_category_.json rename to 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_cli_reference.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_cli_reference.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_cli_reference.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_cli_reference.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_up_cli_reference.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_up_cli_reference.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_up_cli_reference.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_up_cli_reference.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_wallet_cli_reference.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_wallet_cli_reference.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/cli/aztec_wallet_cli_reference.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/cli/aztec_wallet_cli_reference.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/_category_.json diff --git 
a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/index.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/keys.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/keys.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/keys.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/keys.md index 00c20a8750ab..e17eff0a35bb 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/accounts/keys.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/accounts/keys.md @@ -115,7 +115,7 @@ let pub_key = std::embedded_curve_ops::EmbeddedCurvePoint { // Verify signature of the payload bytes schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) ``` -> Source code: 
noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr#L93-L114 +> Source code: noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr#L93-L114 The flexibility of signing key storage and rotation is entirely up to your account contract implementation. diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/authwit.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/authwit.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/authwit.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/authwit.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/index.md 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/index.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/private_kernel.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/private_kernel.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/private_kernel.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/private_kernel.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/public_execution.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/public_execution.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/public_execution.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/public_execution.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/rollup_circuits.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/rollup_circuits.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/circuits/rollup_circuits.md rename to 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/circuits/rollup_circuits.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/indexed_merkle_tree.mdx b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/indexed_merkle_tree.mdx similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/indexed_merkle_tree.mdx rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/indexed_merkle_tree.mdx diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/note_discovery.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/note_discovery.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/note_discovery.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/note_discovery.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/storage_slots.md 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/storage_slots.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/advanced/storage/storage_slots.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/advanced/storage/storage_slots.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/call_types.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/call_types.md similarity index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/call_types.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/call_types.md index 793c9b0eef3f..392d7fb9da05 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/call_types.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/call_types.md @@ -118,7 +118,7 @@ Private functions from other contracts can be called either regularly or statica ```rust title="private_call" showLineNumbers let _ = self.call(Token::at(stable_coin).burn_private(from, amount, authwit_nonce)); ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L255-L257 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L255-L257 Unlike the EVM however, private execution doesn't revert in the traditional way: in case of error (e.g. a failed assertion, a state changing operation in a static context, etc.) the proof generation simply fails and no transaction request is generated, spending no network gas or user funds. 
@@ -132,7 +132,7 @@ Since the public call is made asynchronously, any return values or side effects ```rust title="enqueue_public" showLineNumbers self.enqueue_self._deposit(AztecAddress::from_field(on_behalf_of), amount, collateral_asset); ``` -> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L125-L127 +> Source code: noir-projects/noir-contracts/contracts/app/lending_contract/src/main.nr#L125-L127 It is also possible to create public functions that can _only_ be invoked by privately enqueueing a call from the same contract, which can be very useful to update public state after private execution (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[only_self]`. @@ -144,7 +144,7 @@ PublicChecks::at(PUBLIC_CHECKS_ADDRESS) .check_block_number(operation, value) .enqueue_view_incognito(context); ``` -> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/utils.nr#L19-L23 +> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/utils.nr#L19-L23 Note that this reveals what public function is being called on what contract, and perhaps more importantly which contract enqueued the call during private execution. 
@@ -158,7 +158,7 @@ An example of how a deadline can be checked using the `PublicChecks` contract fo ```rust title="call-check-deadline" showLineNumbers privately_check_timestamp(Comparator.LT, config.deadline, self.context); ``` -> Source code: noir-projects/noir-contracts/contracts/app/crowdfunding_contract/src/main.nr#L48-L50 +> Source code: noir-projects/noir-contracts/contracts/app/crowdfunding_contract/src/main.nr#L48-L50 `privately_check_timestamp` and `privately_check_block_number` are helper functions around the call to the `PublicChecks` contract: @@ -182,7 +182,7 @@ pub fn privately_check_block_number(operation: u8, value: u32, context: &mut Pri .enqueue_view_incognito(context); } ``` -> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/utils.nr#L5-L25 +> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/utils.nr#L5-L25 This is what the implementation of the check timestamp functionality looks like: @@ -198,7 +198,7 @@ fn check_timestamp(operation: u8, value: u64) { assert(compare(lhs_field, operation, rhs_field), "Timestamp mismatch."); } ``` -> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/main.nr#L15-L25 +> Source code: noir-projects/noir-contracts/contracts/protocol/public_checks_contract/src/main.nr#L15-L25 :::note @@ -207,7 +207,7 @@ To add it as a dependency, point to the aztec-packages repository: ```toml [dependencies] -public_checks = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "v4.0.0-nightly.20260217", directory = "noir-projects/noir-contracts/contracts/protocol/public_checks_contract" } +public_checks = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "v4.0.0-nightly.20260218", directory = "noir-projects/noir-contracts/contracts/protocol/public_checks_contract" } ``` ::: @@ -232,7 +232,7 @@ self.enqueue(Token::at(config.accepted_asset).transfer_in_public( authwit_nonce, )); ``` -> Source 
code: noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr#L165-L172 +> Source code: noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr#L165-L172 :::note @@ -260,7 +260,7 @@ This is used to get a result out of an execution, either private or public. It c const balance = await contract.methods.balance_of_public(newAccountAddress).simulate({ from: newAccountAddress }); expect(balance).toEqual(1n); ``` -> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 +> Source code: yarn-project/end-to-end/src/composed/docs_examples.test.ts#L49-L52 :::warning @@ -274,7 +274,7 @@ This creates a transaction, generates proofs for private execution, broadcasts t ```typescript title="send_tx" showLineNumbers await contract.methods.buy_pack(seed).send({ from: firstPlayer }); ``` -> Source code: yarn-project/end-to-end/src/e2e_card_game.test.ts#L113-L115 +> Source code: yarn-project/end-to-end/src/e2e_card_game.test.ts#L113-L115 You can also use `send` to check for execution failures in testing contexts by expecting the transaction to throw: @@ -284,7 +284,7 @@ await expect( claimContract.methods.claim(anotherDonationNote, donorAddress).send({ from: unrelatedAddress }), ).rejects.toThrow('hinted_note.owner == self.msg_sender()'); ``` -> Source code: yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts#L195-L199 +> Source code: yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts#L195-L199 ## Next Steps diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/contract_creation.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/contract_creation.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/contract_creation.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/contract_creation.md diff --git 
a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md similarity index 90% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md index 8295301cfed4..b65656cc13ed 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/data_structures.md @@ -6,7 +6,7 @@ references: ["l1-contracts/src/core/libraries/DataStructures.sol"] This page documents the Solidity structs used for L1-L2 message passing in the Aztec protocol. 
-**Source**: [DataStructures.sol](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/libraries/DataStructures.sol) +**Source**: [DataStructures.sol](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/libraries/DataStructures.sol) ## `L1Actor` @@ -23,7 +23,7 @@ struct L1Actor { uint256 chainId; } ``` -> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L11-L22 +> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L11-L22 ## `L2Actor` @@ -41,12 +41,12 @@ struct L2Actor { uint256 version; } ``` -> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L24-L35 +> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L24-L35 ## `L1ToL2Msg` -A message sent from L1 to L2. The `secretHash` field contains the hash of a secret pre-image that must be known to consume the message on L2. Use [`computeSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/yarn-project/stdlib/src/hash/hash.ts) to compute it from a secret. +A message sent from L1 to L2. The `secretHash` field contains the hash of a secret pre-image that must be known to consume the message on L2. Use [`computeSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/yarn-project/stdlib/src/hash/hash.ts) to compute it from a secret. 
```solidity title="l1_to_l2_msg" showLineNumbers /** @@ -66,7 +66,7 @@ struct L1ToL2Msg { uint256 index; } ``` -> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L37-L55 +> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L37-L55 ## `L2ToL1Msg` @@ -87,7 +87,7 @@ struct L2ToL1Msg { bytes32 content; } ``` -> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L57-L70 +> Source code: l1-contracts/src/core/libraries/DataStructures.sol#L57-L70 ## See also diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/inbox.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/inbox.md similarity index 94% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/inbox.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/inbox.md index 49459d76b217..223e1fcd5a78 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/inbox.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/inbox.md @@ -7,7 +7,7 @@ references: ["l1-contracts/src/core/interfaces/messagebridge/IInbox.sol"] The `Inbox` is a contract deployed on L1 that handles message passing from L1 to L2. -**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/messagebridge/Inbox.sol). 
+**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/messagebridge/Inbox.sol). ## `sendL2Message()` @@ -27,14 +27,14 @@ function sendL2Message(DataStructures.L2Actor memory _recipient, bytes32 _conten external returns (bytes32, uint256); ``` -> Source code: l1-contracts/src/core/interfaces/messagebridge/IInbox.sol#L35-L48 +> Source code: l1-contracts/src/core/interfaces/messagebridge/IInbox.sol#L35-L48 | Name | Type | Description | | ----------- | -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | Recipient | [`L2Actor`](./data_structures.md#l2actor) | The recipient of the message. The recipient's version **MUST** match the inbox version and the actor must be an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to a single field for rollup purposes. If the content is small enough it can be passed directly, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/libraries/crypto/Hash.sol) utilities with `sha256ToField` functions). 
| -| Secret Hash | `field` (~254 bits) | A hash of a secret used when consuming the message on L2. Keep this preimage secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed). Use [`computeSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/yarn-project/stdlib/src/hash/hash.ts) to compute it from a secret. | +| Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to a single field for rollup purposes. If the content is small enough it can be passed directly, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/libraries/crypto/Hash.sol) utilities with `sha256ToField` functions). | +| Secret Hash | `field` (~254 bits) | A hash of a secret used when consuming the message on L2. Keep this preimage secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed). Use [`computeSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/yarn-project/stdlib/src/hash/hash.ts) to compute it from a secret. | | ReturnValue | `(bytes32, uint256)` | The message hash (used as an identifier) and the leaf index in the tree. | #### Edge cases @@ -80,7 +80,7 @@ Consumes a message tree for a given checkpoint number. 
*/ function consume(uint256 _toConsume) external returns (bytes32); ``` -> Source code: l1-contracts/src/core/interfaces/messagebridge/IInbox.sol#L50-L62 +> Source code: l1-contracts/src/core/interfaces/messagebridge/IInbox.sol#L50-L62 | Name | Type | Description | diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/index.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/outbox.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/outbox.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/outbox.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/outbox.md index 0c9c42fb0dd5..5ca098a551a2 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/outbox.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/outbox.md @@ -7,7 +7,7 @@ references: ["l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol"] The `Outbox` is a contract deployed on L1 that handles message passing from L2 to L1. Portal contracts call `consume()` to receive and process messages that were sent from L2 contracts. 
The Rollup contract inserts message roots via `insert()` when epochs are proven. -**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/core/messagebridge/Outbox.sol). +**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/core/messagebridge/Outbox.sol). ## `insert()` @@ -23,7 +23,7 @@ Inserts the root of a merkle tree containing all of the L2 to L1 messages in an */ function insert(Epoch _epoch, bytes32 _root) external; ``` -> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L18-L27 +> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L18-L27 | Name | Type | Description | @@ -57,7 +57,7 @@ function consume( bytes32[] calldata _path ) external; ``` -> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L29-L46 +> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L29-L46 | Name | Type | Description | @@ -92,7 +92,7 @@ Checks if an L2 to L1 message in a specific epoch has been consumed. 
*/ function hasMessageBeenConsumedAtEpoch(Epoch _epoch, uint256 _leafId) external view returns (bool); ``` -> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L48-L56 +> Source code: l1-contracts/src/core/interfaces/messagebridge/IOutbox.sol#L48-L56 | Name | Type | Description | diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/registry.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/registry.md similarity index 90% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/registry.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/registry.md index 1a845a1cddad..9fec31a6d0f9 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/ethereum-aztec-messaging/registry.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/ethereum-aztec-messaging/registry.md @@ -7,7 +7,7 @@ references: ["l1-contracts/src/governance/interfaces/IRegistry.sol"] The Registry is a contract deployed on L1 that tracks canonical and historical rollup instances. It allows you to query the current rollup contract and look up prior deployments by version. -**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/governance/interfaces/IRegistry.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/l1-contracts/src/governance/Registry.sol). 
+**Links**: [Interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/governance/interfaces/IRegistry.sol), [Implementation](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/l1-contracts/src/governance/Registry.sol). ## `numberOfVersions()` @@ -16,7 +16,7 @@ Retrieves the number of versions that have been deployed. ```solidity title="registry_number_of_versions" showLineNumbers function numberOfVersions() external view returns (uint256); ``` -> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L25-L27 +> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L25-L27 | Name | Description | @@ -30,7 +30,7 @@ Retrieves the current rollup contract. ```solidity title="registry_get_canonical_rollup" showLineNumbers function getCanonicalRollup() external view returns (IHaveVersion); ``` -> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L17-L19 +> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L17-L19 | Name | Description | @@ -44,7 +44,7 @@ Retrieves the rollup contract for a specific version. 
```solidity title="registry_get_rollup" showLineNumbers function getRollup(uint256 _chainId) external view returns (IHaveVersion); ``` -> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L21-L23 +> Source code: l1-contracts/src/governance/interfaces/IRegistry.sol#L21-L23 | Name | Description | diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/fees.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/fees.md similarity index 98% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/fees.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/fees.md index 360f8ce08ebd..0261faf23a34 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/fees.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/fees.md @@ -79,7 +79,7 @@ export class GasSettings { public readonly maxPriorityFeesPerGas: GasFees, ) {} ``` -> Source code: yarn-project/stdlib/src/gas/gas_settings.ts#L17-L26 +> Source code: yarn-project/stdlib/src/gas/gas_settings.ts#L17-L26 diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/index.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/pxe/index.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/pxe/index.md similarity index 100% rename from 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/pxe/index.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/pxe/index.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/state_management.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/state_management.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/state_management.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/state_management.md index 1d2908942fdf..c5d9e6d7928f 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/state_management.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/state_management.md @@ -99,14 +99,14 @@ pub struct UintNote { pub value: u128, } ``` -> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 +> Source code: noir-projects/aztec-nr/uint-note/src/uint_note.nr#L27-L34 **`FieldNote`** - Stores a single `Field` value. 
### Creating and Destroying Notes -The [lifecycle module](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr) contains functions for note management: +The [lifecycle module](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr) contains functions for note management: - `create_note` - Creates a new note, computing its hash and pushing it to the context - `destroy_note` - Nullifies a note by computing and emitting its nullifier @@ -115,7 +115,7 @@ Notes created and nullified within the same transaction are called **transient n ### Note Interface -Notes must implement the `NoteHash` trait from [note_interface.nr](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/aztec-nr/aztec/src/note/note_interface.nr): +Notes must implement the `NoteHash` trait from [note_interface.nr](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/aztec-nr/aztec/src/note/note_interface.nr): - `compute_note_hash(self, owner, storage_slot, randomness)` - Computes the note's commitment - `compute_nullifier(self, context, owner, note_hash_for_nullification)` - Computes the nullifier for consumption diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/transactions.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/transactions.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/transactions.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/transactions.md index 87c48b79ead5..5acb172570e6 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/transactions.md +++ 
b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/transactions.md @@ -59,7 +59,7 @@ constructor( public salt: Fr, ) {} ``` -> Source code: yarn-project/stdlib/src/tx/tx_request.ts#L15-L28 +> Source code: yarn-project/stdlib/src/tx/tx_request.ts#L15-L28 Where: @@ -114,7 +114,7 @@ export class TxExecutionRequest { public salt = Fr.random(), ) {} ``` -> Source code: yarn-project/stdlib/src/tx/tx_execution_request.ts#L23-L64 +> Source code: yarn-project/stdlib/src/tx/tx_execution_request.ts#L23-L64 An account contract validates that the transaction request has been authorized via its specified authorization mechanism, via the `is_valid_impl` function. Here is an example using an ECDSA signature: @@ -147,7 +147,7 @@ fn is_valid_impl(context: &mut PrivateContext, outer_hash: Field) -> bool { ) } ``` -> Source code: noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr#L77-L104 +> Source code: noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr#L77-L104 Transaction requests are simulated in the PXE in order to generate the necessary inputs for generating proofs. 
Once transactions are proven, a `Tx` object is created and can be sent to the network to be included in a block: @@ -190,7 +190,7 @@ export class Tx extends Gossipable { super(); } ``` -> Source code: yarn-project/stdlib/src/tx/tx.ts#L26-L63 +> Source code: yarn-project/stdlib/src/tx/tx.ts#L26-L63 #### Contract Interaction Methods @@ -226,7 +226,7 @@ public async simulate( options: SimulateInteractionOptions, ): Promise> { ``` -> Source code: yarn-project/aztec.js/src/contract/contract_function_interaction.ts#L88-L111 +> Source code: yarn-project/aztec.js/src/contract/contract_function_interaction.ts#L88-L111 ##### `send` @@ -251,7 +251,7 @@ public async send( options: SendInteractionOptions, ): Promise> { ``` -> Source code: yarn-project/aztec.js/src/contract/base_contract_interaction.ts#L36-L55 +> Source code: yarn-project/aztec.js/src/contract/base_contract_interaction.ts#L36-L55 ### Batch Transactions @@ -267,7 +267,7 @@ export class BatchCall extends BaseContractInteraction { super(wallet); } ``` -> Source code: yarn-project/aztec.js/src/contract/batch_call.ts#L18-L26 +> Source code: yarn-project/aztec.js/src/contract/batch_call.ts#L18-L26 ### Enabling Transaction Semantics diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/wallets.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/wallets.md similarity index 98% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/wallets.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/wallets.md index ab743d8ddefa..d6009d67b934 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/foundational-topics/wallets.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/foundational-topics/wallets.md @@ -18,7 +18,7 @@ In addition to these usual responsibilities, wallets in Aztec also need to track The 
first step for any wallet is to let the user set up their [accounts](./accounts/index.md). An account in Aztec is represented onchain by its corresponding account contract that the user must deploy to begin interacting with the network. This account contract dictates how transactions are authenticated and executed. -A wallet must support at least one specific account contract implementation, which means being able to deploy such a contract, as well as interacting with it when sending transactions. Code-wise, this requires [implementing the `AccountContract` interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/yarn-project/aztec.js/src/account/account_contract.ts). +A wallet must support at least one specific account contract implementation, which means being able to deploy such a contract, as well as interacting with it when sending transactions. Code-wise, this requires [implementing the `AccountContract` interface](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/yarn-project/aztec.js/src/account/account_contract.ts). Note that users must be able to receive funds in Aztec before deploying their account. A wallet should let a user generate a [deterministic complete address](./accounts/keys.md#address-derivation) without having to interact with the network, so they can share it with others to receive funds. This requires that the wallet pins a specific contract implementation, its initialization arguments, a deployment salt, and the user's keys. These values yield a deterministic address, so when the account contract is actually deployed, it is available at the precalculated address. Once the account contract is deployed, the user can start sending transactions using it as the transaction origin. 
@@ -26,7 +26,7 @@ Note that users must be able to receive funds in Aztec before deploying their ac Every transaction in Aztec is broadcast to the network as a zero-knowledge proof of correct execution, in order to preserve privacy. This means that transaction proofs are generated on the wallet and not on a remote node. This is one of the biggest differences with regard to EVM chain wallets. -A wallet is responsible for **creating** an _execution request_ out of one or more _function calls_ requested by a dapp. For example, a dapp may request a wallet to "invoke the `transfer` function on the contract at `0x1234` with the following arguments", in response to a user action. The wallet turns that into an execution request with the signed instructions to execute that function call from the user's account contract. In an [ECDSA-based account](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr), for instance, this is an execution request that encodes the function call in the _entrypoint payload_, and includes its ECDSA signature with the account's signing private key. +A wallet is responsible for **creating** an _execution request_ out of one or more _function calls_ requested by a dapp. For example, a dapp may request a wallet to "invoke the `transfer` function on the contract at `0x1234` with the following arguments", in response to a user action. The wallet turns that into an execution request with the signed instructions to execute that function call from the user's account contract. In an [ECDSA-based account](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr), for instance, this is an execution request that encodes the function call in the _entrypoint payload_, and includes its ECDSA signature with the account's signing private key. 
Once the _execution request_ is created, the wallet is responsible for **simulating** and **proving** the execution of its private functions. The simulation yields an execution trace, which can be used to provide the user with a list of side effects of the private execution of the transaction. During this simulation, the wallet is responsible for providing data to the virtual machine, such as private notes, encryption keys, or nullifier secrets. This execution trace is fed into the prover, which returns a zero-knowledge proof that guarantees correct execution and hides all private information. The output of this process is a _transaction object_. @@ -84,5 +84,5 @@ export type Account = EntrypointInterface & getAddress(): AztecAddress; }; ``` -> Source code: yarn-project/aztec.js/src/account/account.ts#L23-L34 +> Source code: yarn-project/aztec.js/src/account/account.ts#L23-L34 diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/community_calls.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/community_calls.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/community_calls.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/community_calls.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/_category_.json 
similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/limitations.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/limitations.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/limitations.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/limitations.md index fd303557429a..98cdb97de74a 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/limitations.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/limitations.md @@ -197,7 +197,7 @@ pub global MAX_NOTE_HASH_READ_REQUESTS_PER_CALL: u32 = 16; pub global MAX_NULLIFIER_READ_REQUESTS_PER_CALL: u32 = 16; pub global MAX_KEY_VALIDATION_REQUESTS_PER_CALL: u32 = MAX_PRIVATE_LOGS_PER_CALL; ``` -> Source code: noir-projects/noir-protocol-circuits/crates/types/src/constants.nr#L33-L100 +> Source code: noir-projects/noir-protocol-circuits/crates/types/src/constants.nr#L33-L100 #### What are the consequences? 
diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/privacy_considerations.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/privacy_considerations.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/considerations/privacy_considerations.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/considerations/privacy_considerations.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/glossary.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/glossary.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/glossary.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/glossary.md index 02ebf629a5f0..12a920be794d 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/glossary.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/glossary.md @@ -34,7 +34,7 @@ Full reference [here](../cli/aztec_wallet_cli_reference). A [Node package](https://www.npmjs.com/package/@aztec/aztec.js) to help make Aztec dApps. -Read more and review the source code [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260217/yarn-project/aztec.js). +Read more and review the source code [here](https://github.com/AztecProtocol/aztec-packages/blob/v4.0.0-nightly.20260218/yarn-project/aztec.js). 
### Aztec.nr diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/migration_notes.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/migration_notes.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/migration_notes.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/migration_notes.md index 474d7dc366e3..a01f87c10884 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/resources/migration_notes.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/resources/migration_notes.md @@ -34,9 +34,9 @@ aztec-up VERSION= bash -i <(curl -sL https://install.aztec.network/) ``` -For example, to install version `4.0.0-nightly.20260217`: +For example, to install version `4.0.0-nightly.20260218`: ```bash -VERSION=4.0.0-nightly.20260217 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260217) +VERSION=4.0.0-nightly.20260218 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260218) ``` **Key changes:** @@ -5436,7 +5436,7 @@ impl Storage { The `protocol` package is now being reexported from `aztec`. It can be accessed through `dep::aztec::protocol`. 
```toml -aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="yarn-project/aztec-nr/aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="yarn-project/aztec-nr/aztec" } ``` ### [Aztec.nr] key type definition in Map @@ -5526,8 +5526,8 @@ const tokenBigInt = (await bridge.methods.token().simulate()).inner; ### [Aztec.nr] Add `protocol` to Nargo.toml ```toml -aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="yarn-project/aztec-nr/aztec" } -protocol = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260217", directory="yarn-project/noir-protocol-circuits/crates/types"} +aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="yarn-project/aztec-nr/aztec" } +protocol = { git="https://github.com/AztecProtocol/aztec-packages/", tag="v4.0.0-nightly.20260218", directory="yarn-project/noir-protocol-circuits/crates/types"} ``` ### [Aztec.nr] moving compute_address func to AztecAddress diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/_category_.json rename to 
docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/counter_contract.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/counter_contract.md similarity index 92% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/counter_contract.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/counter_contract.md index 351fd6bd1f51..8264cb03e8b5 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/counter_contract.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/counter_contract.md @@ -9,7 +9,7 @@ import Image from "@theme/IdealImage"; In this guide, we will create our first Aztec.nr smart contract. We will build a simple private counter, where you can keep your own private counter - so no one knows what ID you are at or when you increment! This contract will get you started with the basic setup and syntax of Aztec.nr, but doesn't showcase all of the awesome stuff Aztec is capable of. -This tutorial is compatible with the Aztec version `v4.0.0-nightly.20260217`. Install the correct version with `VERSION=4.0.0-nightly.20260217 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260217)`. Or if you'd like to use a different version, you can find the relevant tutorial by clicking the version dropdown at the top of the page. +This tutorial is compatible with the Aztec version `v4.0.0-nightly.20260218`. Install the correct version with `VERSION=4.0.0-nightly.20260218 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260218)`. 
Or if you'd like to use a different version, you can find the relevant tutorial by clicking the version dropdown at the top of the page. ## Prerequisites @@ -41,8 +41,8 @@ Add the following dependencies to `Nargo.toml` under the autogenerated content: ```toml [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="aztec" } -balance_set = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260217", directory="balance-set" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="aztec" } +balance_set = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v4.0.0-nightly.20260218", directory="balance-set" } ``` ## Define the functions @@ -81,7 +81,7 @@ use aztec::{ }; use balance_set::BalanceSet; ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L7-L16 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L7-L16 - `macros::{functions::{external, initializer}, storage::storage}` @@ -112,7 +112,7 @@ struct Storage { counters: Owned, Context>, } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L18-L23 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L18-L23 ## Keep the counter private @@ -131,7 +131,7 @@ fn initialize(headstart: u64, owner: AztecAddress) { ); } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L25-L34 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L25-L34 This function accesses the counters from storage. It adds the `headstart` value to the `owner`'s counter using `at().add()`, then calls `.deliver(MessageDelivery.ONCHAIN_CONSTRAINED)` to ensure the note is delivered onchain. 
@@ -149,7 +149,7 @@ fn increment(owner: AztecAddress) { self.storage.counters.at(owner).add(1).deliver(MessageDelivery.ONCHAIN_CONSTRAINED); } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L36-L42 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L36-L42 The `increment` function works similarly to the `initialize` function. It logs a debug message, then adds 1 to the owner's counter and delivers the note onchain. @@ -164,7 +164,7 @@ unconstrained fn get_counter(owner: AztecAddress) -> pub u128 { self.storage.counters.at(owner).balance_of() } ``` -> Source code: docs/examples/contracts/counter_contract/src/main.nr#L44-L49 +> Source code: docs/examples/contracts/counter_contract/src/main.nr#L44-L49 This is a `utility` function used to obtain the counter value outside of a transaction. We access the `owner`'s balance from the `counters` storage variable using `at(owner)`, then call `balance_of()` to retrieve the current count. This yields a private counter that only the owner can decrypt. 
diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/recursive_verification.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/recursive_verification.md similarity index 97% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/recursive_verification.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/recursive_verification.md index d77f6b6d7a53..8847a455e812 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/recursive_verification.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/recursive_verification.md @@ -12,7 +12,7 @@ This is called "recursive" verification because the proof is verified inside an ::: :::tip Full Working Example -The complete code for this tutorial is available in the [docs/examples](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/docs/examples) directory. Clone it to follow along or use it as a reference. +The complete code for this tutorial is available in the [docs/examples](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/docs/examples) directory. Clone it to follow along or use it as a reference. 
::: ## Prerequisites @@ -21,7 +21,7 @@ Before starting, ensure you have the following installed and configured: - Node.js (v22 or later) - yarn package manager -- Aztec CLI (version v4.0.0-nightly.20260217) +- Aztec CLI (version v4.0.0-nightly.20260218) - Nargo (version 1.0.0-beta.15) - Familiarity with [Noir syntax](https://noir-lang.org/docs) and [Aztec contract basics](../../aztec-nr/index.md) @@ -29,7 +29,7 @@ Install the required tools: ```bash # Install Aztec CLI -VERSION=4.0.0-nightly.20260217 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260217) +VERSION=4.0.0-nightly.20260218 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260218) # Install Nargo via noirup curl -L https://raw.githubusercontent.com/noir-lang/noirup/refs/heads/main/install | bash @@ -139,7 +139,7 @@ fn test_main() { main(1, 2); } ``` -> Source code: docs/examples/circuits/hello_circuit/src/main.nr#L1-L10 +> Source code: docs/examples/circuits/hello_circuit/src/main.nr#L1-L10 This is intentionally minimal to focus on the verification pattern. In production, you would replace `assert(x != y)` with meaningful computations like: @@ -178,7 +178,7 @@ authors = [""] [dependencies] ``` -> Source code: docs/examples/circuits/hello_circuit/Nargo.toml#L1-L8 +> Source code: docs/examples/circuits/hello_circuit/Nargo.toml#L1-L8 **Note**: This is a vanilla Noir circuit, not an Aztec contract. It has `type = "bin"` (binary) and no Aztec dependencies. The circuit is compiled with `nargo`, not `aztec compile`. This distinction is important—you can verify proofs from _any_ Noir circuit inside Aztec contracts. 
@@ -255,8 +255,8 @@ type = "contract" authors = ["[YOUR_NAME]"] [dependencies] -aztec = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260217", directory = "aztec" } -bb_proof_verification = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "v4.0.0-nightly.20260217", directory = "barretenberg/noir/bb_proof_verification" } +aztec = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260218", directory = "aztec" } +bb_proof_verification = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "v4.0.0-nightly.20260218", directory = "barretenberg/noir/bb_proof_verification" } ``` **Key differences from the circuit's Nargo.toml**: @@ -333,7 +333,7 @@ pub contract ValueNotEqual { } } ``` -> Source code: docs/examples/contracts/recursive_verification_contract/src/main.nr#L1-L78 +> Source code: docs/examples/contracts/recursive_verification_contract/src/main.nr#L1-L78 ### Storage Variables Explained @@ -469,14 +469,14 @@ Create the following files in your project root directory. 
"recursion": "tsx scripts/run_recursion.ts" }, "dependencies": { - "@aztec/accounts": "4.0.0-nightly.20260217", - "@aztec/aztec.js": "4.0.0-nightly.20260217", - "@aztec/bb.js": "4.0.0-nightly.20260217", - "@aztec/kv-store": "4.0.0-nightly.20260217", - "@aztec/noir-contracts.js": "4.0.0-nightly.20260217", - "@aztec/noir-noir_js": "4.0.0-nightly.20260217", - "@aztec/pxe": "4.0.0-nightly.20260217", - "@aztec/test-wallet": "4.0.0-nightly.20260217", + "@aztec/accounts": "4.0.0-nightly.20260218", + "@aztec/aztec.js": "4.0.0-nightly.20260218", + "@aztec/bb.js": "4.0.0-nightly.20260218", + "@aztec/kv-store": "4.0.0-nightly.20260218", + "@aztec/noir-contracts.js": "4.0.0-nightly.20260218", + "@aztec/noir-noir_js": "4.0.0-nightly.20260218", + "@aztec/pxe": "4.0.0-nightly.20260218", + "@aztec/test-wallet": "4.0.0-nightly.20260218", "tsx": "^4.20.6" }, "devDependencies": { @@ -617,7 +617,7 @@ await barretenbergAPI.destroy(); console.log("Done"); exit(); ``` -> Source code: docs/examples/ts/recursive_verification/scripts/generate_data.ts#L1-L74 +> Source code: docs/examples/ts/recursive_verification/scripts/generate_data.ts#L1-L74 ### Understanding the Proof Generation Pipeline @@ -921,7 +921,7 @@ export async function getSponsoredFPCInstance() { ); } ``` -> Source code: docs/examples/ts/recursive_verification/scripts/sponsored_fpc.ts#L1-L16 +> Source code: docs/examples/ts/recursive_verification/scripts/sponsored_fpc.ts#L1-L16 This utility computes the address of the pre-deployed sponsored FPC contract. The salt ensures we get the same address every time. For more information about fee payment options, see [Paying Fees](../../aztec-js/how_to_pay_fees.md). @@ -962,7 +962,7 @@ The counter starts at 10 (set during deployment), and after successful proof ver ## Quick Reference -If you want to run all commands at once, or if you're starting fresh, here's the complete workflow. 
You can also reference the [full working example](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/docs/examples) in the main repository. +If you want to run all commands at once, or if you're starting fresh, here's the complete workflow. You can also reference the [full working example](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/docs/examples) in the main repository. ```bash # Install dependencies (after creating package.json and tsconfig.json) diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/token_contract.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/token_contract.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/token_contract.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/token_contract.md index 74cf9d7e5110..c69b38148059 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/contract_tutorials/token_contract.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/contract_tutorials/token_contract.md @@ -23,7 +23,7 @@ This is an intermediate tutorial that assumes you have: - Completed the [Counter Contract tutorial](./counter_contract.md) - A Running Aztec local network (see the Counter tutorial for setup) - Basic understanding of Aztec.nr syntax and structure -- Aztec toolchain installed (`VERSION=4.0.0-nightly.20260217 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260217)`) +- Aztec toolchain installed (`VERSION=4.0.0-nightly.20260218 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260218)`) If you haven't completed the Counter Contract tutorial, please do so first as we'll skip the basic setup steps covered there. 
@@ -44,7 +44,7 @@ cd bob_token_contract yarn init # This is to ensure yarn uses node_modules instead of pnp for dependency installation yarn config set nodeLinker node-modules -yarn add @aztec/aztec.js@v4.0.0-nightly.20260217 @aztec/accounts@v4.0.0-nightly.20260217 @aztec/test-wallet@v4.0.0-nightly.20260217 @aztec/kv-store@v4.0.0-nightly.20260217 +yarn add @aztec/aztec.js@v4.0.0-nightly.20260218 @aztec/accounts@v4.0.0-nightly.20260218 @aztec/test-wallet@v4.0.0-nightly.20260218 @aztec/kv-store@v4.0.0-nightly.20260218 aztec init ``` @@ -71,7 +71,7 @@ name = "bob_token_contract" type = "contract" [dependencies] -aztec = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260217", directory = "aztec" } +aztec = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260218", directory = "aztec" } ``` Since we're here, let's import more specific stuff from this library: @@ -152,7 +152,7 @@ fn setup() { self.storage.owner.write(self.msg_sender()); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L32-L39 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L32-L39 The `#[initializer]` decorator ensures this runs once during deployment. Only Giggle's address will have the power to mint new BOB tokens for employees. 
@@ -172,7 +172,7 @@ fn mint_public(employee: AztecAddress, amount: u64) { self.storage.public_balances.at(employee).write(current_balance + amount); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L41-L51 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L41-L51 This public minting function: @@ -204,7 +204,7 @@ fn transfer_public(to: AztecAddress, amount: u64) { self.storage.public_balances.at(to).write(recipient_balance + amount); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L53-L67 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L53-L67 This might be used when: @@ -228,7 +228,7 @@ fn transfer_ownership(new_owner: AztecAddress) { self.storage.owner.write(new_owner); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L69-L79 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L69-L79 ## Your First Deployment - Let's See It Work @@ -361,8 +361,8 @@ For something like balances, you can use a simple library called `easy_private_s ```toml [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag="v4.0.0-nightly.20260217", directory="aztec" } -balance_set = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260217", directory = "balance-set" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag="v4.0.0-nightly.20260218", directory="aztec" } +balance_set = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v4.0.0-nightly.20260218", directory = "balance-set" } ``` Then import `BalanceSet` in our contract: @@ -391,7 +391,7 @@ struct Storage { private_balances: Owned, Context>, } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L19-L30 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L19-L30 The `private_balances` use `BalanceSet` which manages encrypted notes automatically. 
@@ -412,7 +412,7 @@ fn public_to_private(amount: u64) { ); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L81-L92 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L81-L92 And the helper function: @@ -426,7 +426,7 @@ fn _deduct_public_balance(owner: AztecAddress, amount: u64) { self.storage.public_balances.at(owner).write(balance - amount); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L94-L102 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L94-L102 By calling `public_to_private` we're telling the network "deduct this amount from my balance" while simultaneously creating a Note with that balance in privateland. @@ -449,7 +449,7 @@ fn transfer_private(to: AztecAddress, amount: u64) { ); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L104-L117 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L104-L117 This function simply nullifies the sender's notes, while adding them to the recipient. @@ -479,7 +479,7 @@ unconstrained fn public_balance_of(owner: AztecAddress) -> pub u64 { self.storage.public_balances.at(owner).read() } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L119-L129 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L119-L129 ## Part 3: Securing Private Minting @@ -516,7 +516,7 @@ fn _assert_is_owner(address: AztecAddress) { assert_eq(address, self.storage.owner.read(), "Only Giggle can mint BOB tokens"); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L131-L137 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L131-L137 Now we can add a secure private minting function. 
It looks pretty easy, and it is, since the whole thing will revert if the public function fails: @@ -533,7 +533,7 @@ fn mint_private(employee: AztecAddress, amount: u64) { ); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L139-L150 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L139-L150 This pattern ensures: @@ -566,7 +566,7 @@ fn _credit_public_balance(owner: AztecAddress, amount: u64) { self.storage.public_balances.at(owner).write(balance + amount); } ``` -> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L152-L170 +> Source code: docs/examples/contracts/bob_token_contract/src/main.nr#L152-L170 Now you've made changes to your contract, you need to recompile your contract. diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/faceid_wallet.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/faceid_wallet.md similarity index 98% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/faceid_wallet.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/faceid_wallet.md index d6381cbdb4ce..51017e5224df 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/faceid_wallet.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/faceid_wallet.md @@ -90,4 +90,4 @@ Check out the [CLI Wallet Reference](../cli/aztec_wallet_cli_reference.md) for t In this tutorial, we created an account with the Aztec's [CLI Wallet](../cli/aztec_wallet_cli_reference.md), using the Apple Mac's Secure Enclave to store the private key. -You can use a multitude of authentication methods, for example with RSA you could use a passport as a recovery, or even as a signer in a multisig. 
All of this is based on the [account contract](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts/account). +You can use a multitude of authentication methods, for example with RSA you could use a passport as a recovery, or even as a signer in a multisig. All of this is based on the [account contract](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts/account). diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/_category_.json b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/_category_.json similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/_category_.json rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/_category_.json diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/aztecjs-getting-started.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/aztecjs-getting-started.md similarity index 90% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/aztecjs-getting-started.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/aztecjs-getting-started.md index b8f59e82dd97..008e7d95cfa6 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/aztecjs-getting-started.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/aztecjs-getting-started.md @@ -9,7 +9,7 @@ import Image from "@theme/IdealImage"; In this guide, we will retrieving the local network and deploy a pre-written token contract to it using Aztec.js. 
[Check out the source code](https://github.com/AztecProtocol/aztec-packages/blob/master/noir-projects/noir-contracts/contracts/app/token_contract/src/main.nr). We will then use Aztec.js to interact with this contract and transfer tokens. -Before starting, make sure to be running Aztec local network at version 4.0.0-nightly.20260217. Check out [the guide](../../tutorials/local_network.md) for info about that. +Before starting, make sure to be running Aztec local network at version 4.0.0-nightly.20260218. Check out [the guide](../../tutorials/local_network.md) for info about that. ## Set up the project @@ -36,7 +36,7 @@ Never heard of `tsx`? Well, it will just run `typescript` with reasonable defaul Let's also import the Aztec dependencies for this tutorial: ```sh -yarn add @aztec/aztec.js@4.0.0-nightly.20260217 @aztec/accounts@4.0.0-nightly.20260217 @aztec/noir-contracts.js@4.0.0-nightly.20260217 @aztec/wallets@4.0.0-nightly.20260217 +yarn add @aztec/aztec.js@4.0.0-nightly.20260218 @aztec/accounts@4.0.0-nightly.20260218 @aztec/noir-contracts.js@4.0.0-nightly.20260218 @aztec/wallets@4.0.0-nightly.20260218 ``` Aztec.js assumes your project is using ESM, so make sure you add `"type": "module"` to `package.json`. You probably also want at least a `start` script. For example: @@ -79,7 +79,7 @@ const [alice, bob] = await getInitialTestAccountsData(); await wallet.createSchnorrAccount(alice.secret, alice.salt); await wallet.createSchnorrAccount(bob.secret, bob.salt); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L1-L11 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L1-L11 **Step 3: Verify the script runs** @@ -94,7 +94,7 @@ If there are no errors, you're ready to continue. For more details on connecting ## Deploy the token contract -Now that we have our accounts loaded, let's deploy a pre-compiled token contract from the Aztec library. 
You can find the full code for the contract [here (GitHub link)](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts/app/token_contract/src). +Now that we have our accounts loaded, let's deploy a pre-compiled token contract from the Aztec library. You can find the full code for the contract [here (GitHub link)](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts/app/token_contract/src). Add the following to `index.ts` to import the contract and deploy it with Alice as the admin: @@ -109,7 +109,7 @@ const token = await TokenContract.deploy( 18, ).send({ from: alice.address }); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L13-L23 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L13-L23 ## Mint and transfer @@ -121,7 +121,7 @@ await token.methods .mint_to_private(alice.address, 100) .send({ from: alice.address }); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L25-L29 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L25-L29 Let's check both Alice's and Bob's balances now: @@ -136,7 +136,7 @@ let bobBalance = await token.methods .simulate({ from: bob.address }); console.log(`Bob's balance: ${bobBalance}`); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L31-L40 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L31-L40 Alice should have 100 tokens, while Bob has none yet. @@ -150,7 +150,7 @@ bobBalance = await token.methods .simulate({ from: bob.address }); console.log(`Bob's balance: ${bobBalance}`); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L42-L48 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L42-L48 Bob should now see 10 tokens in his balance. @@ -162,7 +162,7 @@ Say that Alice is nice and wants to set Bob as a minter. 
Even though it's a publ ```typescript title="set_minter" showLineNumbers await token.methods.set_minter(bob.address, true).send({ from: alice.address }); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L50-L52 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L50-L52 Bob is now the minter, so he can mint some tokens to himself: @@ -176,7 +176,7 @@ bobBalance = await token.methods .simulate({ from: bob.address }); console.log(`Bob's balance: ${bobBalance}`); ``` -> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L54-L62 +> Source code: docs/examples/ts/aztecjs_getting_started/index.ts#L54-L62 :::info diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/token_bridge.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/token_bridge.md similarity index 95% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/token_bridge.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/token_bridge.md index 1774129a786a..d80cf6934ff8 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/js_tutorials/token_bridge.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/js_tutorials/token_bridge.md @@ -11,7 +11,7 @@ Imagine you own a CryptoPunk NFT on Ethereum. You want to use it in games, socia In this tutorial, you'll build a **private NFT bridge**. By the end, you'll understand how **portals** work and how **cross-chain messages** flow between L1 and L2. -Before starting, make sure you have the Aztec local network running at version v4.0.0-nightly.20260217. Check out [the local network guide](../../../getting_started_on_local_network.md) for setup instructions. +Before starting, make sure you have the Aztec local network running at version v4.0.0-nightly.20260218. 
Check out [the local network guide](../../../getting_started_on_local_network.md) for setup instructions. ## What You'll Build @@ -36,7 +36,7 @@ We want to add a few more dependencies now before we start: ```bash cd hardhat-aztec-example -yarn add @aztec/aztec.js@4.0.0-nightly.20260217 @aztec/accounts@4.0.0-nightly.20260217 @aztec/stdlib@4.0.0-nightly.20260217 @aztec/wallets@4.0.0-nightly.20260217 tsx +yarn add @aztec/aztec.js@4.0.0-nightly.20260218 @aztec/accounts@4.0.0-nightly.20260218 @aztec/stdlib@4.0.0-nightly.20260218 @aztec/wallets@4.0.0-nightly.20260218 tsx ``` Now start the local network in another terminal: @@ -96,7 +96,7 @@ Open `Nargo.toml` and make sure `aztec` is a dependency: ```toml [dependencies] -aztec = { git = "https://github.com/AztecProtocol/aztec-nr", tag = "v4.0.0-nightly.20260217", directory = "aztec" } +aztec = { git = "https://github.com/AztecProtocol/aztec-nr", tag = "v4.0.0-nightly.20260218", directory = "aztec" } ``` ### Create the NFT Note @@ -118,7 +118,7 @@ pub struct NFTNote { pub token_id: Field, } ``` -> Source code: docs/examples/contracts/nft/src/nft.nr#L1-L9 +> Source code: docs/examples/contracts/nft/src/nft.nr#L1-L9 You now have a note that represents the owner of a particular NFT. Next, move on to the contract itself. @@ -189,7 +189,7 @@ fn _mark_nft_exists(token_id: Field, exists: bool) { self.storage.nfts.at(token_id).schedule_value_change(exists); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L42-L48 +> Source code: docs/examples/contracts/nft/src/main.nr#L42-L48 This function is marked with `#[only_self]`, meaning only the contract itself can call it. It uses `schedule_value_change` to update the `nfts` storage, preventing the same NFT from being minted twice or burned when it doesn't exist. You'll call this public function from a private function later using `enqueue_self`. 
@@ -203,7 +203,7 @@ unconstrained fn notes_of(from: AztecAddress) -> Field { notes.len() as Field } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L67-L73 +> Source code: docs/examples/contracts/nft/src/main.nr#L67-L73 ### Add Minting and Burning @@ -217,7 +217,7 @@ fn set_minter(minter: AztecAddress) { self.storage.minter.initialize(minter); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L34-L40 +> Source code: docs/examples/contracts/nft/src/main.nr#L34-L40 Now for the magic - minting NFTs **privately**. The bridge will call this to mint to a user, deliver the note using [constrained message delivery](../../aztec-nr/framework-description/events_and_logs.md) (best practice when "sending someone a @@ -239,7 +239,7 @@ fn mint(to: AztecAddress, token_id: Field) { self.enqueue_self._mark_nft_exists(token_id, true); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L50-L65 +> Source code: docs/examples/contracts/nft/src/main.nr#L50-L65 The bridge will also need to burn NFTs when users withdraw back to L1: @@ -262,7 +262,7 @@ fn burn(from: AztecAddress, token_id: Field) { self.enqueue_self._mark_nft_exists(token_id, false); } ``` -> Source code: docs/examples/contracts/nft/src/main.nr#L75-L92 +> Source code: docs/examples/contracts/nft/src/main.nr#L75-L92 ### Compiling! @@ -321,7 +321,7 @@ And again, add the `aztec-nr` dependency to `Nargo.toml`. 
We also need to add th ```toml [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag = "v4.0.0-nightly.20260217", directory = "aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag = "v4.0.0-nightly.20260218", directory = "aztec" } NFTPunk = { path = "../nft" } ``` @@ -401,7 +401,7 @@ fn claim(to: AztecAddress, token_id: Field, secret: Field, message_leaf_index: F self.call(NFTPunk::at(nft).mint(to, token_id)); } ``` -> Source code: docs/examples/contracts/nft_bridge/src/main.nr#L31-L50 +> Source code: docs/examples/contracts/nft_bridge/src/main.nr#L31-L50 :::tip Secret @@ -426,7 +426,7 @@ fn exit(token_id: Field, recipient: EthAddress) { self.call(NFTPunk::at(nft).burn(self.msg_sender(), token_id)); } ``` -> Source code: docs/examples/contracts/nft_bridge/src/main.nr#L52-L65 +> Source code: docs/examples/contracts/nft_bridge/src/main.nr#L52-L65 Cross-chain messaging on Aztec is powerful because it doesn't conform to any specific format—you can structure messages however you want. @@ -498,7 +498,7 @@ contract SimpleNFT is ERC721 { } } ``` -> Source code: docs/examples/solidity/nft_bridge/SimpleNFT.sol#L2-L18 +> Source code: docs/examples/solidity/nft_bridge/SimpleNFT.sol#L2-L18 ### Create the NFT Portal @@ -583,7 +583,7 @@ function withdraw( nftContract.transferFrom(address(this), msg.sender, tokenId); } ``` -> Source code: docs/examples/solidity/nft_bridge/NFTPortal.sol#L36-L70 +> Source code: docs/examples/solidity/nft_bridge/NFTPortal.sol#L36-L70 The portal handles two flows: @@ -675,7 +675,7 @@ const rollupAddress = nodeInfo.l1ContractAddresses.rollupAddress.toString(); // Create rollup contract instance for querying epoch information const rollup = new RollupContract(l1Client, rollupAddress); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L1-L48 +> Source code: docs/examples/ts/token_bridge/index.ts#L1-L48 You now have wallets for both chains, correctly connected to their respective chains. 
Next, deploy the L1 contracts: @@ -698,7 +698,7 @@ const { address: portalAddress } = await deployL1Contract( console.log(`SimpleNFT: ${nftAddress}`); console.log(`NFTPortal: ${portalAddress}\n`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L50-L67 +> Source code: docs/examples/ts/token_bridge/index.ts#L50-L67 Now deploy the L2 contracts. Thanks to the TypeScript bindings generated with `aztec codegen`, deployment is straightforward: @@ -718,7 +718,7 @@ const l2Bridge = await NFTBridgeContract.deploy( console.log(`L2 NFT: ${l2Nft.address.toString()}`); console.log(`L2 Bridge: ${l2Bridge.address.toString()}\n`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L69-L83 +> Source code: docs/examples/ts/token_bridge/index.ts#L69-L83 Now that you have the L2 bridge's contract address, initialize the L1 bridge: @@ -738,7 +738,7 @@ await l1Client.waitForTransactionReceipt({ hash: initHash }); console.log("Portal initialized\n"); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L85-L99 +> Source code: docs/examples/ts/token_bridge/index.ts#L85-L99 The L2 contracts were already initialized when you deployed them, but you still need to: @@ -761,7 +761,7 @@ await l2Nft.methods console.log("Bridge configured\n"); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L101-L113 +> Source code: docs/examples/ts/token_bridge/index.ts#L101-L113 This completes the setup. It's a lot of configuration, but you're dealing with four contracts across two chains. 
@@ -787,7 +787,7 @@ const tokenId = 0n; console.log(`Minted tokenId: ${tokenId}\n`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L115-L131 +> Source code: docs/examples/ts/token_bridge/index.ts#L115-L131 To bridge, first approve the portal address to transfer the NFT, then transfer it by calling `depositToAztec`: @@ -823,7 +823,7 @@ const depositReceipt = await l1Client.waitForTransactionReceipt({ hash: depositHash, }); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L133-L163 +> Source code: docs/examples/ts/token_bridge/index.ts#L133-L163 The `Inbox` contract will emit an important log: `MessageSent(inProgress, index, leaf, updatedRollingHash);`. This log provides the **leaf index** of the message in the [L1-L2 Message Tree](../../foundational-topics/ethereum-aztec-messaging/index.md)—the location of the message in the tree that will appear on L2. You need this index, plus the secret, to correctly claim and decrypt the message. @@ -867,7 +867,7 @@ const messageSentLogs = depositReceipt.logs const messageLeafIndex = new Fr(messageSentLogs[0].decoded.args.index); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L165-L201 +> Source code: docs/examples/ts/token_bridge/index.ts#L165-L201 This extracts the logs from the deposit and retrieves the leaf index. You can now claim it on L2. However, for security reasons, at least 2 blocks must pass before a message can be claimed on L2. If you called `claim` on the L2 contract immediately, it would return "no message available". 
@@ -889,7 +889,7 @@ async function mine2Blocks( }); } ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L203-L217 +> Source code: docs/examples/ts/token_bridge/index.ts#L203-L217 Now claim the message on L2: @@ -918,7 +918,7 @@ const notesAfterClaim = await l2Nft.methods .simulate({ from: account.address }); console.log(` Notes count: ${notesAfterClaim}\n`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L219-L242 +> Source code: docs/examples/ts/token_bridge/index.ts#L219-L242 ### L2 → L1 Flow @@ -946,7 +946,7 @@ const notesAfterBurn = await l2Nft.methods .simulate({ from: account.address }); console.log(` Notes count: ${notesAfterBurn}\n`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L244-L264 +> Source code: docs/examples/ts/token_bridge/index.ts#L244-L264 Just like in the L1 → L2 flow, you need to know what to claim on L1. Where in the message tree is the message you want to claim? Use the utility `computeL2ToL1MembershipWitness`, which provides the leaf and the sibling path of the message: @@ -1009,7 +1009,7 @@ const siblingPathHex = witness!.siblingPath .toBufferArray() .map((buf: Buffer) => `0x${buf.toString("hex")}` as `0x${string}`); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L266-L323 +> Source code: docs/examples/ts/token_bridge/index.ts#L266-L323 With this information, call the L1 contract and use the index and the sibling path to claim the L1 NFT: @@ -1026,7 +1026,7 @@ const withdrawHash = await l1Client.writeContract({ await l1Client.waitForTransactionReceipt({ hash: withdrawHash }); console.log("NFT withdrawn to L1\n"); ``` -> Source code: docs/examples/ts/token_bridge/index.ts#L325-L336 +> Source code: docs/examples/ts/token_bridge/index.ts#L325-L336 You can now try the whole flow with: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/local_network.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/local_network.md similarity 
index 96% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/local_network.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/local_network.md index 39eb9c3d55c7..8d764e922979 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/local_network.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/local_network.md @@ -7,8 +7,8 @@ description: Information about running the Aztec local network development envir -- Current version: `v4.0.0-nightly.20260217` -- Update with `aztec-up 4.0.0-nightly.20260217` +- Current version: `v4.0.0-nightly.20260218` +- Update with `aztec-up 4.0.0-nightly.20260218` On this page you will find @@ -43,7 +43,7 @@ Check the `git=` github url, tag, and directory. Example contracts serve as a helpful reference between versions of the Aztec.nr framework since they are strictly maintained with each release. -Code referenced in the documentation is sourced from contracts within [this directory (GitHub link)](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260217/noir-projects/noir-contracts/contracts). +Code referenced in the documentation is sourced from contracts within [this directory (GitHub link)](https://github.com/AztecProtocol/aztec-packages/tree/v4.0.0-nightly.20260218/noir-projects/noir-contracts/contracts). As in the previous section, the location of the noir contracts moved at version `0.24.0`, from `yarn-project/noir-contracts` before, to `noir-projects/noir-contracts`. 
@@ -136,9 +136,9 @@ To update the aztec.nr packages manually, update the tags of the `aztec.nr` depe ```diff [dependencies] -aztec = { git="https://github.com/AztecProtocol/aztec-packages", tag="v0.7.5", directory="noir-projects/aztec-nr/aztec" } -+aztec = { git="https://github.com/AztecProtocol/aztec-packages", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/aztec" } ++aztec = { git="https://github.com/AztecProtocol/aztec-packages", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/aztec" } -value_note = { git="https://github.com/AztecProtocol/aztec-packages", tag="v0.7.5", directory="noir-projects/aztec-nr/value-note" } -+value_note = { git="https://github.com/AztecProtocol/aztec-packages", tag="v4.0.0-nightly.20260217", directory="noir-projects/aztec-nr/value-note" } ++value_note = { git="https://github.com/AztecProtocol/aztec-packages", tag="v4.0.0-nightly.20260218", directory="noir-projects/aztec-nr/value-note" } ``` Go to the contract directory and try compiling it to verify that the update was successful: @@ -157,9 +157,9 @@ To update Aztec.js packages, go to your `package.json` and replace the versions ```diff [dependencies] -"@aztec/accounts": "0.7.5", -+"@aztec/accounts": "v4.0.0-nightly.20260217", ++"@aztec/accounts": "v4.0.0-nightly.20260218", -"@aztec/noir-contracts.js": "0.35.1", -+"@aztec/accounts": "v4.0.0-nightly.20260217", ++"@aztec/noir-contracts.js": "v4.0.0-nightly.20260218", ``` ## Local Network PXE Proving diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/testing_governance_rollup_upgrade.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/testing_governance_rollup_upgrade.md similarity index 99% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/testing_governance_rollup_upgrade.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/testing_governance_rollup_upgrade.md index 
906ded826777..5ac58c45dd63 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/docs/tutorials/testing_governance_rollup_upgrade.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/docs/tutorials/testing_governance_rollup_upgrade.md @@ -32,7 +32,7 @@ The default governance configuration for local networks: Ensure you are on the correct Aztec version: ```bash -aztec-up 4.0.0-nightly.20260217 +aztec-up 4.0.0-nightly.20260218 ``` ```bash @@ -57,7 +57,7 @@ Clone the l1-contracts repo and checkout the version matching your Aztec install ```bash git clone https://github.com/AztecProtocol/l1-contracts.git cd l1-contracts -git checkout 4.0.0-nightly.20260217 +git checkout 4.0.0-nightly.20260218 ``` Install dependencies and set up the build environment: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/getting_started_on_devnet.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/getting_started_on_devnet.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/getting_started_on_devnet.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/getting_started_on_devnet.md diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/getting_started_on_local_network.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/getting_started_on_local_network.md similarity index 98% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/getting_started_on_local_network.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/getting_started_on_local_network.md index e8f5195b8159..2aa3c63933da 100644 --- a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/getting_started_on_local_network.md +++ b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/getting_started_on_local_network.md @@ -34,7 +34,7 @@ import { General, Fees } from 
'@site/src/components/Snippets/general_snippets'; Run: ```bash -VERSION=4.0.0-nightly.20260217 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260217) +VERSION=4.0.0-nightly.20260218 bash -i <(curl -sL https://install.aztec.network/4.0.0-nightly.20260218) ``` This will install the following tools: diff --git a/docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/overview.md b/docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/overview.md similarity index 100% rename from docs/developer_versioned_docs/version-v4.0.0-nightly.20260217/overview.md rename to docs/developer_versioned_docs/version-v4.0.0-nightly.20260218/overview.md diff --git a/docs/developer_versioned_sidebars/version-v4.0.0-nightly.20260217-sidebars.json b/docs/developer_versioned_sidebars/version-v4.0.0-nightly.20260218-sidebars.json similarity index 100% rename from docs/developer_versioned_sidebars/version-v4.0.0-nightly.20260217-sidebars.json rename to docs/developer_versioned_sidebars/version-v4.0.0-nightly.20260218-sidebars.json diff --git a/docs/developer_versions.json b/docs/developer_versions.json index df816fd76070..b51765a953da 100644 --- a/docs/developer_versions.json +++ b/docs/developer_versions.json @@ -1,4 +1,4 @@ [ "v3.0.0-devnet.6-patch.1", - "v4.0.0-nightly.20260217" + "v4.0.0-nightly.20260218" ] diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 10bfef79cc4d..369e3fec643a 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -88,6 +88,55 @@ A new environment variable `AZTEC_INITIAL_ETH_PER_FEE_ASSET` has been added to c This replaces the previous hardcoded default and allows network operators to set the starting price point for the fee asset. 
+### `reloadKeystore` admin RPC endpoint + +Node operators can now update validator attester keys, coinbase, and fee recipient without restarting the node by calling the new `reloadKeystore` admin RPC endpoint. + +What is updated on reload: +- Validator attester keys (add, remove, or replace) +- Coinbase and fee recipient per validator +- Publisher-to-validator mapping + +What is NOT updated (requires restart): +- L1 publisher signers +- Prover keys +- HA signer connections + +New validators must use a publisher key already initialized at startup. Reload is rejected with a clear error if validation fails. + +### Admin API key authentication + +The admin JSON-RPC endpoint now supports auto-generated API key authentication. + +**Behavior:** +- A cryptographically secure API key is auto-generated at first startup and displayed once via stdout +- Only the SHA-256 hash is persisted to `/admin/api_key_hash` +- The key is reused across restarts when `--data-directory` is set +- Supports both `x-api-key` and `Authorization: Bearer <api-key>` headers +- Health check endpoint (`GET /status`) is excluded from auth (for k8s probes) + +**Configuration:** + +```bash +--admin-api-key-hash ($AZTEC_ADMIN_API_KEY_HASH) # Use a pre-generated SHA-256 key hash +--no-admin-api-key ($AZTEC_NO_ADMIN_API_KEY) # Disable auth entirely +--reset-admin-api-key ($AZTEC_RESET_ADMIN_API_KEY) # Force key regeneration +``` + +**Helm charts**: Admin API key auth is disabled by default (`noAdminApiKey: true`). Set to `false` in production values to enable. + +**Migration**: No action required — auth is opt-out. To enable, ensure `--no-admin-api-key` is not set and note the key printed at startup. 
+ +### Transaction pool error codes for RPC callers + +Transaction submission via RPC now returns structured rejection codes when a transaction is rejected by the mempool: + +- `LOW_PRIORITY_FEE` — tx priority fee is too low +- `INSUFFICIENT_FEE_PAYER_BALANCE` — fee payer doesn't have enough balance +- `NULLIFIER_CONFLICT` — conflicting nullifier already in pool + +**Impact**: Improved developer experience — callers can now programmatically handle specific rejection reasons. + ## Changed defaults ## Troubleshooting diff --git a/docs/static/aztec-nr-api/nightly/all.html b/docs/static/aztec-nr-api/nightly/all.html index f7fb57c71135..4417efa004eb 100644 --- a/docs/static/aztec-nr-api/nightly/all.html +++ b/docs/static/aztec-nr-api/nightly/all.html @@ -133,7 +133,7 @@

All items in aztec-nr

  • noir_aztec::protocol::abis::tree_snapshots::TreeSnapshots
  • noir_aztec::protocol::abis::tx_constant_data::TxConstantData
  • noir_aztec::protocol::abis::validation_requests::key_validation_request::KeyValidationRequest
  • -
  • noir_aztec::protocol::abis::validation_requests::key_validation_request_and_generator::KeyValidationRequestAndGenerator
  • +
  • noir_aztec::protocol::abis::validation_requests::key_validation_request_and_separator::KeyValidationRequestAndSeparator
  • noir_aztec::protocol::abis::validation_requests::private_validation_requests::PrivateValidationRequests
  • noir_aztec::protocol::address::aztec_address::AztecAddress
  • noir_aztec::protocol::address::eth_address::EthAddress
  • @@ -237,7 +237,7 @@

    All items in aztec-nr

  • noir_aztec::keys::constants::NUM_KEY_TYPES
  • noir_aztec::keys::constants::OUTGOING_INDEX
  • noir_aztec::keys::constants::TAGGING_INDEX
  • -
  • noir_aztec::keys::constants::sk_generators
  • +
  • noir_aztec::keys::constants::public_key_domain_separators
  • noir_aztec::macros::events::EVENT_SELECTORS
  • noir_aztec::macros::notes::NOTES
  • noir_aztec::macros::storage::STORAGE_LAYOUT_NAME
  • @@ -524,6 +524,7 @@

    All items in aztec-nr

  • noir_aztec::protocol::constants::COUNTED_PUBLIC_CALL_REQUEST_LENGTH
  • noir_aztec::protocol::constants::DA_BYTES_PER_FIELD
  • noir_aztec::protocol::constants::DA_GAS_PER_BYTE
  • +
  • noir_aztec::protocol::constants::DA_GAS_PER_FIELD
  • noir_aztec::protocol::constants::DEFAULT_DA_GAS_LIMIT
  • noir_aztec::protocol::constants::DEFAULT_IVPK_M_X
  • noir_aztec::protocol::constants::DEFAULT_IVPK_M_Y
  • diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/context/struct.PrivateContext.html b/docs/static/aztec-nr-api/nightly/noir_aztec/context/struct.PrivateContext.html index d7896efb742f..1088d25f8c3b 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/context/struct.PrivateContext.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/context/struct.PrivateContext.html @@ -714,7 +714,7 @@

    Advanced

    Validates message existence in the L1-to-L2 message tree and nullifies the message to prevent double-consumption.

    -pub fn emit_private_log(&mut self, log: [Field; 18], length: u32) +pub fn emit_private_log(&mut self, log: [Field; 16], length: u32)

    Emits a private log (an array of Fields) that will be published to an Ethereum blob.

    @@ -757,7 +757,7 @@

    Arguments

    pub fn emit_raw_note_log( &mut self, - log: [Field; 18], + log: [Field; 16], length: u32, note_hash_counter: u32, ) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/event/event_interface/fn.compute_private_serialized_event_commitment.html b/docs/static/aztec-nr-api/nightly/noir_aztec/event/event_interface/fn.compute_private_serialized_event_commitment.html index 20c1d2b3ffac..d6177e64dcad 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/event/event_interface/fn.compute_private_serialized_event_commitment.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/event/event_interface/fn.compute_private_serialized_event_commitment.html @@ -30,7 +30,7 @@

    Functions

      Function compute_private_serialized_event_commitment

      pub unconstrained fn compute_private_serialized_event_commitment(
      -    serialized_event: BoundedVec<Field, 12>,
      +    serialized_event: BoundedVec<Field, 11>,
           randomness: Field,
           event_type_id: Field,
       ) -> Field
      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.INCOMING_INDEX.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.INCOMING_INDEX.html index 0be66f64cc5a..8966cd4a3e04 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.INCOMING_INDEX.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.INCOMING_INDEX.html @@ -23,7 +23,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NULLIFIER_INDEX.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NULLIFIER_INDEX.html index 5fcbb182469e..9e4375320a52 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NULLIFIER_INDEX.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NULLIFIER_INDEX.html @@ -23,7 +23,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NUM_KEY_TYPES.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NUM_KEY_TYPES.html index 5779bff9b563..1c65fd288f63 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NUM_KEY_TYPES.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.NUM_KEY_TYPES.html @@ -23,7 +23,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.OUTGOING_INDEX.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.OUTGOING_INDEX.html index 8a0c05b08b43..d7060f34bb15 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.OUTGOING_INDEX.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.OUTGOING_INDEX.html @@ -23,7 +23,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.TAGGING_INDEX.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.TAGGING_INDEX.html index 3d8ba601479f..2d2f09dd3233 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.TAGGING_INDEX.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.TAGGING_INDEX.html @@ -23,7 +23,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.sk_generators.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.public_key_domain_separators.html similarity index 72% rename from docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.sk_generators.html rename to docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.public_key_domain_separators.html index 938f175823a8..7f3c48bcb8b9 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.sk_generators.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/global.public_key_domain_separators.html @@ -4,12 +4,12 @@ -Global sk_generators documentation +Global public_key_domain_separators documentation
      -

      Global sk_generators

      -
      pub global sk_generators: [Field; 4];
      +

      Global public_key_domain_separators

      +
      pub global public_key_domain_separators: [Field; 4];
      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/index.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/index.html index 26bd3691618e..b45420b57231 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/index.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/constants/index.html @@ -38,7 +38,7 @@

      Globals

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ecdh_shared_secret/fn.derive_ecdh_shared_secret.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ecdh_shared_secret/fn.derive_ecdh_shared_secret.html index ab7b01644ea8..c40d810ac44a 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ecdh_shared_secret/fn.derive_ecdh_shared_secret.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ecdh_shared_secret/fn.derive_ecdh_shared_secret.html @@ -26,9 +26,9 @@

      Functions

        Function derive_ecdh_shared_secret

        pub fn derive_ecdh_shared_secret(
        -    secret: EmbeddedCurveScalar,
        -    public_key: EmbeddedCurvePoint,
        -) -> EmbeddedCurvePoint
        + secret: EmbeddedCurveScalar, + public_key: EmbeddedCurvePoint, +) -> EmbeddedCurvePoint

        Computes a standard ECDH shared secret: secret * public_key = shared_secret.

        diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ephemeral/fn.generate_ephemeral_key_pair.html b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ephemeral/fn.generate_ephemeral_key_pair.html index 4fe08c0b914b..7cf559cf4142 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ephemeral/fn.generate_ephemeral_key_pair.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/keys/ephemeral/fn.generate_ephemeral_key_pair.html @@ -25,7 +25,7 @@

        Functions

          Function generate_ephemeral_key_pair

          -
          pub fn generate_ephemeral_key_pair() -> (EmbeddedCurveScalar, EmbeddedCurvePoint)
          +
          pub fn generate_ephemeral_key_pair() -> (EmbeddedCurveScalar, EmbeddedCurvePoint)
          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/nonce_discovery/fn.attempt_note_nonce_discovery.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/nonce_discovery/fn.attempt_note_nonce_discovery.html index 8878bc4ac789..ef7d2fffc274 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/nonce_discovery/fn.attempt_note_nonce_discovery.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/nonce_discovery/fn.attempt_note_nonce_discovery.html @@ -37,7 +37,7 @@

          Function attempt_note_nonce_discovery

          storage_slot: Field, randomness: Field, note_type_id: Field, - packed_note: BoundedVec<Field, 10>, + packed_note: BoundedVec<Field, 9>, ) -> BoundedVec<DiscoveredNoteInfo, 64>
          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/partial_notes/fn.process_partial_note_private_msg.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/partial_notes/fn.process_partial_note_private_msg.html index b523ea2958e6..e42aac4aea16 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/partial_notes/fn.process_partial_note_private_msg.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/partial_notes/fn.process_partial_note_private_msg.html @@ -33,7 +33,7 @@

          Function process_partial_note_private_msg

          contract_address: AztecAddress, recipient: AztecAddress, msg_metadata: u64, - msg_content: BoundedVec<Field, 13>, + msg_content: BoundedVec<Field, 12>, ) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_events/fn.process_private_event_msg.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_events/fn.process_private_event_msg.html index 1ed131262ca1..a5d82504e7a0 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_events/fn.process_private_event_msg.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_events/fn.process_private_event_msg.html @@ -29,7 +29,7 @@

          Function process_private_event_msg

          contract_address: AztecAddress, recipient: AztecAddress, msg_metadata: u64, - msg_content: BoundedVec<Field, 13>, + msg_content: BoundedVec<Field, 12>, tx_hash: Field, ) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.attempt_note_discovery.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.attempt_note_discovery.html index 729ab4def69a..56045e01362b 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.attempt_note_discovery.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.attempt_note_discovery.html @@ -37,7 +37,7 @@

          Function attempt_note_discovery

          storage_slot: Field, randomness: Field, note_type_id: Field, - packed_note: BoundedVec<Field, 10>, + packed_note: BoundedVec<Field, 9>, )
          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.process_private_note_msg.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.process_private_note_msg.html index f981d2880d5c..21496d1bc50f 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.process_private_note_msg.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/private_notes/fn.process_private_note_msg.html @@ -34,7 +34,7 @@

          Function process_private_note_msg

          recipient: AztecAddress, compute_note_hash_and_nullifier: ComputeNoteHashAndNullifier<Env>, msg_metadata: u64, - msg_content: BoundedVec<Field, 13>, + msg_content: BoundedVec<Field, 12>, ) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_ciphertext.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_ciphertext.html index 008630687134..c8f410be7b08 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_ciphertext.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_ciphertext.html @@ -29,7 +29,7 @@

          Function process_message_ciphertext

          pub unconstrained fn process_message_ciphertext<Env>(
               contract_address: AztecAddress,
               compute_note_hash_and_nullifier: ComputeNoteHashAndNullifier<Env>,
          -    message_ciphertext: BoundedVec<Field, 17>,
          +    message_ciphertext: BoundedVec<Field, 15>,
               message_context: MessageContext,
           )
          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_plaintext.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_plaintext.html index d70ecfcc53af..a4a669eaa213 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_plaintext.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/process_message/fn.process_message_plaintext.html @@ -29,7 +29,7 @@

          Function process_message_plaintext

          pub unconstrained fn process_message_plaintext<Env>(
               contract_address: AztecAddress,
               compute_note_hash_and_nullifier: ComputeNoteHashAndNullifier<Env>,
          -    message_plaintext: BoundedVec<Field, 14>,
          +    message_plaintext: BoundedVec<Field, 13>,
               message_context: MessageContext,
           )
          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/type.ComputeNoteHashAndNullifier.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/type.ComputeNoteHashAndNullifier.html index 4b5a3a19d83e..894c05b3032b 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/type.ComputeNoteHashAndNullifier.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/discovery/type.ComputeNoteHashAndNullifier.html @@ -38,7 +38,7 @@

          Functions

            Type alias ComputeNoteHashAndNullifier

            -
            pub type ComputeNoteHashAndNullifier<Env> = unconstrained fn[Env](BoundedVec<Field, 10>, AztecAddress, Field, Field, AztecAddress, Field, Field) -> Option<NoteHashAndNullifier>;
            +
            pub type ComputeNoteHashAndNullifier<Env> = unconstrained fn[Env](BoundedVec<Field, 9>, AztecAddress, Field, Field, AztecAddress, Field, Field) -> Option<NoteHashAndNullifier>;

            A function which takes a note's packed content, address of the emitting contract, note nonce, storage slot and note diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encoding/fn.decode_message.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encoding/fn.decode_message.html index d3c6da9bed59..c42717766f6d 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encoding/fn.decode_message.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encoding/fn.decode_message.html @@ -35,8 +35,8 @@

            Globals

              Function decode_message

              pub unconstrained fn decode_message(
              -    message: BoundedVec<Field, 14>,
              -) -> (u64, u64, BoundedVec<Field, 13>)
              + message: BoundedVec<Field, 13>, +) -> (u64, u64, BoundedVec<Field, 12>)

              Decodes a standard aztec-nr message, i.e. one created via encode_message, returning the original encoded values.

              diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/fn.derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_poseidon2_unsafe.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/fn.derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_poseidon2_unsafe.html index be75a7936c59..bd0e4d164a14 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/fn.derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_poseidon2_unsafe.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/fn.derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_poseidon2_unsafe.html @@ -29,7 +29,7 @@

              Functions

            diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/struct.AES128.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/struct.AES128.html index a828ef7e79e8..0f97cc210983 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/struct.AES128.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/aes128/struct.AES128.html @@ -41,12 +41,12 @@

            impl pub fn encrypt<let PlaintextLen: u32>( plaintext: [Field; PlaintextLen], recipient: AztecAddress, -) -> [Field; 17] +) -> [Field; 15] pub unconstrained fn decrypt( - ciphertext: BoundedVec<Field, 17>, + ciphertext: BoundedVec<Field, 15>, recipient: AztecAddress, -) -> Option<BoundedVec<Field, 14>> +) -> Option<BoundedVec<Field, 13>>

          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/message_encryption/trait.MessageEncryption.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/message_encryption/trait.MessageEncryption.html index 26ee711cf48f..53f944373ad9 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/message_encryption/trait.MessageEncryption.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/message_encryption/trait.MessageEncryption.html @@ -40,11 +40,11 @@

          Trait MessageEncryption

          pub fn encrypt<let PlaintextLen: u32>( plaintext: [Field; PlaintextLen], recipient: AztecAddress, - ) -> [Field; 17]; + ) -> [Field; 15]; pub unconstrained fn decrypt( - ciphertext: BoundedVec<Field, 17>, + ciphertext: BoundedVec<Field, 15>, recipient: AztecAddress, - ) -> Option<BoundedVec<Field, 14>>; + ) -> Option<BoundedVec<Field, 13>>; }
          @@ -65,7 +65,7 @@

          Required methods

          pub fn encrypt<let PlaintextLen: u32>( plaintext: [Field; PlaintextLen], recipient: AztecAddress, -) -> [Field; 17] +) -> [Field; 15]

          Encrypts a plaintext message to recipient.

          @@ -79,9 +79,9 @@

          Privacy

          These properties make it secure to distribute the ciphertext publicly, e.g. on blockchain logs (assuming the encryption function is itself secure).

          pub unconstrained fn decrypt( - ciphertext: BoundedVec<Field, 17>, + ciphertext: BoundedVec<Field, 15>, recipient: AztecAddress, -) -> Option<BoundedVec<Field, 14>> +) -> Option<BoundedVec<Field, 13>>

          Decrypts a message ciphertext into its original plaintext.

          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_decrypt.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_decrypt.html index 2dc9b650945b..eb51c7d32778 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_decrypt.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_decrypt.html @@ -28,7 +28,7 @@

          Functions

            Function poseidon2_decrypt

            pub fn poseidon2_decrypt<let L: u32>(
                 ciphertext: [Field; L + 2 / 3 * 3 + 1],
            -    shared_secret: EmbeddedCurvePoint,
            +    shared_secret: EmbeddedCurvePoint,
                 encryption_nonce: Field,
             ) -> Option<[Field; L]>
            diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_encrypt.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_encrypt.html index 4fbc40fc7a1b..a38a2192067a 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_encrypt.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/encryption/poseidon2/fn.poseidon2_encrypt.html @@ -28,7 +28,7 @@

            Functions

              Function poseidon2_encrypt

              pub fn poseidon2_encrypt<let L: u32>(
                   msg: [Field; L],
              -    shared_secret: EmbeddedCurvePoint,
              +    shared_secret: EmbeddedCurvePoint,
                   encryption_nonce: Field,
               ) -> [Field; L + 2 / 3 * 3 + 1]
              diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/logs/partial_note/fn.compute_partial_note_private_content_log.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/logs/partial_note/fn.compute_partial_note_private_content_log.html index 7a2ae86c6541..241ee1d749d3 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/logs/partial_note/fn.compute_partial_note_private_content_log.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/logs/partial_note/fn.compute_partial_note_private_content_log.html @@ -36,7 +36,7 @@

              Function compute_partial_note_private_content_logField, recipient: AztecAddress, note_completion_log_tag: Field, -) -> [Field; 18] +) -> [Field; 16]
              where PartialNotePrivateContent: NoteType, PartialNotePrivateContent: Packable
              diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/offchain_messages/fn.deliver_offchain_message.html b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/offchain_messages/fn.deliver_offchain_message.html index 394cf6efc100..d0d2915b510c 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/messages/offchain_messages/fn.deliver_offchain_message.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/messages/offchain_messages/fn.deliver_offchain_message.html @@ -28,7 +28,7 @@

              Globals

                Function deliver_offchain_message

                -
                pub fn deliver_offchain_message(ciphertext: [Field; 17], recipient: AztecAddress)
                +
                pub fn deliver_offchain_message(ciphertext: [Field; 15], recipient: AztecAddress)

                Emits a message that will be delivered offchain rather than through the data availability layer.

                diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/oracle/shared_secret/fn.get_shared_secret.html b/docs/static/aztec-nr-api/nightly/noir_aztec/oracle/shared_secret/fn.get_shared_secret.html index 68b55c0ddce6..621c3ec3847f 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/oracle/shared_secret/fn.get_shared_secret.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/oracle/shared_secret/fn.get_shared_secret.html @@ -27,8 +27,8 @@

                Functions

                  Function get_shared_secret

                  pub unconstrained fn get_shared_secret(
                       address: AztecAddress,
                  -    ephPk: EmbeddedCurvePoint,
                  -) -> EmbeddedCurvePoint
                  + ephPk: EmbeddedCurvePoint, +) -> EmbeddedCurvePoint

                  Returns an app-siloed shared secret between address and someone who knows the secret key behind an ephemeral diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_public_accumulated_data/struct.PrivateToPublicAccumulatedData.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_public_accumulated_data/struct.PrivateToPublicAccumulatedData.html index 099eb8fca6c7..419d16860485 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_public_accumulated_data/struct.PrivateToPublicAccumulatedData.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_public_accumulated_data/struct.PrivateToPublicAccumulatedData.html @@ -61,7 +61,7 @@

                  Fields

                  Trait implementations

                  impl Deserialize for PrivateToPublicAccumulatedData

                  -
                  pub fn deserialize(fields: [Field; 1499]) -> Self +
                  pub fn deserialize(fields: [Field; 1371]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -79,7 +79,7 @@

                  impl Serialize for PrivateToPublicAccumulatedData

                  -
                  pub fn serialize(self) -> [Field; 1499] +
                  pub fn serialize(self) -> [Field; 1371] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_rollup_accumulated_data/struct.PrivateToRollupAccumulatedData.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_rollup_accumulated_data/struct.PrivateToRollupAccumulatedData.html index 6e2006851f6c..12919b09768e 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_rollup_accumulated_data/struct.PrivateToRollupAccumulatedData.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/accumulated_data/private_to_rollup_accumulated_data/struct.PrivateToRollupAccumulatedData.html @@ -58,7 +58,7 @@

                  Fields

                  Trait implementations

                  impl Deserialize for PrivateToRollupAccumulatedData

                  -
                  pub fn deserialize(fields: [Field; 1371]) -> Self +
                  pub fn deserialize(fields: [Field; 1243]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -76,7 +76,7 @@

                  impl Serialize for PrivateToRollupAccumulatedData

                  -
                  pub fn serialize(self) -> [Field; 1371] +
                  pub fn serialize(self) -> [Field; 1243] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_public_kernel_circuit_public_inputs/struct.PrivateToPublicKernelCircuitPublicInputs.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_public_kernel_circuit_public_inputs/struct.PrivateToPublicKernelCircuitPublicInputs.html index f0aad99b6bd8..ddd6ec24596d 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_public_kernel_circuit_public_inputs/struct.PrivateToPublicKernelCircuitPublicInputs.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_public_kernel_circuit_public_inputs/struct.PrivateToPublicKernelCircuitPublicInputs.html @@ -65,7 +65,7 @@

                  Fields

                  Trait implementations

                  impl Deserialize for PrivateToPublicKernelCircuitPublicInputs

                  -
                  pub fn deserialize(fields: [Field; 3040]) -> Self +
                  pub fn deserialize(fields: [Field; 2784]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -87,7 +87,7 @@

                  impl Serialize for PrivateToPublicKernelCircuitPublicInputs

                  -
                  pub fn serialize(self) -> [Field; 3040] +
                  pub fn serialize(self) -> [Field; 2784] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_rollup_kernel_circuit_public_inputs/struct.PrivateToRollupKernelCircuitPublicInputs.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_rollup_kernel_circuit_public_inputs/struct.PrivateToRollupKernelCircuitPublicInputs.html index 10967f4d3e39..881bc968eeed 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_rollup_kernel_circuit_public_inputs/struct.PrivateToRollupKernelCircuitPublicInputs.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/kernel_circuit_public_inputs/private_to_rollup_kernel_circuit_public_inputs/struct.PrivateToRollupKernelCircuitPublicInputs.html @@ -59,7 +59,7 @@

                  Fields

                  Trait implementations

                  impl Deserialize for PrivateToRollupKernelCircuitPublicInputs

                  -
                  pub fn deserialize(fields: [Field; 1409]) -> Self +
                  pub fn deserialize(fields: [Field; 1281]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -81,7 +81,7 @@

                  impl Serialize for PrivateToRollupKernelCircuitPublicInputs

                  -
                  pub fn serialize(self) -> [Field; 1409] +
                  pub fn serialize(self) -> [Field; 1281] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_circuit_public_inputs/struct.PrivateCircuitPublicInputs.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_circuit_public_inputs/struct.PrivateCircuitPublicInputs.html index f3cf71ce343d..07eefdd0daee 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_circuit_public_inputs/struct.PrivateCircuitPublicInputs.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_circuit_public_inputs/struct.PrivateCircuitPublicInputs.html @@ -29,7 +29,7 @@

                  Fields

                • expected_revertible_side_effect_counter
                • expiration_timestamp
                • is_fee_payer
                • -
                • key_validation_requests_and_generators
                • +
                • key_validation_requests_and_separators
                • l2_to_l1_msgs
                • min_revertible_side_effect_counter
                • note_hash_read_requests
                • @@ -74,7 +74,7 @@

                  Struct PrivateCircuitPublicInputsu32, pub note_hash_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, 16>, pub nullifier_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, 16>, - pub key_validation_requests_and_generators: ClaimedLengthArray<KeyValidationRequestAndGenerator, 16>, + pub key_validation_requests_and_separators: ClaimedLengthArray<KeyValidationRequestAndSeparator, 16>, pub private_call_requests: ClaimedLengthArray<PrivateCallRequest, 8>, pub public_call_requests: ClaimedLengthArray<Counted<PublicCallRequest>, 32>, pub public_teardown_call_request: PublicCallRequest, @@ -100,7 +100,7 @@

                  Fields

                  expected_revertible_side_effect_counter: u32
                  note_hash_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, 16>
                  nullifier_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, 16>
                  -
                  key_validation_requests_and_generators: ClaimedLengthArray<KeyValidationRequestAndGenerator, 16>
                  +
                  key_validation_requests_and_separators: ClaimedLengthArray<KeyValidationRequestAndSeparator, 16>
                  private_call_requests: ClaimedLengthArray<PrivateCallRequest, 8>
                  public_call_requests: ClaimedLengthArray<Counted<PublicCallRequest>, 32>
                  public_teardown_call_request: PublicCallRequest
                  @@ -112,7 +112,7 @@

                  Fields

                  Trait implementations

                  impl Deserialize for PrivateCircuitPublicInputs

                  -
                  pub fn deserialize(fields: [Field; 902]) -> Self +
                  pub fn deserialize(fields: [Field; 870]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -130,7 +130,7 @@

                  impl Serialize for PrivateCircuitPublicInputs

                  -
                  pub fn serialize(self) -> [Field; 902] +
                  pub fn serialize(self) -> [Field; 870] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/struct.PrivateLogData.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/struct.PrivateLogData.html index 44878f629d00..16dc245fed01 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/struct.PrivateLogData.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/struct.PrivateLogData.html @@ -46,12 +46,12 @@

                  Type aliases

                    Struct PrivateLogData

                    pub struct PrivateLogData {
                    -    pub log: Log<18>,
                    +    pub log: Log<16>,
                         pub note_hash_counter: u32,
                     }
                     

                    Fields

                    -
                    log: Log<18>
                    +
                    log: Log<16>
                    note_hash_counter: u32

                    Implementations

                    impl PrivateLogData

                    @@ -61,7 +61,7 @@

                    impl impl Deserialize for PrivateLogData

                    -
                    pub fn deserialize(fields: [Field; 20]) -> Self +
                    pub fn deserialize(fields: [Field; 18]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self @@ -79,7 +79,7 @@

                    impl

                    impl Serialize for PrivateLogData

                    -
                    pub fn serialize(self) -> [Field; 20] +
                    pub fn serialize(self) -> [Field; 18] pub fn stream_serialize<let K: u32>(self, writer: &mut Writer<K>) diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/type.PrivateLog.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/type.PrivateLog.html index 2d1686f72cb1..e1444d72f909 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/type.PrivateLog.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/private_log/type.PrivateLog.html @@ -28,7 +28,7 @@

                    Type aliases

                diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/index.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/index.html index 64d55c59ddbb..736d79e7cb89 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/index.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/index.html @@ -25,7 +25,7 @@

                Module items

                In module validation_requests

                Modules

          -
        • key_validation_request_and_generator
        • +
        • key_validation_request_and_separator
        • private_validation_requests
        diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/struct.KeyValidationRequest.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/struct.KeyValidationRequest.html index 0460d772f0c0..98300b158e81 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/struct.KeyValidationRequest.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request/struct.KeyValidationRequest.html @@ -39,12 +39,12 @@

        Structs

          Struct KeyValidationRequest

          pub struct KeyValidationRequest {
          -    pub pk_m: EmbeddedCurvePoint,
          +    pub pk_m: EmbeddedCurvePoint,
               pub sk_app: Field,
           }
           

          Fields

          - +
          sk_app: Field

          Trait implementations

          impl Deserialize for KeyValidationRequest

          diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/index.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/index.html similarity index 77% rename from docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/index.html rename to docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/index.html index dffde433c5ef..75812b40baa5 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/index.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/index.html @@ -4,12 +4,12 @@ -Module key_validation_request_and_generator documentation +Module key_validation_request_and_separator documentation
        -
      • key_validation_request_and_generator
      • +
      • key_validation_request_and_separator
      • private_validation_requests
      -
      aztec-nr - noir_aztec::protocol::abis::validation_requests::key_validation_request_and_generator
      -

      Module key_validation_request_and_generator

      +
      aztec-nr - noir_aztec::protocol::abis::validation_requests::key_validation_request_and_separator
      +

      Module key_validation_request_and_separator

      Structs

      diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/struct.KeyValidationRequestAndGenerator.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/struct.KeyValidationRequestAndSeparator.html similarity index 67% rename from docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/struct.KeyValidationRequestAndGenerator.html rename to docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/struct.KeyValidationRequestAndSeparator.html index 9153d0fcad1e..4c504ba6396c 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_generator/struct.KeyValidationRequestAndGenerator.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/key_validation_request_and_separator/struct.KeyValidationRequestAndSeparator.html @@ -4,12 +4,12 @@ -Struct KeyValidationRequestAndGenerator documentation +Struct KeyValidationRequestAndSeparator documentation
      - -

      Struct KeyValidationRequestAndGenerator

      -
      pub struct KeyValidationRequestAndGenerator {
      +
      +

      Struct KeyValidationRequestAndSeparator

      +
      pub struct KeyValidationRequestAndSeparator {
           pub request: KeyValidationRequest,
      -    pub sk_app_generator: Field,
      +    pub key_type_domain_separator: Field,
       }
       

      Fields

      -
      sk_app_generator: Field
      +
      key_type_domain_separator: Field

      Implementations

      -

      impl KeyValidationRequestAndGenerator

      +

      impl KeyValidationRequestAndSeparator

      pub fn scope(self, contract_address: AztecAddress) -> Scoped<Self>

      Trait implementations

      -

      impl Deserialize for KeyValidationRequestAndGenerator

      +

      impl Deserialize for KeyValidationRequestAndSeparator

      pub fn deserialize(fields: [Field; 5]) -> Self pub fn stream_deserialize<let K: u32>(reader: &mut Reader<K>) -> Self -

      impl Empty for KeyValidationRequestAndGenerator

      +

      impl Empty for KeyValidationRequestAndSeparator

      pub fn empty() -> Self @@ -70,11 +70,11 @@

      pub fn assert_empty<let S: u32>(self, msg: str<S>) -

      impl Eq for KeyValidationRequestAndGenerator

      +

      impl Eq for KeyValidationRequestAndSeparator

      pub fn eq(_self: Self, _other: Self) -> bool -

      impl Serialize for KeyValidationRequestAndGenerator

      +

      impl Serialize for KeyValidationRequestAndSeparator

      pub fn serialize(self) -> [Field; 5] diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/index.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/index.html index 4138c7ab0733..d6c26829e873 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/index.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/index.html @@ -25,7 +25,7 @@

      Module items

      In module validation_requests

      Modules

      -
    • key_validation_request_and_generator
    • +
    • key_validation_request_and_separator
    • private_validation_requests
    diff --git a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/struct.PrivateValidationRequests.html b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/struct.PrivateValidationRequests.html index 94d85bda2c80..53b0773362a8 100644 --- a/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/struct.PrivateValidationRequests.html +++ b/docs/static/aztec-nr-api/nightly/noir_aztec/protocol/abis/validation_requests/private_validation_requests/struct.PrivateValidationRequests.html @@ -22,7 +22,7 @@

    Fields

    Trait implementations