Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
// This circuit aggregates two Honk proofs from `assert_statement`.
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkProof, verify_ultrahonk_proof};
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkProof, verify_honk_proof_non_zk};

fn main(
verification_key: UltraHonkVerificationKey,
Expand All @@ -11,13 +11,13 @@ fn main(
// The second proof, currently set to be identical
proof_b: UltraHonkProof,
) {
verify_ultrahonk_proof(
verify_honk_proof_non_zk(
verification_key,
proof,
public_inputs,
key_hash,
);
verify_ultrahonk_proof(
verify_honk_proof_non_zk(
verification_key,
proof_b,
public_inputs,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
// This circuit aggregates two ZK-Honk proofs from `assert_statement`.
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkZKProof, verify_ultrahonkzk_proof};
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkZKProof, verify_honk_proof};

fn main(
verification_key: UltraHonkVerificationKey,
Expand All @@ -11,13 +11,13 @@ fn main(
// The second proof, currently set to be identical
proof_b: UltraHonkZKProof,
) {
verify_ultrahonkzk_proof(
verify_honk_proof(
verification_key,
proof,
public_inputs,
key_hash,
);
verify_ultrahonkzk_proof(
verify_honk_proof(
verification_key,
proof_b,
public_inputs,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
// This circuit aggregates a single Honk proof from `assert_statement`.
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkProof, verify_ultrahonk_proof};
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkProof, verify_honk_proof_non_zk};

fn main(
verification_key: UltraHonkVerificationKey,
Expand All @@ -11,7 +11,7 @@ fn main(
// I believe we want to eventually make it public too though.
key_hash: Field,
) {
verify_ultrahonk_proof(
verify_honk_proof_non_zk(
verification_key,
proof,
public_inputs,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
// This circuit aggregates a single ZK-Honk proof from `assert_statement`.
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkZKProof, verify_ultrahonkzk_proof};
use bb_proof_verification::{UltraHonkVerificationKey, UltraHonkZKProof, verify_honk_proof};

fn main(
verification_key: UltraHonkVerificationKey,
Expand All @@ -10,7 +10,7 @@ fn main(
// This is currently not public. It is fine given that the vk is a part of the circuit definition.
key_hash: Field,
) {
verify_ultrahonkzk_proof(
verify_honk_proof(
verification_key,
proof,
public_inputs,
Expand Down
8 changes: 4 additions & 4 deletions barretenberg/cpp/scripts/bench_hardware_concurrency.sh
Original file line number Diff line number Diff line change
Expand Up @@ -197,8 +197,8 @@ for test_case in test_cases:
# Categorize metrics based on name
if "Chonk" in metric_name or "Chonk" in metric_name:
components["Main"][metric_name][cpu] = time_ms
elif "ProtogalaxyProver" in metric_name:
components["ProtogalaxyProver"][metric_name][cpu] = time_ms
elif "HypernovaProver" in metric_name or "HypernovaFoldingProver" in metric_name or "HypernovaFoldingVerifier" in metric_name:
components["HypernovaProver"][metric_name][cpu] = time_ms
elif "OinkProver" in metric_name:
components["OinkProver"][metric_name][cpu] = time_ms
elif "Decider" in metric_name:
Expand All @@ -224,7 +224,7 @@ for test_case in test_cases:
# Generate tables for each component
sections = [
("Main Components", "Main"),
("ProtogalaxyProver Components", "ProtogalaxyProver"),
("HypernovaProver Components", "HypernovaProver"),
("OinkProver", "OinkProver"),
("Decider", "Decider"),
("Goblin", "Goblin"),
Expand Down Expand Up @@ -259,7 +259,7 @@ for test_case in test_cases:
count = int(count_match.group(1)) if count_match else None

# Clean up metric name
clean_name = metric_name.replace('ProtogalaxyProver::', '').replace('OinkProver::', '')
clean_name = metric_name.replace('HypernovaFoldingProver::', '').replace('HypernovaFoldingVerifier::', '').replace('HypernovaProver::', '').replace('OinkProver::', '')

row = generate_table_row(clean_name, times, available_cpus, count)
print("| " + row + " |")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ cd ..
# - Generate a hash for versioning: sha256sum bb-chonk-inputs.tar.gz
# - Upload the compressed results: aws s3 cp bb-chonk-inputs.tar.gz s3://aztec-ci-artifacts/protocol/bb-chonk-inputs-[hash(0:8)].tar.gz
# Note: In case of the "Test suite failed to run ... Unexpected token 'with' " error, need to run: docker pull aztecprotocol/build:3.0
pinned_short_hash="7222b532"
pinned_short_hash="8fa51383"
pinned_chonk_inputs_url="https://aztec-ci-artifacts.s3.us-east-2.amazonaws.com/protocol/bb-chonk-inputs-${pinned_short_hash}.tar.gz"

function compress_and_upload {
Expand Down Expand Up @@ -77,7 +77,7 @@ function check_circuit_vks {
local exit_code=0

if [[ "${2:-}" == "--update_inputs" ]]; then
output=$($bb check --update_inputs --scheme chonk --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" 2>&1) || exit_code=$?
output=$($bb check --vk_policy=rewrite --scheme chonk --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" 2>&1) || exit_code=$?
else
output=$($bb check --scheme chonk --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" 2>&1) || exit_code=$?
fi
Expand Down
1 change: 0 additions & 1 deletion barretenberg/cpp/src/barretenberg/api/api.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ class API {
bool include_gates_per_opcode{ false }; // should we include gates_per_opcode in the gates command output
bool slow_low_memory{ false }; // use file backed memory for polynomials
std::string storage_budget; // storage budget for file backed memory (e.g. "500m", "2g")
bool update_inputs{ false }; // update inputs when check fails
std::string vk_policy{ "default" }; // policy for handling VKs during IVC accumulation

bool optimized_solidity_verifier{ false }; // should we use the optimized sol verifier? (temp)
Expand Down
3 changes: 2 additions & 1 deletion barretenberg/cpp/src/barretenberg/api/api_chonk.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ bool ChonkAPI::check_precomputed_vks(const Flags& flags, const std::filesystem::
bbapi::BBApiRequest request;
std::vector<PrivateExecutionStepRaw> raw_steps = PrivateExecutionStepRaw::load_and_decompress(input_path);

bbapi::VkPolicy vk_policy = bbapi::parse_vk_policy(flags.vk_policy);
bool check_failed = false;
for (auto& step : raw_steps) {
if (step.vk.empty()) {
Expand All @@ -163,7 +164,7 @@ bool ChonkAPI::check_precomputed_vks(const Flags& flags, const std::filesystem::

if (!response.valid) {
info("VK mismatch detected for function ", step.function_name);
if (!flags.update_inputs) {
if (vk_policy != bbapi::VkPolicy::REWRITE) {
info("Computed VK differs from precomputed VK in ivc-inputs.msgpack");
return false;
}
Expand Down
4 changes: 2 additions & 2 deletions barretenberg/cpp/src/barretenberg/api/api_chonk.test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -320,8 +320,8 @@ TEST_F(ChonkAPITests, CheckPrecomputedVksMismatch)
bool result = api.check_precomputed_vks(ChonkAPI::Flags{}, input_path);
EXPECT_FALSE(result);

// Check with --update_input should still fail but update the VK in the input.
result = api.check_precomputed_vks(ChonkAPI::Flags{ .update_inputs = true }, input_path);
// Check with --vk_policy=rewrite should still fail but update the VK in the input.
result = api.check_precomputed_vks(ChonkAPI::Flags{ .vk_policy = "rewrite" }, input_path);
EXPECT_FALSE(result);

// Check again and it should succeed with the updated VK.
Expand Down
15 changes: 6 additions & 9 deletions barretenberg/cpp/src/barretenberg/bb/cli.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -280,18 +280,15 @@ int parse_and_run_cli_command(int argc, char* argv[])
"back to RAM (requires --slow_low_memory).");
};

const auto add_update_inputs_flag = [&](CLI::App* subcommand) {
return subcommand->add_flag("--update_inputs", flags.update_inputs, "Update inputs if vk check fails.");
};

const auto add_vk_policy_option = [&](CLI::App* subcommand) {
return subcommand
->add_option("--vk_policy",
flags.vk_policy,
"Policy for handling verification keys during IVC accumulation. 'default' uses the provided "
"VK as-is, 'check' verifies the provided VK matches the computed VK (throws error on "
"mismatch), 'recompute' always ignores the provided VK and treats it as nullptr.")
->check(CLI::IsMember({ "default", "check", "recompute" }).name("is_member"));
"Policy for handling verification keys. 'default' uses the provided VK as-is, 'check' "
"verifies the provided VK matches the computed VK (throws error on mismatch), 'recompute' "
"always ignores the provided VK and treats it as nullptr, 'rewrite' checks the VK and "
"rewrites the input file with the correct VK if there's a mismatch (for check command).")
->check(CLI::IsMember({ "default", "check", "recompute", "rewrite" }).name("is_member"));
};

const auto add_optimized_solidity_verifier_flag = [&](CLI::App* subcommand) {
Expand Down Expand Up @@ -342,7 +339,7 @@ int parse_and_run_cli_command(int argc, char* argv[])
add_bytecode_path_option(check);
add_witness_path_option(check);
add_ivc_inputs_path_options(check);
add_update_inputs_flag(check);
add_vk_policy_option(check);

/***************************************************************************************************************
* Subcommand: gates
Expand Down
10 changes: 7 additions & 3 deletions barretenberg/cpp/src/barretenberg/bbapi/bbapi_shared.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,10 @@ namespace bb::bbapi {
* @brief Policy for handling verification keys during IVC accumulation
*/
enum class VkPolicy {
DEFAULT, // Use the provided VK as-is (default behavior)
CHECK, // Verify the provided VK matches the computed VK, throw error if mismatch
RECOMPUTE // Always ignore the provided VK and treat it as nullptr
DEFAULT, // Use the provided VK as-is (default behavior)
CHECK, // Verify the provided VK matches the computed VK, throw error if mismatch
RECOMPUTE, // Always ignore the provided VK and treat it as nullptr
REWRITE // Check the VK and rewrite the input file with correct VK if mismatch (for check command)
};

/**
Expand Down Expand Up @@ -137,6 +138,9 @@ inline VkPolicy parse_vk_policy(const std::string& policy)
if (policy == "recompute") {
return VkPolicy::RECOMPUTE;
}
if (policy == "rewrite") {
return VkPolicy::REWRITE;
}
return VkPolicy::DEFAULT; // default
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ void build_constraints(Builder& builder, AcirProgram& program, const ProgramMeta
bool has_chonk_recursion_constraints = !constraint_system.chonk_recursion_constraints.empty();

if constexpr (IsMegaBuilder<Builder>) {
// We shouldn't have both honk recursion constraints and pg recursion constraints.
// We shouldn't have both honk recursion constraints and HN recursion constraints.
BB_ASSERT_EQ(!has_honk_recursion_constraints || !has_hn_recursion_constraints,
true,
"Invalid circuit: both honk and ivc recursion constraints present.");
Expand Down Expand Up @@ -312,7 +312,7 @@ void build_constraints(Builder& builder, AcirProgram& program, const ProgramMeta
// we return a vinfo for the case of Chonk + AVM
BB_ASSERT_EQ(has_hn_recursion_constraints,
false,
"Invalid circuit: pg recursion constraints are present with UltraBuilder.");
"Invalid circuit: HN recursion constraints are present with UltraBuilder.");
BB_ASSERT_EQ(!(has_chonk_recursion_constraints && has_honk_recursion_constraints),
true,
"Invalid circuit: both honk and chonk recursion constraints are present.");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,9 @@ template <typename Builder> void create_blake2s_constraints(Builder& builder, co
using byte_array_ct = stdlib::byte_array<Builder>;
using field_ct = stdlib::field_t<Builder>;

// Create byte array struct
byte_array_ct arr(&builder);
// Build input byte array by appending constrained byte_arrays
byte_array_ct arr = byte_array_ct::constant_padding(&builder, 0); // Start with empty array

// Get the witness assignment for each witness index
// Write the witness assignment to the byte_array
for (const auto& witness_index_num_bits : constraint.inputs) {
auto witness_index = witness_index_num_bits.blackbox_input;
auto num_bits = witness_index_num_bits.num_bits;
Expand All @@ -31,8 +29,11 @@ template <typename Builder> void create_blake2s_constraints(Builder& builder, co
auto num_bytes = round_to_nearest_byte(num_bits);

field_ct element = to_field_ct(witness_index, builder);

// byte_array_ct(field, num_bytes) constructor adds range constraints for each byte
byte_array_ct element_bytes(element, num_bytes);

// Safe write: both arr and element_bytes are constrained
arr.write(element_bytes);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,9 @@ template <typename Builder> void create_blake3_constraints(Builder& builder, con
using byte_array_ct = bb::stdlib::byte_array<Builder>;
using field_ct = bb::stdlib::field_t<Builder>;

// Create byte array struct
byte_array_ct arr(&builder);
// Build input byte array by appending constrained byte_arrays
byte_array_ct arr = byte_array_ct::constant_padding(&builder, 0); // Start with empty array

// Get the witness assignment for each witness index
// Write the witness assignment to the byte_array
for (const auto& witness_index_num_bits : constraint.inputs) {
auto witness_index = witness_index_num_bits.blackbox_input;
auto num_bits = witness_index_num_bits.num_bits;
Expand All @@ -30,8 +28,11 @@ template <typename Builder> void create_blake3_constraints(Builder& builder, con
auto num_bytes = round_to_nearest_byte(num_bits);
BB_ASSERT_LTE(num_bytes, 1024U, "barretenberg does not support blake3 inputs with more than 1024 bytes");
field_ct element = to_field_ct(witness_index, builder);

// byte_array_ct(field, num_bytes) constructor adds range constraints for each byte
byte_array_ct element_bytes(element, num_bytes);

// Safe write: both arr and element_bytes are constrained
arr.write(element_bytes);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,9 @@ using namespace bb;
* coordinates.
* 3. Conditionally select the public key, the signature, and the hash of the message when the predicate is witness
* false. This ensures that the circuit is satisfied when the predicate is false. We set:
* - The first byte of r and s to 1 (NOTE: This only works when the order of the curve divided by two is bigger
* than \f$2^{241}\f$).
* - r = s = H(m) = 1 (the hash is set to 1 to avoid failures in the byte_array constructor)
* - The public key to 2 times the generator of the curve (this is to avoid problems with lookup tables in
secp256r1).
secp256r1)
* 4. Verify the signature against the public key and the hash of the message. We return a bool_t bearing witness to
* whether the signature verification was successful or not.
* 5. Enforce that the result of the signature verification matches the expected result.
Expand Down Expand Up @@ -60,48 +59,55 @@ void create_ecdsa_verify_constraints(typename Curve::Builder& builder,
std::vector<field_ct> pub_x_fields = fields_from_witnesses(builder, input.pub_x_indices);
std::vector<field_ct> pub_y_fields = fields_from_witnesses(builder, input.pub_y_indices);
field_ct result_field = field_ct::from_witness_index(&builder, input.result);
field_ct predicate_field = to_field_ct(input.predicate, builder);
bool_ct predicate(to_field_ct(input.predicate, builder)); // Constructor enforces predicate = 0 or 1

if (!has_valid_witness_assignments) {
// Fill builder variables in case of empty witness assignment
create_dummy_ecdsa_constraint<Curve>(
builder, hashed_message_fields, r_fields, s_fields, pub_x_fields, pub_y_fields, result_field);
}

// Step 1.
// Step 1: Conditionally assign field values when predicate is false
if (!predicate.is_constant()) {
// Set r = s = H(m) = 1 when the predicate is false
for (size_t idx = 0; idx < 32; idx++) {
r_fields[idx] = field_ct::conditional_assign(predicate, r_fields[idx], field_ct(idx == 0 ? 1 : 0));
s_fields[idx] = field_ct::conditional_assign(predicate, s_fields[idx], field_ct(idx == 0 ? 1 : 0));
hashed_message_fields[idx] =
field_ct::conditional_assign(predicate, hashed_message_fields[idx], field_ct(idx == 0 ? 1 : 0));
}

// Set public key to 2*generator when predicate is false
// Compute as native type to get byte representation
typename Curve::AffineElementNative default_point_native(Curve::g1::one + Curve::g1::one);
std::array<uint8_t, 32> default_x_bytes;
std::array<uint8_t, 32> default_y_bytes;
Curve::fq::serialize_to_buffer(default_point_native.x, default_x_bytes.data());
Curve::fq::serialize_to_buffer(default_point_native.y, default_y_bytes.data());

for (size_t i = 0; i < 32; ++i) {
pub_x_fields[i] = field_ct::conditional_assign(predicate, pub_x_fields[i], field_ct(default_x_bytes[i]));
pub_y_fields[i] = field_ct::conditional_assign(predicate, pub_y_fields[i], field_ct(default_y_bytes[i]));
}
} else {
BB_ASSERT(input.predicate.value, "Creating ECDSA constraints with a constant predicate equal to false.");
}

// Step 2: Convert conditionally-assigned fields to byte arrays (adds range constraints on the correct values)
byte_array_ct hashed_message = fields_to_bytes(builder, hashed_message_fields);
byte_array_ct pub_x_bytes = fields_to_bytes(builder, pub_x_fields);
byte_array_ct pub_y_bytes = fields_to_bytes(builder, pub_y_fields);
byte_array_ct r = fields_to_bytes(builder, r_fields);
byte_array_ct s = fields_to_bytes(builder, s_fields);
bool_ct result = static_cast<bool_ct>(result_field); // Constructor enforces result = 0 or 1
bool_ct predicate = static_cast<bool_ct>(predicate_field); // Constructor enforces predicate = 0 or 1
bool_ct result(result_field); // Constructor enforces result = 0 or 1

// Step 2.
// Step 3: Construct public key from byte arrays
Fq pub_x(pub_x_bytes);
Fq pub_y(pub_y_bytes);
// This constructor sets the infinity flag of public_key to false. This is OK because the point at infinity is not a
// point on the curve and we check that public_key is on the curve.
G1 public_key(pub_x, pub_y);

// Step 3.
// There is one remaining edge case that happens with negligible probability, see here:
// https://github.com/AztecProtocol/barretenberg/issues/1570
if (!input.predicate.is_constant) {
r[0] = field_ct::conditional_assign(predicate, r[0], field_ct(1)); // 0 < r < n
s[0] = field_ct::conditional_assign(predicate, s[0], field_ct(1)); // 0 < s < n/2

// P is on the curve
typename Curve::AffineElement default_point(Curve::g1::one + Curve::g1::one);
// BIGGROUP_AUDITTODO: mutable accessor needed for conditional_assign(). Could add a conditional_assign method
// to biggroup or could just perform these operations on the underlying fields prior to constructing the
// biggroup element.
public_key.x() = Fq::conditional_assign(predicate, public_key.x(), default_point.x());
public_key.y() = Fq::conditional_assign(predicate, public_key.y(), default_point.y());
} else {
BB_ASSERT(input.predicate.value, "Creating ECDSA constraints with a constant predicate equal to false.");
}

// Step 4.
bool_ct signature_result =
stdlib::ecdsa_verify_signature<Builder, Curve, Fq, Fr, G1>(hashed_message, public_key, { r, s });
Expand Down
Loading
Loading