From 2e93ba9b9e7b10d6ba35c2e0d94ab0cf277e6d37 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 7 Sep 2023 14:34:54 +0100 Subject: [PATCH 01/40] updated deps --- Cargo.toml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index af4c060..096ce8f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ark-marlin" -version = "0.3.0" +version = "0.4.0" authors = [ "Alessandro Chiesa ", "Mary Maller ", @@ -20,12 +20,12 @@ license = "MIT/Apache-2.0" edition = "2018" [dependencies] -ark-serialize = { version = "^0.3.0", default-features = false, features = [ "derive" ] } -ark-ff = { version = "^0.3.0", default-features = false } -ark-std = { version = "^0.3.0", default-features = false } -ark-poly = { version = "^0.3.0", default-features = false } -ark-relations = { version = "^0.3.0", default-features = false } -ark-poly-commit = { version = "^0.3.0", default-features = false } +ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] } +ark-ff = { version = "^0.4.0", default-features = false } +ark-std = { version = "^0.4.0", default-features = false } +ark-poly = { version = "^0.4.0", default-features = false } +ark-relations = { version = "^0.4.0", default-features = false } +ark-poly-commit = { version = "^0.4.0", default-features = false } rayon = { version = "1", optional = true } digest = { version = "0.9" } @@ -34,11 +34,11 @@ derivative = { version = "2", features = ["use_core"] } [dev-dependencies] rand_chacha = { version = "0.3.0", default-features = false } blake2 = { version = "0.9", default-features = false } -ark-bls12-381 = { version = "^0.3.0", default-features = false, features = [ "curve" ] } -ark-mnt4-298 = { version = "^0.3.0", default-features = false, features = ["r1cs", "curve"] } -ark-mnt6-298 = { version = "^0.3.0", default-features = false, features = ["r1cs"] } -ark-mnt4-753 = { version = "^0.3.0", default-features = false, features 
= ["r1cs", "curve"] } -ark-mnt6-753 = { version = "^0.3.0", default-features = false, features = ["r1cs"] } +ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } +ark-mnt4-298 = { version = "^0.4.0", default-features = false, features = ["r1cs", "curve"] } +ark-mnt6-298 = { version = "^0.4.0", default-features = false, features = ["r1cs"] } +ark-mnt4-753 = { version = "^0.4.0", default-features = false, features = ["r1cs", "curve"] } +ark-mnt6-753 = { version = "^0.4.0", default-features = false, features = ["r1cs"] } [profile.release] opt-level = 3 From a4b81efe56195ba86fe780199a6c84d80ca4ce51 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 7 Sep 2023 15:30:35 +0100 Subject: [PATCH 02/40] implemented canonical serialization --- src/ahp/constraint_systems.rs | 7 +-- src/ahp/indexer.rs | 21 ++++++--- src/ahp/mod.rs | 2 +- src/ahp/prover.rs | 83 +++++++---------------------------- src/data_structures.rs | 36 ++++++++++----- src/lib.rs | 31 +++++++++++-- src/rng.rs | 14 +++--- 7 files changed, 92 insertions(+), 102 deletions(-) diff --git a/src/ahp/constraint_systems.rs b/src/ahp/constraint_systems.rs index ddac9ca..bee9525 100644 --- a/src/ahp/constraint_systems.rs +++ b/src/ahp/constraint_systems.rs @@ -6,11 +6,8 @@ use crate::BTreeMap; use ark_ff::{Field, PrimeField}; use ark_poly::{EvaluationDomain, Evaluations as EvaluationsOnDomain, GeneralEvaluationDomain}; use ark_relations::{lc, r1cs::ConstraintSystemRef}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; -use ark_std::{ - cfg_iter_mut, - io::{Read, Write}, -}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::cfg_iter_mut; use derivative::Derivative; /* ************************************************************************* */ diff --git a/src/ahp/indexer.rs b/src/ahp/indexer.rs index b43c580..cdf2f32 100644 --- a/src/ahp/indexer.rs +++ b/src/ahp/indexer.rs @@ -12,7 +12,7 @@ use ark_poly::{EvaluationDomain, 
GeneralEvaluationDomain}; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, SynthesisMode, }; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::{ io::{Read, Write}, marker::PhantomData, @@ -60,11 +60,20 @@ impl IndexInfo { } } -impl ark_ff::ToBytes for IndexInfo { - fn write(&self, mut w: W) -> ark_std::io::Result<()> { - (self.num_variables as u64).write(&mut w)?; - (self.num_constraints as u64).write(&mut w)?; - (self.num_non_zero as u64).write(&mut w) +impl CanonicalSerialize for IndexInfo { + fn serialize_with_mode( + &self, + writer: W, + compress: ark_serialize::Compress, + ) -> Result<(), ark_serialize::SerializationError> { + + (self.num_variables as u64).write(&mut writer)?; + (self.num_constraints as u64).write(&mut writer)?; + (self.num_non_zero as u64).write(&mut writer) + } + + fn serialized_size(&self) -> usize { + 3 * ark_std::mem::size_of::() } } diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs index 383a6e8..e2bc747 100644 --- a/src/ahp/mod.rs +++ b/src/ahp/mod.rs @@ -334,7 +334,7 @@ mod tests { use ark_ff::{One, UniformRand, Zero}; use ark_poly::{ univariate::{DenseOrSparsePolynomial, DensePolynomial}, - Polynomial, UVPolynomial, + Polynomial, DenseUVPolynomial, }; #[test] diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 60039f7..44902ee 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -11,11 +11,13 @@ use crate::{ToString, Vec}; use ark_ff::{Field, PrimeField, Zero}; use ark_poly::{ univariate::DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOnDomain, - GeneralEvaluationDomain, Polynomial, UVPolynomial, + GeneralEvaluationDomain, Polynomial, DenseUVPolynomial, }; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, }; +use ark_serialize::Compress; +use ark_serialize::Validate; use 
ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; use ark_std::rand::RngCore; use ark_std::{ @@ -72,86 +74,31 @@ pub enum ProverMsg { FieldElements(Vec), } -impl ark_ff::ToBytes for ProverMsg { - fn write(&self, w: W) -> ark_std::io::Result<()> { - match self { - ProverMsg::EmptyMessage => Ok(()), - ProverMsg::FieldElements(field_elems) => field_elems.write(w), - } - } -} - impl CanonicalSerialize for ProverMsg { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - let res: Option> = match self { - ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => Some(v.clone()), - }; - res.serialize(&mut writer) - } - - fn serialized_size(&self) -> usize { - let res: Option> = match self { - ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => Some(v.clone()), - }; - res.serialized_size() - } - - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialize_with_mode(&self, mut writer: W, compress: Compress) -> Result<(), SerializationError> { let res: Option> = match self { ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => Some(v.clone()), + ProverMsg::FieldElements(v) => v.serialize_with_mode(writer, compress), }; - res.serialize_unchecked(&mut writer) + Ok(res) } - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialized_size(&self, compress: Compress) -> usize { let res: Option> = match self { - ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => Some(v.clone()), + ProverMsg::EmptyMessage => 0, + ProverMsg::FieldElements(v) => v.serialized_size(compress), }; - res.serialize_uncompressed(&mut writer) + Ok(res) } +} - fn uncompressed_size(&self) -> usize { +impl CanonicalDeserialize for ProverMsg { + fn deserialize_with_mode(&self, mut reader: R, compress:Compress, validate: Validate) -> Result { let res: Option> = match self { ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => 
Some(v.clone()), + ProverMsg::FieldElements(v) => v.deserialize_with_mode(reader, compress, validate), }; - res.uncompressed_size() - } -} - -impl CanonicalDeserialize for ProverMsg { - fn deserialize(mut reader: R) -> Result { - let res = Option::>::deserialize(&mut reader)?; - - if let Some(res) = res { - Ok(ProverMsg::FieldElements(res)) - } else { - Ok(ProverMsg::EmptyMessage) - } - } - - fn deserialize_unchecked(mut reader: R) -> Result { - let res = Option::>::deserialize_unchecked(&mut reader)?; - - if let Some(res) = res { - Ok(ProverMsg::FieldElements(res)) - } else { - Ok(ProverMsg::EmptyMessage) - } - } - - fn deserialize_uncompressed(mut reader: R) -> Result { - let res = Option::>::deserialize_uncompressed(&mut reader)?; - - if let Some(res) = res { - Ok(ProverMsg::FieldElements(res)) - } else { - Ok(ProverMsg::EmptyMessage) - } + Ok(res) } } diff --git a/src/data_structures.rs b/src/data_structures.rs index 66b5643..84b7488 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -4,10 +4,10 @@ use crate::Vec; use ark_ff::PrimeField; use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{BatchLCProof, PolynomialCommitment}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::{ format, - io::{Read, Write}, + io::Write, }; /* ************************************************************************* */ @@ -33,12 +33,24 @@ pub struct IndexVerifierKey>> ark_ff::ToBytes +impl>> CanonicalSerialize for IndexVerifierKey { - fn write(&self, mut w: W) -> ark_std::io::Result<()> { - self.index_info.write(&mut w)?; - self.index_comms.write(&mut w) + fn serialize_with_mode( + &self, + writer: W, + compress: ark_serialize::Compress, + ) -> Result<(), ark_serialize::SerializationError> { + self.index_info.serialize_with_mode(writer, compress)?; + self.index_comms.serialize_with_mode(writer, compress)?; + 
self.verifier_key.serialize_with_mode(writer, compress)?; + Ok(()) + } + + fn serialized_size(&self, compress: ark_serialize::Compress) -> usize { + self.index_info.serialized_size(compress) + + self.index_comms.serialized_size(compress) + + self.verifier_key.serialized_size(compress) } } @@ -136,19 +148,19 @@ impl>> Proof = self.pc_proof.proof.clone().into(); let num_proofs = proofs.len(); - let size_bytes_proofs = self.pc_proof.proof.serialized_size(); + let size_bytes_proofs = self.pc_proof.proof.compressed_size(); let num_evals = self.evaluations.len(); - let evals_size_in_bytes = self.evaluations.serialized_size(); + let evals_size_in_bytes = self.evaluations.compressed_size(); let num_prover_messages: usize = self .prover_messages .iter() @@ -157,8 +169,8 @@ impl>> Proof elems.len(), }) .sum(); - let prover_msg_size_in_bytes = self.prover_messages.serialized_size(); - let arg_size = self.serialized_size(); + let prover_msg_size_in_bytes = self.prover_messages.compressed_size(); + let arg_size = self.compressed_size(); let stats = format!( "Argument size in bytes: {}\n\n\ Number of commitments without degree bounds: {}\n\ diff --git a/src/lib.rs b/src/lib.rs index 7ca47b4..aa49b90 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -8,17 +8,17 @@ //! matrices are square). Furthermore, Marlin only supports instances where the //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1). 
#![deny(unused_import_braces, unused_qualifications, trivial_casts)] -#![deny(trivial_numeric_casts, private_in_public)] +#![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] -#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)] +#![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] #[macro_use] extern crate ark_std; -use ark_ff::{to_bytes, PrimeField, UniformRand}; +use ark_ff::{PrimeField, UniformRand}; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::Evaluations; use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment}; @@ -33,6 +33,7 @@ use ark_std::{ vec, vec::Vec, }; +use ark_serialize::CanonicalSerialize; #[cfg(not(feature = "std"))] macro_rules! eprintln { @@ -40,6 +41,30 @@ macro_rules! eprintln { ($($arg: tt)*) => {}; } +/// Takes as input a sequence of structs, and converts them to a series of +/// bytes. All traits that implement `Bytes` can be automatically converted to +/// bytes in this manner. +#[macro_export] +macro_rules! to_bytes { + ($($x:expr),*) => ({ + let mut buf = $crate::vec![]; + {$crate::push_to_vec!(buf, $($x),*)}.map(|_| buf) + }); +} + +#[doc(hidden)] +#[macro_export] +macro_rules! push_to_vec { + ($buf:expr, $y:expr, $($x:expr),*) => ({ + { + $crate::CanonicalSerialize::write(&$y, &mut $buf) + }.and({$crate::push_to_vec!($buf, $($x),*)}) + }); + + ($buf:expr, $x:expr) => ({ + $crate::CanonicalSerialize::write(&$x, &mut $buf) + }) +} /// Implements a Fiat-Shamir based Rng that allows one to incrementally update /// the seed based on new messages in the proof transcript. 
pub mod rng; diff --git a/src/rng.rs b/src/rng.rs index efea8e5..0bf3689 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,5 +1,5 @@ use crate::Vec; -use ark_ff::{FromBytes, ToBytes}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::convert::From; use ark_std::marker::PhantomData; use ark_std::rand::{RngCore, SeedableRng}; @@ -8,9 +8,9 @@ use digest::Digest; /// An RNG suitable for Fiat-Shamir transforms pub trait FiatShamirRng: RngCore { /// Create a new `Self` with an initial input - fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self; + fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self; /// Absorb new inputs into state - fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T); + fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T); } /// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed @@ -51,12 +51,12 @@ where /// Create a new `Self` by initializing with a fresh seed. /// `self.seed = H(initial_input)`. #[inline] - fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self { + fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self { let mut bytes = Vec::new(); initial_input .write(&mut bytes) .expect("failed to convert to bytes"); - let seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); + let seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); let r = R::from_seed(::from(seed)); Self { r, @@ -68,13 +68,13 @@ where /// Refresh `self.seed` with new material. Achieved by setting /// `self.seed = H(new_input || self.seed)`. 
#[inline] - fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T) { + fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) { let mut bytes = Vec::new(); new_input .write(&mut bytes) .expect("failed to convert to bytes"); bytes.extend_from_slice(&self.seed); - self.seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); + self.seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); self.r = R::from_seed(::from(self.seed)); } } From c7ae1725a0f2ba27722e47532aa8209ec908a3db Mon Sep 17 00:00:00 2001 From: AB Date: Fri, 8 Sep 2023 12:22:55 +0100 Subject: [PATCH 03/40] moved sponge and fiat shamir in --- Cargo.toml | 7 +- src/lib.rs | 4 +- src/rng.rs | 80 ----- src/rng/fiat_shamir/constraints.rs | 433 +++++++++++++++++++++++ src/rng/fiat_shamir/mod.rs | 537 +++++++++++++++++++++++++++++ src/rng/mod.rs | 5 + src/rng/sponge/mod.rs | 62 ++++ src/rng/sponge/poseidon.rs | 303 ++++++++++++++++ 8 files changed, 1348 insertions(+), 83 deletions(-) delete mode 100644 src/rng.rs create mode 100644 src/rng/fiat_shamir/constraints.rs create mode 100644 src/rng/fiat_shamir/mod.rs create mode 100644 src/rng/mod.rs create mode 100644 src/rng/sponge/mod.rs create mode 100644 src/rng/sponge/poseidon.rs diff --git a/Cargo.toml b/Cargo.toml index 096ce8f..614da48 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,13 +26,18 @@ ark-std = { version = "^0.4.0", default-features = false } ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } ark-poly-commit = { version = "^0.4.0", default-features = false } +ark-r1cs-std = { version = "^0.4.0", default-features = false } +ark-sponge = { version = "^0.3.0", default-features = false, features = [ "r1cs" ] } +ark-nonnative-field = { version = "^0.3.0", default-features = false } +ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = [ "r1cs" ] } rayon = { 
version = "1", optional = true } digest = { version = "0.9" } derivative = { version = "2", features = ["use_core"] } +rand_chacha = { version = "^0.3.1", default-features = false } +tracing = { version = "0.1", default-features = false, features = [ "attributes" ] } [dev-dependencies] -rand_chacha = { version = "0.3.0", default-features = false } blake2 = { version = "0.9", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-mnt4-298 = { version = "^0.4.0", default-features = false, features = ["r1cs", "curve"] } diff --git a/src/lib.rs b/src/lib.rs index aa49b90..889b855 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,7 @@ use ark_std::{ vec::Vec, }; use ark_serialize::CanonicalSerialize; +use crate::rng::FiatShamirRng; #[cfg(not(feature = "std"))] macro_rules! eprintln { @@ -68,8 +69,7 @@ macro_rules! push_to_vec { /// Implements a Fiat-Shamir based Rng that allows one to incrementally update /// the seed based on new messages in the proof transcript. pub mod rng; -use rng::FiatShamirRng; -pub use rng::SimpleHashFiatShamirRng; +// pub use rng::*; mod error; pub use error::*; diff --git a/src/rng.rs b/src/rng.rs deleted file mode 100644 index 0bf3689..0000000 --- a/src/rng.rs +++ /dev/null @@ -1,80 +0,0 @@ -use crate::Vec; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::convert::From; -use ark_std::marker::PhantomData; -use ark_std::rand::{RngCore, SeedableRng}; -use digest::Digest; - -/// An RNG suitable for Fiat-Shamir transforms -pub trait FiatShamirRng: RngCore { - /// Create a new `Self` with an initial input - fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self; - /// Absorb new inputs into state - fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T); -} - -/// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed -/// and the new seed material. 
-pub struct SimpleHashFiatShamirRng { - r: R, - seed: [u8; 32], - #[doc(hidden)] - digest: PhantomData, -} - -impl RngCore for SimpleHashFiatShamirRng { - #[inline] - fn next_u32(&mut self) -> u32 { - self.r.next_u32() - } - - #[inline] - fn next_u64(&mut self) -> u64 { - self.r.next_u64() - } - - #[inline] - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.r.fill_bytes(dest); - } - - #[inline] - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - Ok(self.r.fill_bytes(dest)) - } -} - -impl FiatShamirRng for SimpleHashFiatShamirRng -where - R::Seed: From<[u8; 32]>, -{ - /// Create a new `Self` by initializing with a fresh seed. - /// `self.seed = H(initial_input)`. - #[inline] - fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self { - let mut bytes = Vec::new(); - initial_input - .write(&mut bytes) - .expect("failed to convert to bytes"); - let seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); - let r = R::from_seed(::from(seed)); - Self { - r, - seed: seed, - digest: PhantomData, - } - } - - /// Refresh `self.seed` with new material. Achieved by setting - /// `self.seed = H(new_input || self.seed)`. 
- #[inline] - fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) { - let mut bytes = Vec::new(); - new_input - .write(&mut bytes) - .expect("failed to convert to bytes"); - bytes.extend_from_slice(&self.seed); - self.seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); - self.r = R::from_seed(::from(self.seed)); - } -} diff --git a/src/rng/fiat_shamir/constraints.rs b/src/rng/fiat_shamir/constraints.rs new file mode 100644 index 0000000..81a4d71 --- /dev/null +++ b/src/rng/fiat_shamir/constraints.rs @@ -0,0 +1,433 @@ +use crate::rng::CryptographicSpongeVarNonNative; +use crate::rng::CryptographicSpongeParameters; +use crate::{overhead,Vec}; +use ark_ff::PrimeField; +use ark_nonnative_field::params::{get_params, OptimizationType}; +use ark_nonnative_field::{AllocatedNonNativeFieldVar, NonNativeFieldVar}; +use ark_r1cs_std::{ + alloc::AllocVar, + bits::{uint8::UInt8, ToBitsGadget}, + boolean::Boolean, + fields::fp::AllocatedFp, + fields::fp::FpVar, + R1CSVar, +}; +use ark_relations::lc; +use ark_relations::r1cs::{ + ConstraintSystemRef, LinearCombination, OptimizationGoal, SynthesisError, +}; +use ark_sponge::constraints::{AbsorbGadget, CryptographicSpongeVar}; +use ark_sponge::CryptographicSponge; +use core::marker::PhantomData; + +/// Vars for a RNG for use in a Fiat-Shamir transform. +pub trait FiatShamirRngVar: + From> + CryptographicSpongeVar +{ + // Instantiate from a plaintext fs_rng. + fn constant(cs: ConstraintSystemRef, pfs: &S) -> Self; + + /// Take in field elements. + fn absorb_nonnative( + &mut self, + elems: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError>; + + /// Take in field elements. + fn absorb_native(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError>; + + /// Take in bytes. + fn absorb_bytes(&mut self, elems: &[UInt8]) -> Result<(), SynthesisError>; + + /// Output field elements. 
+ fn squeeze_native(&mut self, num: usize) -> Result>, SynthesisError>; + + /// Output field elements. + fn squeeze_nonnative( + &mut self, + num: usize, + ) -> Result>, SynthesisError>; + + /// Output field elements with only 128 bits. + fn squeeze_128_bits_nonnative( + &mut self, + num: usize, + ) -> Result>, SynthesisError>; + + /// Output field elements with only 128 bits, and the corresponding bits (this can reduce + /// repeated computation). + #[allow(clippy::type_complexity)] + fn squeeze_128_bits_nonnative_and_bits( + &mut self, + num: usize, + ) -> Result<(Vec>, Vec>>), SynthesisError>; +} + +/// Building the Fiat-Shamir sponge's gadget from any algebraic sponge's gadget. +#[derive(Clone)] +pub struct FiatShamirSpongeRngVar< + F: PrimeField, + CF: PrimeField, + PS: CryptographicSponge, + S: CryptographicSpongeVar, +> { + pub cs: ConstraintSystemRef, + pub s: S, + #[doc(hidden)] + f_phantom: PhantomData, + cf_phantom: PhantomData, + ps_phantom: PhantomData, +} + +impl> + FiatShamirSpongeRngVar +{ + /// Compress every two elements if possible. Provides a vector of (limb, num_of_additions), + /// both of which are CF. 
+ #[tracing::instrument(target = "r1cs")] + pub fn compress_gadgets( + src_limbs: &[(FpVar, CF)], + ty: OptimizationType, + ) -> Result>, SynthesisError> { + let capacity = CF::size_in_bits() - 1; + let mut dest_limbs = Vec::>::new(); + + if src_limbs.is_empty() { + return Ok(vec![]); + } + + let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); + + let adjustment_factor_lookup_table = { + let mut table = Vec::::new(); + + let mut cur = CF::one(); + for _ in 1..=capacity { + table.push(cur); + cur.double_in_place(); + } + + table + }; + + let mut i: usize = 0; + let src_len = src_limbs.len(); + while i < src_len { + let first = &src_limbs[i]; + let second = if i + 1 < src_len { + Some(&src_limbs[i + 1]) + } else { + None + }; + + let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); + let second_max_bits_per_limb = if second.is_some() { + params.bits_per_limb + overhead!(second.unwrap().1 + &CF::one()) + } else { + 0 + }; + + if second.is_some() && first_max_bits_per_limb + second_max_bits_per_limb <= capacity { + let adjustment_factor = &adjustment_factor_lookup_table[second_max_bits_per_limb]; + + dest_limbs.push(&first.0 * *adjustment_factor + &second.unwrap().0); + i += 2; + } else { + dest_limbs.push(first.0.clone()); + i += 1; + } + } + + Ok(dest_limbs) + } + + /// Push gadgets to sponge. 
+ #[tracing::instrument(target = "r1cs", skip(sponge))] + pub fn push_gadgets_to_sponge( + sponge: &mut S, + src: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError> { + let mut src_limbs: Vec<(FpVar, CF)> = Vec::new(); + + for elem in src.iter() { + match elem { + NonNativeFieldVar::Constant(c) => { + let v = AllocatedNonNativeFieldVar::::new_constant(sponge.cs(), c)?; + + for limb in v.limbs.iter() { + let num_of_additions_over_normal_form = + if v.num_of_additions_over_normal_form == CF::zero() { + CF::one() + } else { + v.num_of_additions_over_normal_form + }; + src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); + } + } + NonNativeFieldVar::Var(v) => { + for limb in v.limbs.iter() { + let num_of_additions_over_normal_form = + if v.num_of_additions_over_normal_form == CF::zero() { + CF::one() + } else { + v.num_of_additions_over_normal_form + }; + src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); + } + } + } + } + + let dest_limbs = Self::compress_gadgets(&src_limbs, ty)?; + sponge.absorb(&dest_limbs)?; + Ok(()) + } + + /// Obtain random bits from hashchain gadget. (Not guaranteed to be uniformly distributed, + /// should only be used in certain situations.) + #[tracing::instrument(target = "r1cs", skip(sponge))] + pub fn get_booleans_from_sponge( + sponge: &mut S, + num_bits: usize, + ) -> Result>, SynthesisError> { + let bits_per_element = CF::size_in_bits() - 1; + let num_elements = (num_bits + bits_per_element - 1) / bits_per_element; + + let src_elements = sponge.squeeze_field_elements(num_elements)?; + let mut dest_bits = Vec::>::new(); + + for elem in src_elements.iter() { + let elem_bits = elem.to_bits_be()?; + dest_bits.extend_from_slice(&elem_bits[1..]); // discard the highest bit + } + + Ok(dest_bits) + } + + /// Obtain random elements from hashchain gadget. (Not guaranteed to be uniformly distributed, + /// should only be used in certain situations.) 
+ #[tracing::instrument(target = "r1cs", skip(sponge))] + pub fn get_gadgets_from_sponge( + sponge: &mut S, + num_elements: usize, + outputs_short_elements: bool, + ) -> Result>, SynthesisError> { + let (dest_gadgets, _) = + Self::get_gadgets_and_bits_from_sponge(sponge, num_elements, outputs_short_elements)?; + + Ok(dest_gadgets) + } + + /// Obtain random elements, and the corresponding bits, from hashchain gadget. (Not guaranteed + /// to be uniformly distributed, should only be used in certain situations.) + #[tracing::instrument(target = "r1cs", skip(sponge))] + #[allow(clippy::type_complexity)] + pub fn get_gadgets_and_bits_from_sponge( + sponge: &mut S, + num_elements: usize, + outputs_short_elements: bool, + ) -> Result<(Vec>, Vec>>), SynthesisError> { + let cs = sponge.cs(); + + let optimization_type = match cs.optimization_goal() { + OptimizationGoal::None => OptimizationType::Constraints, + OptimizationGoal::Constraints => OptimizationType::Constraints, + OptimizationGoal::Weight => OptimizationType::Weight, + }; + + let params = get_params(F::size_in_bits(), CF::size_in_bits(), optimization_type); + + let num_bits_per_nonnative = if outputs_short_elements { + 128 + } else { + F::size_in_bits() - 1 // also omit the highest bit + }; + let bits = Self::get_booleans_from_sponge(sponge, num_bits_per_nonnative * num_elements)?; + + let mut lookup_table = Vec::>::new(); + let mut cur = F::one(); + for _ in 0..num_bits_per_nonnative { + let repr = AllocatedNonNativeFieldVar::::get_limbs_representations( + &cur, + optimization_type, + )?; + lookup_table.push(repr); + cur.double_in_place(); + } + + let mut dest_gadgets = Vec::>::new(); + let mut dest_bits = Vec::>>::new(); + bits.chunks_exact(num_bits_per_nonnative) + .for_each(|per_nonnative_bits| { + let mut val = vec![CF::zero(); params.num_limbs]; + let mut lc = vec![LinearCombination::::zero(); params.num_limbs]; + + let mut per_nonnative_bits_le = per_nonnative_bits.to_vec(); + 
per_nonnative_bits_le.reverse(); + + dest_bits.push(per_nonnative_bits_le.clone()); + + for (j, bit) in per_nonnative_bits_le.iter().enumerate() { + if bit.value().unwrap_or_default() { + for (k, val) in val.iter_mut().enumerate().take(params.num_limbs) { + *val += &lookup_table[j][k]; + } + } + + #[allow(clippy::needless_range_loop)] + for k in 0..params.num_limbs { + lc[k] = &lc[k] + bit.lc() * lookup_table[j][k]; + } + } + + let mut limbs = Vec::new(); + for k in 0..params.num_limbs { + let gadget = + AllocatedFp::new_witness(ark_relations::ns!(cs, "alloc"), || Ok(val[k])) + .unwrap(); + lc[k] = lc[k].clone() - (CF::one(), gadget.variable); + cs.enforce_constraint(lc!(), lc!(), lc[k].clone()).unwrap(); + limbs.push(FpVar::::from(gadget)); + } + + dest_gadgets.push(NonNativeFieldVar::::Var( + AllocatedNonNativeFieldVar:: { + cs: cs.clone(), + limbs, + num_of_additions_over_normal_form: CF::zero(), + is_in_the_normal_form: true, + target_phantom: Default::default(), + }, + )); + }); + + Ok((dest_gadgets, dest_bits)) + } +} + +impl< + F: PrimeField, + CF: PrimeField, + PS: CryptographicSponge, + S: CryptographicSpongeVarNonNative, + > From> for FiatShamirSpongeRngVar +where + >::Parameters: CryptographicSpongeParameters, +{ + fn from(cs: ConstraintSystemRef) -> Self { + Self { + cs: cs.clone(), + s: S::with_default_rate(cs), + f_phantom: PhantomData, + cf_phantom: PhantomData, + ps_phantom: PhantomData, + } + } +} + +impl> + CryptographicSpongeVar for FiatShamirSpongeRngVar +{ + type Parameters = S::Parameters; + + fn new(cs: ConstraintSystemRef, params: &Self::Parameters) -> Self { + Self { + cs: cs.clone(), + s: S::new(cs, params), + f_phantom: PhantomData, + cf_phantom: PhantomData, + ps_phantom: PhantomData, + } + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn cs(&self) -> ConstraintSystemRef { + self.cs.clone() + } + + fn absorb(&mut self, input: &impl AbsorbGadget) -> Result<(), SynthesisError> { + self.s.absorb(input) + } + + 
#[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_bytes(&mut self, num_bytes: usize) -> Result>, SynthesisError> { + self.s.squeeze_bytes(num_bytes) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_bits(&mut self, num_bits: usize) -> Result>, SynthesisError> { + self.s.squeeze_bits(num_bits) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_field_elements( + &mut self, + num_elements: usize, + ) -> Result>, SynthesisError> { + self.s.squeeze_field_elements(num_elements) + } +} + +impl< + F: PrimeField, + CF: PrimeField, + PS: CryptographicSponge, + S: CryptographicSpongeVarNonNative, + > FiatShamirRngVar for FiatShamirSpongeRngVar +where + >::Parameters: CryptographicSpongeParameters, +{ + fn constant(cs: ConstraintSystemRef, _pfs: &PS) -> Self { + Self::from(cs) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn absorb_nonnative( + &mut self, + elems: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError> { + Self::push_gadgets_to_sponge(&mut self.s, &elems.to_vec(), ty) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn absorb_native(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError> { + self.absorb(&elems) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn absorb_bytes(&mut self, elems: &[UInt8]) -> Result<(), SynthesisError> { + self.absorb(&elems) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_native(&mut self, num: usize) -> Result>, SynthesisError> { + self.s.squeeze_field_elements(num) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_nonnative( + &mut self, + num: usize, + ) -> Result>, SynthesisError> { + Self::get_gadgets_from_sponge(&mut self.s, num, false) + } + + #[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_128_bits_nonnative( + &mut self, + num: usize, + ) -> Result>, SynthesisError> { + Self::get_gadgets_from_sponge(&mut self.s, num, true) + } + + 
#[tracing::instrument(target = "r1cs", skip(self))] + fn squeeze_128_bits_nonnative_and_bits( + &mut self, + num: usize, + ) -> Result<(Vec>, Vec>>), SynthesisError> { + Self::get_gadgets_and_bits_from_sponge(&mut self.s, num, true) + } +} diff --git a/src/rng/fiat_shamir/mod.rs b/src/rng/fiat_shamir/mod.rs new file mode 100644 index 0000000..4e430f8 --- /dev/null +++ b/src/rng/fiat_shamir/mod.rs @@ -0,0 +1,537 @@ +use crate::rng::{CryptographicSpongeParameters, CryptographicSpongeWithRate}; +use crate::Vec; +use ark_ff::{BigInteger, PrimeField, ToConstraintField}; +use ark_nonnative_field::params::{get_params, OptimizationType}; +use ark_nonnative_field::AllocatedNonNativeFieldVar; +use ark_sponge::{Absorb, CryptographicSponge}; +use ark_std::io::{Read, Result as IoResult, Write}; +use ark_std::marker::PhantomData; +use ark_std::rand::{RngCore, SeedableRng}; +use core::{cmp, iter}; +use digest::Digest; +use rand_chacha::ChaChaRng; + +/// The constraints for Fiat-Shamir +pub mod constraints; + +/// a macro for computing ceil(log2(x))+1 for a field element x +#[doc(hidden)] +#[macro_export] +macro_rules! 
overhead { + ($x:expr) => {{ + use ark_ff::BigInteger; + let num = $x; + let num_bits = num.into_repr().to_bits_be(); + let mut skipped_bits = 0; + for b in num_bits.iter() { + if *b == false { + skipped_bits += 1; + } else { + break; + } + } + + let mut is_power_of_2 = true; + for b in num_bits.iter().skip(skipped_bits + 1) { + if *b == true { + is_power_of_2 = false; + } + } + + if is_power_of_2 { + num_bits.len() - skipped_bits + } else { + num_bits.len() - skipped_bits + 1 + } + }}; +} + +/// the trait for Fiat-Shamir RNG +pub trait FiatShamirRng: + Default + RngCore + Write + CryptographicSponge +{ + /// take in field elements + fn absorb_nonnative(&mut self, elems: &[F], ty: OptimizationType); + /// take in field elements + fn absorb_native>(&mut self, elems: &[T]); + /// take in bytes + fn absorb_bytes(&mut self, bytes: &[u8]) { + ::write(self, bytes).ok(); + } + + /// take out field elements + fn squeeze_nonnative(&mut self, num: usize, ty: OptimizationType) -> Vec; + /// take in field elements + fn squeeze_native(&mut self, num: usize) -> Vec; + /// take out field elements of 128 bits + fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec; +} + +/// use a ChaCha stream cipher to generate the actual pseudorandom bits +/// use a digest funcion to do absorbing +pub struct FiatShamirChaChaRng { + pub r: ChaChaRng, + pub seed: Vec, + #[doc(hidden)] + field: PhantomData, + representation_field: PhantomData, + digest: PhantomData, +} + +impl Default for FiatShamirChaChaRng { + fn default() -> Self { + let seed = [0; 32]; + let r = ChaChaRng::from_seed(seed); + + Self { + r, + seed: seed.to_vec(), + field: PhantomData, + representation_field: PhantomData, + digest: PhantomData, + } + } +} + +impl Clone for FiatShamirChaChaRng { + fn clone(&self) -> Self { + Self { + r: self.r.clone(), + seed: self.seed.clone(), + field: PhantomData, + representation_field: PhantomData, + digest: PhantomData, + } + } +} + +impl RngCore for FiatShamirChaChaRng { + fn 
next_u32(&mut self) -> u32 { + self.r.next_u32() + } + + fn next_u64(&mut self) -> u64 { + self.r.next_u64() + } + + fn fill_bytes(&mut self, dest: &mut [u8]) { + self.r.fill_bytes(dest) + } + + fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { + self.r.try_fill_bytes(dest) + } +} + +impl FiatShamirRng + for FiatShamirChaChaRng +{ + fn absorb_nonnative(&mut self, elems: &[F], _: OptimizationType) { + elems + .iter() + .try_for_each(|elem| elem.write(&mut *self)) + .expect("failed to convert to bytes"); + } + + fn absorb_native>(&mut self, elems: &[T]) { + elems + .iter() + .filter_map(|elem| elem.to_field_elements()) + .flat_map(|v| v.into_iter()) + .try_for_each(|elem| elem.write(&mut *self)) + .expect("failed to convert to bytes"); + } + + fn squeeze_nonnative(&mut self, num: usize, _: OptimizationType) -> Vec { + iter::from_fn(|| Some(F::rand(&mut self.r))) + .take(num) + .collect() + } + + fn squeeze_native(&mut self, num: usize) -> Vec { + iter::from_fn(|| Some(CF::rand(&mut self.r))) + .take(num) + .collect() + } + + fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec { + let mut x = [0u8; 16]; + + iter::from_fn(|| { + self.r.fill_bytes(&mut x); + + let elem = F::from_random_bytes(&x).expect("failed to create field element"); + + Some(elem) + }) + .take(num) + .collect() + } +} + +impl Write for FiatShamirChaChaRng { + fn write(&mut self, buf: &[u8]) -> IoResult { + self.seed = D::digest(buf).to_vec(); + + let l = cmp::min(32, self.seed.len()); + let mut seed = [0u8; 32]; + + (&mut seed[..l]).copy_from_slice(&self.seed[..l]); + + self.r = ChaChaRng::from_seed(seed); + + Ok(buf.len()) + } + + fn flush(&mut self) -> IoResult<()> { + Ok(()) + } +} + +impl Read for FiatShamirChaChaRng { + fn read(&mut self, buf: &mut [u8]) -> IoResult { + self.fill_bytes(buf); + + Ok(buf.len()) + } +} + +impl CryptographicSponge + for FiatShamirChaChaRng +{ + type Parameters = (); + + fn new(_params: &Self::Parameters) -> Self { + let 
seed = [0; 32]; + let r = ChaChaRng::from_seed(seed); + + Self { + r, + seed: seed.to_vec(), + field: PhantomData, + representation_field: PhantomData, + digest: PhantomData, + } + } + + fn absorb(&mut self, input: &impl Absorb) { + let bytes = input.to_sponge_bytes_as_vec(); + + self.seed = D::digest(&bytes).to_vec(); + + let l = cmp::min(32, self.seed.len()); + let mut seed = [0u8; 32]; + + (&mut seed[..l]).copy_from_slice(&self.seed[..l]); + + self.r = ChaChaRng::from_seed(seed); + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + let mut output = vec![0u8; num_bytes]; + + self.fill_bytes(output.as_mut_slice()); + + output + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + self.squeeze_bytes(num_bits) + .into_iter() + .map(|b| (b & 0x01) == 1) + .collect() + } +} + +/// rng from any algebraic sponge +pub struct FiatShamirSpongeRng { + pub s: S, + #[doc(hidden)] + f_phantom: PhantomData, + cf_phantom: PhantomData, +} + +impl Clone + for FiatShamirSpongeRng +{ + fn clone(&self) -> Self { + Self { + s: self.s.clone(), + f_phantom: PhantomData, + cf_phantom: PhantomData, + } + } +} + +impl From + for FiatShamirSpongeRng +{ + fn from(s: S) -> Self { + Self { + s, + f_phantom: PhantomData, + cf_phantom: PhantomData, + } + } +} + +impl Default + for FiatShamirSpongeRng +where + ::Parameters: CryptographicSpongeParameters, +{ + fn default() -> Self { + S::with_default_rate().into() + } +} + +impl CryptographicSponge + for FiatShamirSpongeRng +{ + type Parameters = S::Parameters; + + fn new(params: &Self::Parameters) -> Self { + S::new(params).into() + } + + fn absorb(&mut self, input: &impl Absorb) { + self.s.absorb(input) + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + self.s.squeeze_bytes(num_bytes) + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + self.s.squeeze_bits(num_bits) + } +} + +impl FiatShamirRng + for FiatShamirSpongeRng +where + CF: Absorb, + ::Parameters: CryptographicSpongeParameters, +{ + fn 
absorb_nonnative(&mut self, elems: &[F], ty: OptimizationType) { + // FIXME ignoring faulty elements; maybe panic? + let src: Vec<(CF, CF)> = elems + .iter() + .filter_map(|elem| { + AllocatedNonNativeFieldVar::::get_limbs_representations(elem, ty).ok() + }) + .flatten() + // specifically set to one since most gadgets in the constraint world would not have + // zero noise (due to the relatively weak normal form testing in `alloc`) + .map(|limb| (limb, CF::one())) + .collect(); + + let dest = Self::compress_elements(&src, ty); + + self.absorb(&dest); + } + + fn absorb_native>(&mut self, elems: &[T]) { + elems + .iter() + .filter_map(|elem| elem.to_field_elements()) + .flat_map(|v| v.into_iter()) + .for_each(|elem| self.absorb(&elem)); + } + + fn squeeze_nonnative(&mut self, num: usize, _: OptimizationType) -> Vec { + Self::get_elements_from_sponge(&mut self.s, num, false) + } + + fn squeeze_native(&mut self, num: usize) -> Vec { + self.squeeze_field_elements(num) + } + + fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec { + Self::get_elements_from_sponge(&mut self.s, num, true) + } +} + +impl FiatShamirSpongeRng { + /// compress every two elements if possible. Provides a vector of (limb, num_of_additions), + /// both of which are P::BaseField. 
+ fn compress_elements(src_limbs: &[(CF, CF)], ty: OptimizationType) -> Vec { + let capacity = CF::size_in_bits() - 1; + let mut dest_limbs = Vec::::new(); + + let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); + + let adjustment_factor_lookup_table = { + let mut table = Vec::::new(); + + let mut cur = CF::one(); + for _ in 1..=capacity { + table.push(cur); + cur.double_in_place(); + } + + table + }; + + let mut i = 0; + let src_len = src_limbs.len(); + while i < src_len { + let first = &src_limbs[i]; + let second = if i + 1 < src_len { + Some(&src_limbs[i + 1]) + } else { + None + }; + + let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); + let second_max_bits_per_limb = if let Some(second) = second { + params.bits_per_limb + overhead!(second.1 + &CF::one()) + } else { + 0 + }; + + if let Some(second) = second { + if first_max_bits_per_limb + second_max_bits_per_limb <= capacity { + let adjustment_factor = + &adjustment_factor_lookup_table[second_max_bits_per_limb]; + + dest_limbs.push(first.0 * adjustment_factor + &second.0); + i += 2; + } else { + dest_limbs.push(first.0); + i += 1; + } + } else { + dest_limbs.push(first.0); + i += 1; + } + } + + dest_limbs + } + + /// obtain random elements from hashchain. + /// + /// not guaranteed to be uniformly distributed, should only be used in certain situations. 
+ fn get_elements_from_sponge( + sponge: &mut S, + num_elements: usize, + outputs_short_elements: bool, + ) -> Vec { + let num_bits_per_nonnative = if outputs_short_elements { + 128 + } else { + F::size_in_bits() - 1 // also omit the highest bit + }; + let bits = sponge.squeeze_bits(num_bits_per_nonnative * num_elements); + + let mut lookup_table = Vec::::new(); + let mut cur = F::one(); + for _ in 0..num_bits_per_nonnative { + lookup_table.push(cur); + cur.double_in_place(); + } + + let mut dest_elements = Vec::::new(); + bits.chunks_exact(num_bits_per_nonnative) + .for_each(|per_nonnative_bits| { + // this can be done via BigInterger::from_bits; here, we use this method for + // consistency with the gadget counterpart + let mut res = F::zero(); + + for (i, bit) in per_nonnative_bits.iter().rev().enumerate() { + if *bit { + res += &lookup_table[i]; + } + } + + dest_elements.push(res); + }); + + dest_elements + } +} + +impl RngCore + for FiatShamirSpongeRng +{ + fn next_u32(&mut self) -> u32 { + let mut dest = [0u8; 4]; + + self.fill_bytes(&mut dest); + + u32::from_be_bytes(dest) + } + + fn next_u64(&mut self) -> u64 { + let mut dest = [0u8; 8]; + + self.fill_bytes(&mut dest); + + u64::from_be_bytes(dest) + } + + fn fill_bytes(&mut self, dest: &mut [u8]) { + assert!( + CF::size_in_bits() > 128, + "The native field of the algebraic sponge is too small." 
+ ); + + let capacity = CF::size_in_bits() - 128; + let len = dest.len() * 8; + + let num_of_elements = (capacity + len - 1) / len; + let elements: Vec = self.s.squeeze_field_elements(num_of_elements); + + let mut bits = Vec::::new(); + for elem in elements.iter() { + let mut elem_bits = elem.into_repr().to_bits_be(); + elem_bits.reverse(); + bits.extend_from_slice(&elem_bits[0..capacity]); + } + + bits.truncate(len); + bits.chunks_exact(8) + .enumerate() + .for_each(|(i, bits_per_byte)| { + let mut byte = 0; + for (j, bit) in bits_per_byte.iter().enumerate() { + if *bit { + byte += 1 << j; + } + } + dest[i] = byte; + }); + } + + fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { + self.fill_bytes(dest); + + Ok(()) + } +} + +impl Write + for FiatShamirSpongeRng +where + CF: Absorb, + ::Parameters: CryptographicSpongeParameters, +{ + fn write(&mut self, buf: &[u8]) -> IoResult { + self.absorb(&buf); + + Ok(buf.len()) + } + + fn flush(&mut self) -> IoResult<()> { + Ok(()) + } +} + +impl Read for FiatShamirSpongeRng { + fn read(&mut self, buf: &mut [u8]) -> IoResult { + self.fill_bytes(buf); + + Ok(buf.len()) + } +} diff --git a/src/rng/mod.rs b/src/rng/mod.rs new file mode 100644 index 0000000..88b3c71 --- /dev/null +++ b/src/rng/mod.rs @@ -0,0 +1,5 @@ +mod sponge; +pub use sponge::*; + +mod fiat_shamir; +pub use fiat_shamir::*; \ No newline at end of file diff --git a/src/rng/sponge/mod.rs b/src/rng/sponge/mod.rs new file mode 100644 index 0000000..40414f9 --- /dev/null +++ b/src/rng/sponge/mod.rs @@ -0,0 +1,62 @@ +use ark_ff::PrimeField; +use ark_nonnative_field::{params::OptimizationType, NonNativeFieldVar}; +use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; +use ark_sponge::{constraints::CryptographicSpongeVar, CryptographicSponge}; + +pub mod poseidon; + +pub trait CryptographicSpongeParameters { + fn from_rate(rate: usize) -> Self; +} + +pub trait CryptographicSpongeWithRate: CryptographicSponge +where + 
::Parameters: CryptographicSpongeParameters, +{ + fn default_rate() -> usize; + + fn with_default_rate() -> Self { + let rate = Self::default_rate(); + + Self::from_rate(rate) + } + + fn from_rate(rate: usize) -> Self { + let params = + <::Parameters as CryptographicSpongeParameters>::from_rate( + rate, + ); + + ::new(¶ms) + } +} + +pub trait CryptographicSpongeVarNonNative: + CryptographicSpongeVar +where + >::Parameters: CryptographicSpongeParameters, +{ + fn default_rate() -> usize; + + fn with_default_rate(cs: ConstraintSystemRef) -> Self { + let rate = Self::default_rate(); + + Self::from_rate(cs, rate) + } + + fn from_rate(cs: ConstraintSystemRef, rate: usize) -> Self { + let params = + <>::Parameters as CryptographicSpongeParameters>::from_rate( + rate, + ); + + >::new(cs, ¶ms) + } + + /// Absorb non native elements + fn absorb_nonnative( + &mut self, + input: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError>; +} diff --git a/src/rng/sponge/poseidon.rs b/src/rng/sponge/poseidon.rs new file mode 100644 index 0000000..045dd26 --- /dev/null +++ b/src/rng/sponge/poseidon.rs @@ -0,0 +1,303 @@ +use core::marker::PhantomData; + +use ark_ff::{FpConfig, PrimeField}; +use ark_nonnative_field::{ + params::OptimizationType, AllocatedNonNativeFieldVar, NonNativeFieldVar, +}; +use ark_r1cs_std::fields::fp::FpVar; +use ark_r1cs_std::prelude::UInt8; +use ark_r1cs_std::{alloc::AllocVar, boolean::Boolean}; +use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; +use ark_sponge::constraints::AbsorbGadget; +use ark_sponge::{ + constraints::CryptographicSpongeVar, + poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters, PoseidonSponge}, + CryptographicSponge, +}; + +use super::{CryptographicSpongeParameters, CryptographicSpongeVarNonNative}; +use crate::rng::CryptographicSpongeWithRate; +use crate::overhead; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct PoseidonArguments { + pub prime_bits: u64, + pub full_rounds: u32, 
+ pub partial_rounds: u32, + pub skip_matrices: u64, + + _field: PhantomData, +} + +impl PoseidonArguments { + pub const DEFAULT: Self = Self { + prime_bits: F::Params::MODULUS_BITS as u64, + full_rounds: 8, + partial_rounds: 60, + skip_matrices: 0, + _field: PhantomData, + }; +} + +impl CryptographicSpongeWithRate for PoseidonSponge { + fn default_rate() -> usize { + PoseidonParametersWithDefaultRate::::DEFAULT_RATE + } +} + +impl CryptographicSpongeParameters for PoseidonParameters { + fn from_rate(rate: usize) -> Self { + PoseidonParametersWithDefaultRate::from_rate(rate).params + } +} + +impl + CryptographicSpongeVarNonNative for PoseidonSpongeVar +where + PoseidonSpongeVar: CryptographicSpongeVar, + >::Parameters: CryptographicSpongeParameters, +{ + fn default_rate() -> usize { + PoseidonParametersWithDefaultRate::::DEFAULT_RATE + } + + fn absorb_nonnative( + &mut self, + input: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError> { + let mut src_limbs: Vec<(FpVar, CF)> = Vec::new(); + + for elem in input.iter() { + match elem { + NonNativeFieldVar::Constant(c) => { + let v = AllocatedNonNativeFieldVar::::new_constant(self.cs(), c)?; + + for limb in v.limbs.iter() { + let num_of_additions_over_normal_form = + if v.num_of_additions_over_normal_form == CF::zero() { + CF::one() + } else { + v.num_of_additions_over_normal_form + }; + + src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); + } + } + NonNativeFieldVar::Var(v) => { + for limb in v.limbs.iter() { + let num_of_additions_over_normal_form = + if v.num_of_additions_over_normal_form == CF::zero() { + CF::one() + } else { + v.num_of_additions_over_normal_form + }; + + src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); + } + } + } + } + + let capacity = CF::size_in_bits() - 1; + let mut dest_limbs = Vec::>::new(); + + if !src_limbs.is_empty() { + let params = + ark_nonnative_field::params::get_params(F::size_in_bits(), CF::size_in_bits(), ty); + + let 
adjustment_factor_lookup_table = { + let mut table = Vec::::new(); + + let mut cur = CF::one(); + for _ in 1..=capacity { + table.push(cur); + cur.double_in_place(); + } + + table + }; + + let mut i: usize = 0; + let src_len = src_limbs.len(); + while i < src_len { + let first = &src_limbs[i]; + let second = if i + 1 < src_len { + Some(&src_limbs[i + 1]) + } else { + None + }; + + let first_max_bits_per_limb = + params.bits_per_limb + overhead!(first.1 + &CF::one()); + let second_max_bits_per_limb = if second.is_some() { + params.bits_per_limb + overhead!(second.unwrap().1 + &CF::one()) + } else { + 0 + }; + + if second.is_some() + && first_max_bits_per_limb + second_max_bits_per_limb <= capacity + { + let adjustment_factor = + &adjustment_factor_lookup_table[second_max_bits_per_limb]; + + dest_limbs.push(&first.0 * *adjustment_factor + &second.unwrap().0); + i += 2; + } else { + dest_limbs.push(first.0.clone()); + i += 1; + } + } + } + + self.absorb(&dest_limbs)?; + + Ok(()) + } +} + +/// Parameters and RNG used +#[derive(Clone, Debug)] +pub struct PoseidonParametersWithDefaultRate { + pub params: PoseidonParameters, +} + +impl PoseidonParametersWithDefaultRate { + /// Default rate for poseidon + pub const DEFAULT_RATE: usize = 4; +} + +impl From> for PoseidonParametersWithDefaultRate { + fn from(params: PoseidonParameters) -> Self { + Self { params } + } +} + +impl CryptographicSpongeParameters for PoseidonParametersWithDefaultRate { + fn from_rate(rate: usize) -> Self { + let PoseidonArguments { + prime_bits, + full_rounds, + partial_rounds, + skip_matrices, + .. 
+ } = PoseidonArguments::::DEFAULT; + + // TODO consume the arguments + let capacity = 1; + let alpha = 5; + let _ = (rate, prime_bits, skip_matrices); + + // TODO generate secure constants + let ark = F::one(); + let ark = vec![ark; 3]; + let ark = vec![ark; (full_rounds + partial_rounds) as usize]; + + // TODO generate secure matrix + let mds = F::one(); + let mds = vec![mds; rate + capacity]; + let mds = vec![mds; rate + capacity]; + + PoseidonParameters::new(full_rounds, partial_rounds, alpha, mds, ark).into() + } +} + +#[derive(Clone)] +/// Wrapper for [`PoseidonSponge`] +pub struct PoseidonSpongeWithDefaultRate { + pub s: PoseidonSponge, +} + +impl From> for PoseidonSpongeWithDefaultRate { + fn from(s: PoseidonSponge) -> Self { + Self { s } + } +} + +impl CryptographicSponge for PoseidonSpongeWithDefaultRate { + type Parameters = PoseidonParametersWithDefaultRate; + + fn new(p: &Self::Parameters) -> Self { + PoseidonSponge::new(&p.params).into() + } + + fn absorb(&mut self, input: &impl ark_sponge::Absorb) { + self.s.absorb(input) + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + self.s.squeeze_bytes(num_bytes) + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + self.s.squeeze_bits(num_bits) + } +} + +impl CryptographicSpongeWithRate for PoseidonSpongeWithDefaultRate { + fn default_rate() -> usize { + PoseidonParametersWithDefaultRate::::DEFAULT_RATE + } +} + +#[derive(Clone)] +/// Wrapper for [`PoseidonSpongeVar`] +pub struct PoseidonSpongeVarWithDefaultRate { + pub s: PoseidonSpongeVar, +} + +impl From> for PoseidonSpongeVarWithDefaultRate { + fn from(s: PoseidonSpongeVar) -> Self { + Self { s } + } +} + +impl CryptographicSpongeVar + for PoseidonSpongeVarWithDefaultRate +{ + type Parameters = PoseidonParametersWithDefaultRate; + + fn new(cs: ConstraintSystemRef, p: &Self::Parameters) -> Self { + PoseidonSpongeVar::new(cs, &p.params).into() + } + + fn cs(&self) -> ConstraintSystemRef { + self.s.cs() + } + + fn absorb(&mut self, 
input: &impl AbsorbGadget) -> Result<(), SynthesisError> { + self.s.absorb(input) + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Result>, SynthesisError> { + self.s.squeeze_bytes(num_bytes) + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Result>, SynthesisError> { + self.s.squeeze_bits(num_bits) + } + + fn squeeze_field_elements( + &mut self, + num_elements: usize, + ) -> Result>, SynthesisError> { + self.s.squeeze_field_elements(num_elements) + } +} + +impl + CryptographicSpongeVarNonNative for PoseidonSpongeVarWithDefaultRate +{ + fn default_rate() -> usize { + PoseidonParametersWithDefaultRate::::DEFAULT_RATE + } + + fn absorb_nonnative( + &mut self, + input: &[NonNativeFieldVar], + ty: OptimizationType, + ) -> Result<(), SynthesisError> { + self.s.absorb_nonnative(input, ty) + } +} From 4d616f4e349c6ae73ac0902be3aea475174361e1 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 11 Sep 2023 13:34:03 +0100 Subject: [PATCH 04/40] changed dependencies to ark_r1cs lib and fixes --- Cargo.toml | 2 -- src/ahp/indexer.rs | 28 +++------------- src/ahp/mod.rs | 4 +-- src/ahp/prover.rs | 10 +++++- src/data_structures.rs | 51 ++++++++---------------------- src/lib.rs | 21 ++++++------ src/rng/fiat_shamir/constraints.rs | 8 ++--- src/rng/fiat_shamir/mod.rs | 20 ++++++------ src/rng/sponge/mod.rs | 8 ++--- src/rng/sponge/poseidon.rs | 26 +++++++-------- src/test.rs | 8 ++--- 11 files changed, 75 insertions(+), 111 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 614da48..92019a4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,8 +27,6 @@ ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } ark-poly-commit = { version = "^0.4.0", default-features = false } ark-r1cs-std = { version = "^0.4.0", default-features = false } -ark-sponge = { version = "^0.3.0", default-features = false, features = [ "r1cs" ] } -ark-nonnative-field = { version = "^0.3.0", default-features = false } 
ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = [ "r1cs" ] } rayon = { version = "1", optional = true } diff --git a/src/ahp/indexer.rs b/src/ahp/indexer.rs index cdf2f32..00bd59e 100644 --- a/src/ahp/indexer.rs +++ b/src/ahp/indexer.rs @@ -7,16 +7,13 @@ use crate::ahp::{ AHPForR1CS, Error, LabeledPolynomial, }; use crate::Vec; -use ark_ff::PrimeField; +use ark_ff::{PrimeField, Field}; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, SynthesisMode, }; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{ - io::{Read, Write}, - marker::PhantomData, -}; +use ark_std::marker::PhantomData; use derivative::Derivative; use crate::ahp::constraint_systems::{ @@ -28,7 +25,7 @@ use crate::ahp::constraint_systems::{ /// entries in any of the constraint matrices. #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Clone(bound = ""), Copy(bound = ""))] -pub struct IndexInfo { +pub struct IndexInfo { /// The total number of variables in the constraint system. pub num_variables: usize, /// The number of constraints. @@ -42,7 +39,7 @@ pub struct IndexInfo { f: PhantomData, } -impl IndexInfo { +impl IndexInfo { /// Construct a new index info pub fn new( num_variables: usize, @@ -60,23 +57,6 @@ impl IndexInfo { } } -impl CanonicalSerialize for IndexInfo { - fn serialize_with_mode( - &self, - writer: W, - compress: ark_serialize::Compress, - ) -> Result<(), ark_serialize::SerializationError> { - - (self.num_variables as u64).write(&mut writer)?; - (self.num_constraints as u64).write(&mut writer)?; - (self.num_non_zero as u64).write(&mut writer) - } - - fn serialized_size(&self) -> usize { - 3 * ark_std::mem::size_of::() - } -} - impl IndexInfo { /// The maximum degree of polynomial required to represent this index in the /// the AHP. 
diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs index e2bc747..35b9154 100644 --- a/src/ahp/mod.rs +++ b/src/ahp/mod.rs @@ -415,7 +415,7 @@ mod tests { .coeffs .iter() .filter_map(|f| if !f.is_zero() { - Some(f.into_repr()) + Some(f) } else { None }) @@ -447,7 +447,7 @@ mod tests { .coeffs .iter() .filter_map(|f| if !f.is_zero() { - Some(f.into_repr()) + Some(f) } else { None }) diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 44902ee..e123029 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -18,7 +18,7 @@ use ark_relations::r1cs::{ }; use ark_serialize::Compress; use ark_serialize::Validate; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError, Valid}; use ark_std::rand::RngCore; use ark_std::{ cfg_into_iter, cfg_iter, cfg_iter_mut, @@ -92,6 +92,14 @@ impl CanonicalSerialize for ProverMsg { } } +impl Valid for ProverMsg{ + fn check_valid(&self) -> Result<(), SerializationError> { + match self { + ProverMsg::EmptyMessage => Ok(()), + ProverMsg::FieldElements(v) => v.check_valid(), + } + } +} impl CanonicalDeserialize for ProverMsg { fn deserialize_with_mode(&self, mut reader: R, compress:Compress, validate: Validate) -> Result { let res: Option> = match self { diff --git a/src/data_structures.rs b/src/data_structures.rs index 84b7488..2251675 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -5,17 +5,15 @@ use ark_ff::PrimeField; use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{BatchLCProof, PolynomialCommitment}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{ - format, - io::Write, -}; +use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_std::format; /* ************************************************************************* */ /* ************************************************************************* */ /* 
************************************************************************* */ /// The universal public parameters for the argument system. -pub type UniversalSRS = >>::UniversalParams; +pub type UniversalSRS = ,S>>::UniversalParams; /* ************************************************************************* */ /* ************************************************************************* */ @@ -23,7 +21,7 @@ pub type UniversalSRS = /// Verification key for a specific index (i.e., R1CS matrices). #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct IndexVerifierKey>> { +pub struct IndexVerifierKey, S>> { /// Stores information about the size of the index, as well as its field of /// definition. pub index_info: IndexInfo, @@ -33,29 +31,8 @@ pub struct IndexVerifierKey>> CanonicalSerialize - for IndexVerifierKey -{ - fn serialize_with_mode( - &self, - writer: W, - compress: ark_serialize::Compress, - ) -> Result<(), ark_serialize::SerializationError> { - self.index_info.serialize_with_mode(writer, compress)?; - self.index_comms.serialize_with_mode(writer, compress)?; - self.verifier_key.serialize_with_mode(writer, compress)?; - Ok(()) - } - - fn serialized_size(&self, compress: ark_serialize::Compress) -> usize { - self.index_info.serialized_size(compress) - + self.index_comms.serialized_size(compress) - + self.verifier_key.serialized_size(compress) - } -} - -impl>> Clone - for IndexVerifierKey +impl, S>, S:CryptographicSponge> Clone + for IndexVerifierKey { fn clone(&self) -> Self { Self { @@ -66,7 +43,7 @@ impl>> Clone } } -impl>> IndexVerifierKey { +impl,S>, S:CryptographicSponge> IndexVerifierKey { /// Iterate over the commitments to indexed polynomials in `self`. pub fn iter(&self) -> impl Iterator { self.index_comms.iter() @@ -79,9 +56,9 @@ impl>> IndexVerifi /// Proving key for a specific index (i.e., R1CS matrices). 
#[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct IndexProverKey>> { +pub struct IndexProverKey,S>,S:CryptographicSponge> { /// The index verifier key. - pub index_vk: IndexVerifierKey, + pub index_vk: IndexVerifierKey, /// The randomness for the index polynomial commitments. pub index_comm_rands: Vec, /// The index itself. @@ -90,7 +67,7 @@ pub struct IndexProverKey>> Clone for IndexProverKey +impl,S>,S:CryptographicSponge> Clone for IndexProverKey where PC::Commitment: Clone, { @@ -110,7 +87,7 @@ where /// A zkSNARK proof. #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct Proof>> { +pub struct Proof,S>,S:CryptographicSponge> { /// Commitments to the polynomials produced by the AHP prover. pub commitments: Vec>, /// Evaluations of these polynomials. @@ -118,16 +95,16 @@ pub struct Proof>> /// The field elements sent by the prover. pub prover_messages: Vec>, /// An evaluation proof from the polynomial commitment. - pub pc_proof: BatchLCProof, PC>, + pub pc_proof: BatchLCProof>, } -impl>> Proof { +impl,S>,S:CryptographicSponge> Proof { /// Construct a new proof. pub fn new( commitments: Vec>, evaluations: Vec, prover_messages: Vec>, - pc_proof: BatchLCProof, PC>, + pc_proof: BatchLCProof>, ) -> Self { Self { commitments, diff --git a/src/lib.rs b/src/lib.rs index 889b855..4960a56 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -86,14 +86,15 @@ use ahp::EvaluationsProvider; mod test; /// The compiled argument system. -pub struct Marlin>, FS: FiatShamirRng>( +pub struct Marlin, FS>, FS: FiatShamirRng>( #[doc(hidden)] PhantomData, + #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -impl>, FS: FiatShamirRng> - Marlin +impl, FS>, FS: FiatShamirRng> + Marlin { /// The personalization string for this protocol. Used to personalize the /// Fiat-Shamir rng. 
@@ -106,7 +107,7 @@ impl>, FS: FiatSha num_variables: usize, num_non_zero: usize, rng: &mut R, - ) -> Result, Error> { + ) -> Result, Error> { let max_degree = AHPForR1CS::::max_degree(num_constraints, num_variables, num_non_zero)?; let setup_time = start_timer!(|| { format!( @@ -123,9 +124,9 @@ impl>, FS: FiatSha /// Generate the index-specific (i.e., circuit-specific) prover and verifier /// keys. This is a deterministic algorithm that anyone can rerun. pub fn index>( - srs: &UniversalSRS, + srs: &UniversalSRS, c: C, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); // TODO: Add check that c is in the correct mode. @@ -174,10 +175,10 @@ impl>, FS: FiatSha /// Create a zkSNARK asserting that the constraint system is satisfied. pub fn prove, R: RngCore>( - index_pk: &IndexProverKey, + index_pk: &IndexProverKey, c: C, zk_rng: &mut R, - ) -> Result, Error> { + ) -> Result, Error> { let prover_time = start_timer!(|| "Marlin::Prover"); // Add check that c is in the correct mode. @@ -338,9 +339,9 @@ impl>, FS: FiatSha /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. 
pub fn verify( - index_vk: &IndexVerifierKey, + index_vk: &IndexVerifierKey, public_input: &[F], - proof: &Proof, + proof: &Proof, rng: &mut R, ) -> Result> { let verifier_time = start_timer!(|| "Marlin::Verify"); diff --git a/src/rng/fiat_shamir/constraints.rs b/src/rng/fiat_shamir/constraints.rs index 81a4d71..13add71 100644 --- a/src/rng/fiat_shamir/constraints.rs +++ b/src/rng/fiat_shamir/constraints.rs @@ -2,8 +2,8 @@ use crate::rng::CryptographicSpongeVarNonNative; use crate::rng::CryptographicSpongeParameters; use crate::{overhead,Vec}; use ark_ff::PrimeField; -use ark_nonnative_field::params::{get_params, OptimizationType}; -use ark_nonnative_field::{AllocatedNonNativeFieldVar, NonNativeFieldVar}; +use ark_r1cs_std::fields::nonnative::params::{get_params, OptimizationType}; +use ark_r1cs_std::fields::nonnative::{AllocatedNonNativeFieldVar, NonNativeFieldVar}; use ark_r1cs_std::{ alloc::AllocVar, bits::{uint8::UInt8, ToBitsGadget}, @@ -16,8 +16,8 @@ use ark_relations::lc; use ark_relations::r1cs::{ ConstraintSystemRef, LinearCombination, OptimizationGoal, SynthesisError, }; -use ark_sponge::constraints::{AbsorbGadget, CryptographicSpongeVar}; -use ark_sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::constraints::{AbsorbGadget, CryptographicSpongeVar}; +use ark_crypto_primitives::sponge::CryptographicSponge; use core::marker::PhantomData; /// Vars for a RNG for use in a Fiat-Shamir transform. 
diff --git a/src/rng/fiat_shamir/mod.rs b/src/rng/fiat_shamir/mod.rs index 4e430f8..11e0feb 100644 --- a/src/rng/fiat_shamir/mod.rs +++ b/src/rng/fiat_shamir/mod.rs @@ -1,9 +1,9 @@ use crate::rng::{CryptographicSpongeParameters, CryptographicSpongeWithRate}; use crate::Vec; use ark_ff::{BigInteger, PrimeField, ToConstraintField}; -use ark_nonnative_field::params::{get_params, OptimizationType}; -use ark_nonnative_field::AllocatedNonNativeFieldVar; -use ark_sponge::{Absorb, CryptographicSponge}; +use ark_r1cs_std::fields::nonnative::params::{get_params, OptimizationType}; +use ark_r1cs_std::fields::nonnative::AllocatedNonNativeFieldVar; +use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_std::io::{Read, Result as IoResult, Write}; use ark_std::marker::PhantomData; use ark_std::rand::{RngCore, SeedableRng}; @@ -199,9 +199,9 @@ impl Read for FiatShamirChaChaRng CryptographicSponge for FiatShamirChaChaRng { - type Parameters = (); + type Config = (); - fn new(_params: &Self::Parameters) -> Self { + fn new(_params: &Self::Config) -> Self { let seed = [0; 32]; let r = ChaChaRng::from_seed(seed); @@ -278,7 +278,7 @@ impl From impl Default for FiatShamirSpongeRng where - ::Parameters: CryptographicSpongeParameters, + ::Config: CryptographicSpongeParameters, { fn default() -> Self { S::with_default_rate().into() @@ -288,9 +288,9 @@ where impl CryptographicSponge for FiatShamirSpongeRng { - type Parameters = S::Parameters; + type Config = S::Config; - fn new(params: &Self::Parameters) -> Self { + fn new(params: &Self::Config) -> Self { S::new(params).into() } @@ -311,7 +311,7 @@ impl FiatShamirRn for FiatShamirSpongeRng where CF: Absorb, - ::Parameters: CryptographicSpongeParameters, + ::Config: CryptographicSpongeParameters, { fn absorb_nonnative(&mut self, elems: &[F], ty: OptimizationType) { // FIXME ignoring faulty elements; maybe panic? 
@@ -515,7 +515,7 @@ impl Write for FiatShamirSpongeRng where CF: Absorb, - ::Parameters: CryptographicSpongeParameters, + ::Config: CryptographicSpongeParameters, { fn write(&mut self, buf: &[u8]) -> IoResult { self.absorb(&buf); diff --git a/src/rng/sponge/mod.rs b/src/rng/sponge/mod.rs index 40414f9..978a566 100644 --- a/src/rng/sponge/mod.rs +++ b/src/rng/sponge/mod.rs @@ -1,7 +1,7 @@ use ark_ff::PrimeField; -use ark_nonnative_field::{params::OptimizationType, NonNativeFieldVar}; +use ark_r1cs_std::fields::nonnative::{params::OptimizationType, NonNativeFieldVar}; use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; -use ark_sponge::{constraints::CryptographicSpongeVar, CryptographicSponge}; +use ark_crypto_primitives::sponge::{constraints::CryptographicSpongeVar, CryptographicSponge}; pub mod poseidon; @@ -11,7 +11,7 @@ pub trait CryptographicSpongeParameters { pub trait CryptographicSpongeWithRate: CryptographicSponge where - ::Parameters: CryptographicSpongeParameters, + ::Config: CryptographicSpongeParameters, { fn default_rate() -> usize; @@ -23,7 +23,7 @@ where fn from_rate(rate: usize) -> Self { let params = - <::Parameters as CryptographicSpongeParameters>::from_rate( + <::Config as CryptographicSpongeParameters>::from_rate( rate, ); diff --git a/src/rng/sponge/poseidon.rs b/src/rng/sponge/poseidon.rs index 045dd26..4c265ea 100644 --- a/src/rng/sponge/poseidon.rs +++ b/src/rng/sponge/poseidon.rs @@ -1,17 +1,17 @@ use core::marker::PhantomData; use ark_ff::{FpConfig, PrimeField}; -use ark_nonnative_field::{ +use ark_r1cs_std::fields::nonnative::{ params::OptimizationType, AllocatedNonNativeFieldVar, NonNativeFieldVar, }; use ark_r1cs_std::fields::fp::FpVar; use ark_r1cs_std::prelude::UInt8; use ark_r1cs_std::{alloc::AllocVar, boolean::Boolean}; use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; -use ark_sponge::constraints::AbsorbGadget; -use ark_sponge::{ +use ark_crypto_primitives::sponge::constraints::AbsorbGadget; +use 
ark_crypto_primitives::sponge::{ constraints::CryptographicSpongeVar, - poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters, PoseidonSponge}, + poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig, PoseidonSponge}, CryptographicSponge, }; @@ -45,7 +45,7 @@ impl CryptographicSpongeWithRate for PoseidonSponge { } } -impl CryptographicSpongeParameters for PoseidonParameters { +impl CryptographicSpongeParameters for PoseidonConfig { fn from_rate(rate: usize) -> Self { PoseidonParametersWithDefaultRate::from_rate(rate).params } @@ -104,7 +104,7 @@ where if !src_limbs.is_empty() { let params = - ark_nonnative_field::params::get_params(F::size_in_bits(), CF::size_in_bits(), ty); + ark_r1cs_std::fields::nonnative::params::get_params(F::size_in_bits(), CF::size_in_bits(), ty); let adjustment_factor_lookup_table = { let mut table = Vec::::new(); @@ -160,7 +160,7 @@ where /// Parameters and RNG used #[derive(Clone, Debug)] pub struct PoseidonParametersWithDefaultRate { - pub params: PoseidonParameters, + pub params: PoseidonConfig, } impl PoseidonParametersWithDefaultRate { @@ -168,8 +168,8 @@ impl PoseidonParametersWithDefaultRate { pub const DEFAULT_RATE: usize = 4; } -impl From> for PoseidonParametersWithDefaultRate { - fn from(params: PoseidonParameters) -> Self { +impl From> for PoseidonParametersWithDefaultRate { + fn from(params: PoseidonConfig) -> Self { Self { params } } } @@ -199,7 +199,7 @@ impl CryptographicSpongeParameters for PoseidonParametersWithDefa let mds = vec![mds; rate + capacity]; let mds = vec![mds; rate + capacity]; - PoseidonParameters::new(full_rounds, partial_rounds, alpha, mds, ark).into() + PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, ark).into() } } @@ -216,13 +216,13 @@ impl From> for PoseidonSpongeWithDefaultRate } impl CryptographicSponge for PoseidonSpongeWithDefaultRate { - type Parameters = PoseidonParametersWithDefaultRate; + type Config = PoseidonParametersWithDefaultRate; - fn new(p: &Self::Parameters) -> 
Self { + fn new(p: &Self::Config) -> Self { PoseidonSponge::new(&p.params).into() } - fn absorb(&mut self, input: &impl ark_sponge::Absorb) { + fn absorb(&mut self, input: &impl ark_crypto_primitives::sponge::Absorb) { self.s.absorb(input) } diff --git a/src/test.rs b/src/test.rs index fc91e12..42326c8 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,7 +115,7 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::{Marlin, SimpleHashFiatShamirRng}; + use crate::{Marlin, rng::FiatShamirChaChaRng}; use ark_bls12_381::{Bls12_381, Fr}; use ark_ff::UniformRand; @@ -125,9 +125,9 @@ mod marlin { use blake2::Blake2s; use rand_chacha::ChaChaRng; - type MultiPC = MarlinKZG10>; - type FS = SimpleHashFiatShamirRng; - type MarlinInst = Marlin; + type FS = FiatShamirChaChaRng; + type MultiPC = MarlinKZG10, FS>; + type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); From 5d6d7d0fda31d257c8aba336ce08a18ba8d898f2 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 11 Sep 2023 15:36:55 +0100 Subject: [PATCH 05/40] checkin --- src/ahp/prover.rs | 6 +++--- src/test.rs | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index e123029..5b6cca7 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -93,15 +93,15 @@ impl CanonicalSerialize for ProverMsg { } impl Valid for ProverMsg{ - fn check_valid(&self) -> Result<(), SerializationError> { + fn check(&self) -> Result<(), SerializationError> { match self { ProverMsg::EmptyMessage => Ok(()), - ProverMsg::FieldElements(v) => v.check_valid(), + ProverMsg::FieldElements(v) => v.check(), } } } impl CanonicalDeserialize for ProverMsg { - fn deserialize_with_mode(&self, mut reader: R, compress:Compress, validate: Validate) -> Result { + fn deserialize_with_mode(mut reader: R, compress:Compress, validate: Validate) -> Result { let res: Option> = match self { 
ProverMsg::EmptyMessage => None, ProverMsg::FieldElements(v) => v.deserialize_with_mode(reader, compress, validate), diff --git a/src/test.rs b/src/test.rs index 42326c8..9346d12 100644 --- a/src/test.rs +++ b/src/test.rs @@ -122,12 +122,11 @@ mod marlin { use ark_poly::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; - use blake2::Blake2s; use rand_chacha::ChaChaRng; - type FS = FiatShamirChaChaRng; + type FS = FiatShamirChaChaRng; type MultiPC = MarlinKZG10, FS>; - type MarlinInst = Marlin; + type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); From aa666b52cb44e970906efb5a5a5e7cbe7b56adfa Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 10:29:13 +0100 Subject: [PATCH 06/40] serialize mostly done --- Cargo.toml | 1 + src/ahp/prover.rs | 28 +- src/lib.rs | 10 +- src/rng/fiat_shamir/constraints.rs | 433 ----------------------- src/rng/fiat_shamir/mod.rs | 537 ----------------------------- src/rng/mod.rs | 5 - src/rng/sponge/mod.rs | 62 ---- src/rng/sponge/poseidon.rs | 303 ---------------- src/test.rs | 13 +- 9 files changed, 28 insertions(+), 1364 deletions(-) delete mode 100644 src/rng/fiat_shamir/constraints.rs delete mode 100644 src/rng/fiat_shamir/mod.rs delete mode 100644 src/rng/mod.rs delete mode 100644 src/rng/sponge/mod.rs delete mode 100644 src/rng/sponge/poseidon.rs diff --git a/Cargo.toml b/Cargo.toml index 92019a4..8371137 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,6 +22,7 @@ edition = "2018" [dependencies] ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] } ark-ff = { version = "^0.4.0", default-features = false } +ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0", default-features = false } ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } diff --git 
a/src/ahp/prover.rs b/src/ahp/prover.rs index 5b6cca7..843a8c5 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -76,20 +76,21 @@ pub enum ProverMsg { impl CanonicalSerialize for ProverMsg { fn serialize_with_mode(&self, mut writer: W, compress: Compress) -> Result<(), SerializationError> { - let res: Option> = match self { + let res = match self { ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => v.serialize_with_mode(writer, compress), + ProverMsg::FieldElements(v) => Some(v.clone()), }; - Ok(res) + res.serialize_with_mode(writer, compress); + Ok(()) } fn serialized_size(&self, compress: Compress) -> usize { let res: Option> = match self { - ProverMsg::EmptyMessage => 0, - ProverMsg::FieldElements(v) => v.serialized_size(compress), + ProverMsg::EmptyMessage => None, + ProverMsg::FieldElements(v) => Some(v.clone()), }; - Ok(res) - } + res.serialized_size(compress) +} } impl Valid for ProverMsg{ @@ -102,12 +103,13 @@ impl Valid for ProverMsg{ } impl CanonicalDeserialize for ProverMsg { fn deserialize_with_mode(mut reader: R, compress:Compress, validate: Validate) -> Result { - let res: Option> = match self { - ProverMsg::EmptyMessage => None, - ProverMsg::FieldElements(v) => v.deserialize_with_mode(reader, compress, validate), - }; - Ok(res) - } + let res = Option::>::deserialize_with_mode(reader, compress, validate)?; + if let Some(res) = res { + Ok(ProverMsg::FieldElements(res)) + } else { + Ok(ProverMsg::EmptyMessage) + } +} } /// The first set of prover oracles. diff --git a/src/lib.rs b/src/lib.rs index 4960a56..43fd005 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -58,12 +58,12 @@ macro_rules! to_bytes { macro_rules! 
push_to_vec { ($buf:expr, $y:expr, $($x:expr),*) => ({ { - $crate::CanonicalSerialize::write(&$y, &mut $buf) + $y.serialize_compressed(&mut $buf) }.and({$crate::push_to_vec!($buf, $($x),*)}) }); ($buf:expr, $x:expr) => ({ - $crate::CanonicalSerialize::write(&$x, &mut $buf) + $x.serialize_compressed(&mut $buf) }) } /// Implements a Fiat-Shamir based Rng that allows one to incrementally update @@ -85,15 +85,15 @@ use ahp::EvaluationsProvider; #[cfg(test)] mod test; -/// The compiled argument system. -pub struct Marlin, FS>, FS: FiatShamirRng>( +/// The compiled argument system.FiatShamiRng +pub struct Marlin, FS>, FS: FiatShamirRng>( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -impl, FS>, FS: FiatShamirRng> +impl, FS>, FS: FiatShamirRng> Marlin { /// The personalization string for this protocol. Used to personalize the diff --git a/src/rng/fiat_shamir/constraints.rs b/src/rng/fiat_shamir/constraints.rs deleted file mode 100644 index 13add71..0000000 --- a/src/rng/fiat_shamir/constraints.rs +++ /dev/null @@ -1,433 +0,0 @@ -use crate::rng::CryptographicSpongeVarNonNative; -use crate::rng::CryptographicSpongeParameters; -use crate::{overhead,Vec}; -use ark_ff::PrimeField; -use ark_r1cs_std::fields::nonnative::params::{get_params, OptimizationType}; -use ark_r1cs_std::fields::nonnative::{AllocatedNonNativeFieldVar, NonNativeFieldVar}; -use ark_r1cs_std::{ - alloc::AllocVar, - bits::{uint8::UInt8, ToBitsGadget}, - boolean::Boolean, - fields::fp::AllocatedFp, - fields::fp::FpVar, - R1CSVar, -}; -use ark_relations::lc; -use ark_relations::r1cs::{ - ConstraintSystemRef, LinearCombination, OptimizationGoal, SynthesisError, -}; -use ark_crypto_primitives::sponge::constraints::{AbsorbGadget, CryptographicSpongeVar}; -use ark_crypto_primitives::sponge::CryptographicSponge; -use core::marker::PhantomData; - -/// Vars for a RNG for use in a Fiat-Shamir transform. 
-pub trait FiatShamirRngVar: - From> + CryptographicSpongeVar -{ - // Instantiate from a plaintext fs_rng. - fn constant(cs: ConstraintSystemRef, pfs: &S) -> Self; - - /// Take in field elements. - fn absorb_nonnative( - &mut self, - elems: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError>; - - /// Take in field elements. - fn absorb_native(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError>; - - /// Take in bytes. - fn absorb_bytes(&mut self, elems: &[UInt8]) -> Result<(), SynthesisError>; - - /// Output field elements. - fn squeeze_native(&mut self, num: usize) -> Result>, SynthesisError>; - - /// Output field elements. - fn squeeze_nonnative( - &mut self, - num: usize, - ) -> Result>, SynthesisError>; - - /// Output field elements with only 128 bits. - fn squeeze_128_bits_nonnative( - &mut self, - num: usize, - ) -> Result>, SynthesisError>; - - /// Output field elements with only 128 bits, and the corresponding bits (this can reduce - /// repeated computation). - #[allow(clippy::type_complexity)] - fn squeeze_128_bits_nonnative_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError>; -} - -/// Building the Fiat-Shamir sponge's gadget from any algebraic sponge's gadget. -#[derive(Clone)] -pub struct FiatShamirSpongeRngVar< - F: PrimeField, - CF: PrimeField, - PS: CryptographicSponge, - S: CryptographicSpongeVar, -> { - pub cs: ConstraintSystemRef, - pub s: S, - #[doc(hidden)] - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, -} - -impl> - FiatShamirSpongeRngVar -{ - /// Compress every two elements if possible. Provides a vector of (limb, num_of_additions), - /// both of which are CF. 
- #[tracing::instrument(target = "r1cs")] - pub fn compress_gadgets( - src_limbs: &[(FpVar, CF)], - ty: OptimizationType, - ) -> Result>, SynthesisError> { - let capacity = CF::size_in_bits() - 1; - let mut dest_limbs = Vec::>::new(); - - if src_limbs.is_empty() { - return Ok(vec![]); - } - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); - - let adjustment_factor_lookup_table = { - let mut table = Vec::::new(); - - let mut cur = CF::one(); - for _ in 1..=capacity { - table.push(cur); - cur.double_in_place(); - } - - table - }; - - let mut i: usize = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { - Some(&src_limbs[i + 1]) - } else { - None - }; - - let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); - let second_max_bits_per_limb = if second.is_some() { - params.bits_per_limb + overhead!(second.unwrap().1 + &CF::one()) - } else { - 0 - }; - - if second.is_some() && first_max_bits_per_limb + second_max_bits_per_limb <= capacity { - let adjustment_factor = &adjustment_factor_lookup_table[second_max_bits_per_limb]; - - dest_limbs.push(&first.0 * *adjustment_factor + &second.unwrap().0); - i += 2; - } else { - dest_limbs.push(first.0.clone()); - i += 1; - } - } - - Ok(dest_limbs) - } - - /// Push gadgets to sponge. 
- #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn push_gadgets_to_sponge( - sponge: &mut S, - src: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - let mut src_limbs: Vec<(FpVar, CF)> = Vec::new(); - - for elem in src.iter() { - match elem { - NonNativeFieldVar::Constant(c) => { - let v = AllocatedNonNativeFieldVar::::new_constant(sponge.cs(), c)?; - - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - NonNativeFieldVar::Var(v) => { - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - } - } - - let dest_limbs = Self::compress_gadgets(&src_limbs, ty)?; - sponge.absorb(&dest_limbs)?; - Ok(()) - } - - /// Obtain random bits from hashchain gadget. (Not guaranteed to be uniformly distributed, - /// should only be used in certain situations.) - #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn get_booleans_from_sponge( - sponge: &mut S, - num_bits: usize, - ) -> Result>, SynthesisError> { - let bits_per_element = CF::size_in_bits() - 1; - let num_elements = (num_bits + bits_per_element - 1) / bits_per_element; - - let src_elements = sponge.squeeze_field_elements(num_elements)?; - let mut dest_bits = Vec::>::new(); - - for elem in src_elements.iter() { - let elem_bits = elem.to_bits_be()?; - dest_bits.extend_from_slice(&elem_bits[1..]); // discard the highest bit - } - - Ok(dest_bits) - } - - /// Obtain random elements from hashchain gadget. (Not guaranteed to be uniformly distributed, - /// should only be used in certain situations.) 
- #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn get_gadgets_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Result>, SynthesisError> { - let (dest_gadgets, _) = - Self::get_gadgets_and_bits_from_sponge(sponge, num_elements, outputs_short_elements)?; - - Ok(dest_gadgets) - } - - /// Obtain random elements, and the corresponding bits, from hashchain gadget. (Not guaranteed - /// to be uniformly distributed, should only be used in certain situations.) - #[tracing::instrument(target = "r1cs", skip(sponge))] - #[allow(clippy::type_complexity)] - pub fn get_gadgets_and_bits_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Result<(Vec>, Vec>>), SynthesisError> { - let cs = sponge.cs(); - - let optimization_type = match cs.optimization_goal() { - OptimizationGoal::None => OptimizationType::Constraints, - OptimizationGoal::Constraints => OptimizationType::Constraints, - OptimizationGoal::Weight => OptimizationType::Weight, - }; - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), optimization_type); - - let num_bits_per_nonnative = if outputs_short_elements { - 128 - } else { - F::size_in_bits() - 1 // also omit the highest bit - }; - let bits = Self::get_booleans_from_sponge(sponge, num_bits_per_nonnative * num_elements)?; - - let mut lookup_table = Vec::>::new(); - let mut cur = F::one(); - for _ in 0..num_bits_per_nonnative { - let repr = AllocatedNonNativeFieldVar::::get_limbs_representations( - &cur, - optimization_type, - )?; - lookup_table.push(repr); - cur.double_in_place(); - } - - let mut dest_gadgets = Vec::>::new(); - let mut dest_bits = Vec::>>::new(); - bits.chunks_exact(num_bits_per_nonnative) - .for_each(|per_nonnative_bits| { - let mut val = vec![CF::zero(); params.num_limbs]; - let mut lc = vec![LinearCombination::::zero(); params.num_limbs]; - - let mut per_nonnative_bits_le = per_nonnative_bits.to_vec(); - 
per_nonnative_bits_le.reverse(); - - dest_bits.push(per_nonnative_bits_le.clone()); - - for (j, bit) in per_nonnative_bits_le.iter().enumerate() { - if bit.value().unwrap_or_default() { - for (k, val) in val.iter_mut().enumerate().take(params.num_limbs) { - *val += &lookup_table[j][k]; - } - } - - #[allow(clippy::needless_range_loop)] - for k in 0..params.num_limbs { - lc[k] = &lc[k] + bit.lc() * lookup_table[j][k]; - } - } - - let mut limbs = Vec::new(); - for k in 0..params.num_limbs { - let gadget = - AllocatedFp::new_witness(ark_relations::ns!(cs, "alloc"), || Ok(val[k])) - .unwrap(); - lc[k] = lc[k].clone() - (CF::one(), gadget.variable); - cs.enforce_constraint(lc!(), lc!(), lc[k].clone()).unwrap(); - limbs.push(FpVar::::from(gadget)); - } - - dest_gadgets.push(NonNativeFieldVar::::Var( - AllocatedNonNativeFieldVar:: { - cs: cs.clone(), - limbs, - num_of_additions_over_normal_form: CF::zero(), - is_in_the_normal_form: true, - target_phantom: Default::default(), - }, - )); - }); - - Ok((dest_gadgets, dest_bits)) - } -} - -impl< - F: PrimeField, - CF: PrimeField, - PS: CryptographicSponge, - S: CryptographicSpongeVarNonNative, - > From> for FiatShamirSpongeRngVar -where - >::Parameters: CryptographicSpongeParameters, -{ - fn from(cs: ConstraintSystemRef) -> Self { - Self { - cs: cs.clone(), - s: S::with_default_rate(cs), - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, - } - } -} - -impl> - CryptographicSpongeVar for FiatShamirSpongeRngVar -{ - type Parameters = S::Parameters; - - fn new(cs: ConstraintSystemRef, params: &Self::Parameters) -> Self { - Self { - cs: cs.clone(), - s: S::new(cs, params), - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, - } - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn cs(&self) -> ConstraintSystemRef { - self.cs.clone() - } - - fn absorb(&mut self, input: &impl AbsorbGadget) -> Result<(), SynthesisError> { - self.s.absorb(input) - } - - 
#[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_bytes(&mut self, num_bytes: usize) -> Result>, SynthesisError> { - self.s.squeeze_bytes(num_bytes) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_bits(&mut self, num_bits: usize) -> Result>, SynthesisError> { - self.s.squeeze_bits(num_bits) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_field_elements( - &mut self, - num_elements: usize, - ) -> Result>, SynthesisError> { - self.s.squeeze_field_elements(num_elements) - } -} - -impl< - F: PrimeField, - CF: PrimeField, - PS: CryptographicSponge, - S: CryptographicSpongeVarNonNative, - > FiatShamirRngVar for FiatShamirSpongeRngVar -where - >::Parameters: CryptographicSpongeParameters, -{ - fn constant(cs: ConstraintSystemRef, _pfs: &PS) -> Self { - Self::from(cs) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_nonnative( - &mut self, - elems: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - Self::push_gadgets_to_sponge(&mut self.s, &elems.to_vec(), ty) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_native(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError> { - self.absorb(&elems) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_bytes(&mut self, elems: &[UInt8]) -> Result<(), SynthesisError> { - self.absorb(&elems) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_native(&mut self, num: usize) -> Result>, SynthesisError> { - self.s.squeeze_field_elements(num) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_nonnative( - &mut self, - num: usize, - ) -> Result>, SynthesisError> { - Self::get_gadgets_from_sponge(&mut self.s, num, false) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_128_bits_nonnative( - &mut self, - num: usize, - ) -> Result>, SynthesisError> { - Self::get_gadgets_from_sponge(&mut self.s, num, true) - } - - 
#[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_128_bits_nonnative_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError> { - Self::get_gadgets_and_bits_from_sponge(&mut self.s, num, true) - } -} diff --git a/src/rng/fiat_shamir/mod.rs b/src/rng/fiat_shamir/mod.rs deleted file mode 100644 index 11e0feb..0000000 --- a/src/rng/fiat_shamir/mod.rs +++ /dev/null @@ -1,537 +0,0 @@ -use crate::rng::{CryptographicSpongeParameters, CryptographicSpongeWithRate}; -use crate::Vec; -use ark_ff::{BigInteger, PrimeField, ToConstraintField}; -use ark_r1cs_std::fields::nonnative::params::{get_params, OptimizationType}; -use ark_r1cs_std::fields::nonnative::AllocatedNonNativeFieldVar; -use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; -use ark_std::io::{Read, Result as IoResult, Write}; -use ark_std::marker::PhantomData; -use ark_std::rand::{RngCore, SeedableRng}; -use core::{cmp, iter}; -use digest::Digest; -use rand_chacha::ChaChaRng; - -/// The constraints for Fiat-Shamir -pub mod constraints; - -/// a macro for computing ceil(log2(x))+1 for a field element x -#[doc(hidden)] -#[macro_export] -macro_rules! 
overhead { - ($x:expr) => {{ - use ark_ff::BigInteger; - let num = $x; - let num_bits = num.into_repr().to_bits_be(); - let mut skipped_bits = 0; - for b in num_bits.iter() { - if *b == false { - skipped_bits += 1; - } else { - break; - } - } - - let mut is_power_of_2 = true; - for b in num_bits.iter().skip(skipped_bits + 1) { - if *b == true { - is_power_of_2 = false; - } - } - - if is_power_of_2 { - num_bits.len() - skipped_bits - } else { - num_bits.len() - skipped_bits + 1 - } - }}; -} - -/// the trait for Fiat-Shamir RNG -pub trait FiatShamirRng: - Default + RngCore + Write + CryptographicSponge -{ - /// take in field elements - fn absorb_nonnative(&mut self, elems: &[F], ty: OptimizationType); - /// take in field elements - fn absorb_native>(&mut self, elems: &[T]); - /// take in bytes - fn absorb_bytes(&mut self, bytes: &[u8]) { - ::write(self, bytes).ok(); - } - - /// take out field elements - fn squeeze_nonnative(&mut self, num: usize, ty: OptimizationType) -> Vec; - /// take in field elements - fn squeeze_native(&mut self, num: usize) -> Vec; - /// take out field elements of 128 bits - fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec; -} - -/// use a ChaCha stream cipher to generate the actual pseudorandom bits -/// use a digest funcion to do absorbing -pub struct FiatShamirChaChaRng { - pub r: ChaChaRng, - pub seed: Vec, - #[doc(hidden)] - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, -} - -impl Default for FiatShamirChaChaRng { - fn default() -> Self { - let seed = [0; 32]; - let r = ChaChaRng::from_seed(seed); - - Self { - r, - seed: seed.to_vec(), - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, - } - } -} - -impl Clone for FiatShamirChaChaRng { - fn clone(&self) -> Self { - Self { - r: self.r.clone(), - seed: self.seed.clone(), - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, - } - } -} - -impl RngCore for FiatShamirChaChaRng { - fn 
next_u32(&mut self) -> u32 { - self.r.next_u32() - } - - fn next_u64(&mut self) -> u64 { - self.r.next_u64() - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.r.fill_bytes(dest) - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - self.r.try_fill_bytes(dest) - } -} - -impl FiatShamirRng - for FiatShamirChaChaRng -{ - fn absorb_nonnative(&mut self, elems: &[F], _: OptimizationType) { - elems - .iter() - .try_for_each(|elem| elem.write(&mut *self)) - .expect("failed to convert to bytes"); - } - - fn absorb_native>(&mut self, elems: &[T]) { - elems - .iter() - .filter_map(|elem| elem.to_field_elements()) - .flat_map(|v| v.into_iter()) - .try_for_each(|elem| elem.write(&mut *self)) - .expect("failed to convert to bytes"); - } - - fn squeeze_nonnative(&mut self, num: usize, _: OptimizationType) -> Vec { - iter::from_fn(|| Some(F::rand(&mut self.r))) - .take(num) - .collect() - } - - fn squeeze_native(&mut self, num: usize) -> Vec { - iter::from_fn(|| Some(CF::rand(&mut self.r))) - .take(num) - .collect() - } - - fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec { - let mut x = [0u8; 16]; - - iter::from_fn(|| { - self.r.fill_bytes(&mut x); - - let elem = F::from_random_bytes(&x).expect("failed to create field element"); - - Some(elem) - }) - .take(num) - .collect() - } -} - -impl Write for FiatShamirChaChaRng { - fn write(&mut self, buf: &[u8]) -> IoResult { - self.seed = D::digest(buf).to_vec(); - - let l = cmp::min(32, self.seed.len()); - let mut seed = [0u8; 32]; - - (&mut seed[..l]).copy_from_slice(&self.seed[..l]); - - self.r = ChaChaRng::from_seed(seed); - - Ok(buf.len()) - } - - fn flush(&mut self) -> IoResult<()> { - Ok(()) - } -} - -impl Read for FiatShamirChaChaRng { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - self.fill_bytes(buf); - - Ok(buf.len()) - } -} - -impl CryptographicSponge - for FiatShamirChaChaRng -{ - type Config = (); - - fn new(_params: &Self::Config) -> Self { - let seed = [0; 
32]; - let r = ChaChaRng::from_seed(seed); - - Self { - r, - seed: seed.to_vec(), - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, - } - } - - fn absorb(&mut self, input: &impl Absorb) { - let bytes = input.to_sponge_bytes_as_vec(); - - self.seed = D::digest(&bytes).to_vec(); - - let l = cmp::min(32, self.seed.len()); - let mut seed = [0u8; 32]; - - (&mut seed[..l]).copy_from_slice(&self.seed[..l]); - - self.r = ChaChaRng::from_seed(seed); - } - - fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { - let mut output = vec![0u8; num_bytes]; - - self.fill_bytes(output.as_mut_slice()); - - output - } - - fn squeeze_bits(&mut self, num_bits: usize) -> Vec { - self.squeeze_bytes(num_bits) - .into_iter() - .map(|b| (b & 0x01) == 1) - .collect() - } -} - -/// rng from any algebraic sponge -pub struct FiatShamirSpongeRng { - pub s: S, - #[doc(hidden)] - f_phantom: PhantomData, - cf_phantom: PhantomData, -} - -impl Clone - for FiatShamirSpongeRng -{ - fn clone(&self) -> Self { - Self { - s: self.s.clone(), - f_phantom: PhantomData, - cf_phantom: PhantomData, - } - } -} - -impl From - for FiatShamirSpongeRng -{ - fn from(s: S) -> Self { - Self { - s, - f_phantom: PhantomData, - cf_phantom: PhantomData, - } - } -} - -impl Default - for FiatShamirSpongeRng -where - ::Config: CryptographicSpongeParameters, -{ - fn default() -> Self { - S::with_default_rate().into() - } -} - -impl CryptographicSponge - for FiatShamirSpongeRng -{ - type Config = S::Config; - - fn new(params: &Self::Config) -> Self { - S::new(params).into() - } - - fn absorb(&mut self, input: &impl Absorb) { - self.s.absorb(input) - } - - fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { - self.s.squeeze_bytes(num_bytes) - } - - fn squeeze_bits(&mut self, num_bits: usize) -> Vec { - self.s.squeeze_bits(num_bits) - } -} - -impl FiatShamirRng - for FiatShamirSpongeRng -where - CF: Absorb, - ::Config: CryptographicSpongeParameters, -{ - fn absorb_nonnative(&mut self, elems: 
&[F], ty: OptimizationType) { - // FIXME ignoring faulty elements; maybe panic? - let src: Vec<(CF, CF)> = elems - .iter() - .filter_map(|elem| { - AllocatedNonNativeFieldVar::::get_limbs_representations(elem, ty).ok() - }) - .flatten() - // specifically set to one since most gadgets in the constraint world would not have - // zero noise (due to the relatively weak normal form testing in `alloc`) - .map(|limb| (limb, CF::one())) - .collect(); - - let dest = Self::compress_elements(&src, ty); - - self.absorb(&dest); - } - - fn absorb_native>(&mut self, elems: &[T]) { - elems - .iter() - .filter_map(|elem| elem.to_field_elements()) - .flat_map(|v| v.into_iter()) - .for_each(|elem| self.absorb(&elem)); - } - - fn squeeze_nonnative(&mut self, num: usize, _: OptimizationType) -> Vec { - Self::get_elements_from_sponge(&mut self.s, num, false) - } - - fn squeeze_native(&mut self, num: usize) -> Vec { - self.squeeze_field_elements(num) - } - - fn squeeze_128_bits_nonnative(&mut self, num: usize) -> Vec { - Self::get_elements_from_sponge(&mut self.s, num, true) - } -} - -impl FiatShamirSpongeRng { - /// compress every two elements if possible. Provides a vector of (limb, num_of_additions), - /// both of which are P::BaseField. 
- fn compress_elements(src_limbs: &[(CF, CF)], ty: OptimizationType) -> Vec { - let capacity = CF::size_in_bits() - 1; - let mut dest_limbs = Vec::::new(); - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); - - let adjustment_factor_lookup_table = { - let mut table = Vec::::new(); - - let mut cur = CF::one(); - for _ in 1..=capacity { - table.push(cur); - cur.double_in_place(); - } - - table - }; - - let mut i = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { - Some(&src_limbs[i + 1]) - } else { - None - }; - - let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); - let second_max_bits_per_limb = if let Some(second) = second { - params.bits_per_limb + overhead!(second.1 + &CF::one()) - } else { - 0 - }; - - if let Some(second) = second { - if first_max_bits_per_limb + second_max_bits_per_limb <= capacity { - let adjustment_factor = - &adjustment_factor_lookup_table[second_max_bits_per_limb]; - - dest_limbs.push(first.0 * adjustment_factor + &second.0); - i += 2; - } else { - dest_limbs.push(first.0); - i += 1; - } - } else { - dest_limbs.push(first.0); - i += 1; - } - } - - dest_limbs - } - - /// obtain random elements from hashchain. - /// - /// not guaranteed to be uniformly distributed, should only be used in certain situations. 
- fn get_elements_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Vec { - let num_bits_per_nonnative = if outputs_short_elements { - 128 - } else { - F::size_in_bits() - 1 // also omit the highest bit - }; - let bits = sponge.squeeze_bits(num_bits_per_nonnative * num_elements); - - let mut lookup_table = Vec::::new(); - let mut cur = F::one(); - for _ in 0..num_bits_per_nonnative { - lookup_table.push(cur); - cur.double_in_place(); - } - - let mut dest_elements = Vec::::new(); - bits.chunks_exact(num_bits_per_nonnative) - .for_each(|per_nonnative_bits| { - // this can be done via BigInterger::from_bits; here, we use this method for - // consistency with the gadget counterpart - let mut res = F::zero(); - - for (i, bit) in per_nonnative_bits.iter().rev().enumerate() { - if *bit { - res += &lookup_table[i]; - } - } - - dest_elements.push(res); - }); - - dest_elements - } -} - -impl RngCore - for FiatShamirSpongeRng -{ - fn next_u32(&mut self) -> u32 { - let mut dest = [0u8; 4]; - - self.fill_bytes(&mut dest); - - u32::from_be_bytes(dest) - } - - fn next_u64(&mut self) -> u64 { - let mut dest = [0u8; 8]; - - self.fill_bytes(&mut dest); - - u64::from_be_bytes(dest) - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - assert!( - CF::size_in_bits() > 128, - "The native field of the algebraic sponge is too small." 
- ); - - let capacity = CF::size_in_bits() - 128; - let len = dest.len() * 8; - - let num_of_elements = (capacity + len - 1) / len; - let elements: Vec = self.s.squeeze_field_elements(num_of_elements); - - let mut bits = Vec::::new(); - for elem in elements.iter() { - let mut elem_bits = elem.into_repr().to_bits_be(); - elem_bits.reverse(); - bits.extend_from_slice(&elem_bits[0..capacity]); - } - - bits.truncate(len); - bits.chunks_exact(8) - .enumerate() - .for_each(|(i, bits_per_byte)| { - let mut byte = 0; - for (j, bit) in bits_per_byte.iter().enumerate() { - if *bit { - byte += 1 << j; - } - } - dest[i] = byte; - }); - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - self.fill_bytes(dest); - - Ok(()) - } -} - -impl Write - for FiatShamirSpongeRng -where - CF: Absorb, - ::Config: CryptographicSpongeParameters, -{ - fn write(&mut self, buf: &[u8]) -> IoResult { - self.absorb(&buf); - - Ok(buf.len()) - } - - fn flush(&mut self) -> IoResult<()> { - Ok(()) - } -} - -impl Read for FiatShamirSpongeRng { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - self.fill_bytes(buf); - - Ok(buf.len()) - } -} diff --git a/src/rng/mod.rs b/src/rng/mod.rs deleted file mode 100644 index 88b3c71..0000000 --- a/src/rng/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod sponge; -pub use sponge::*; - -mod fiat_shamir; -pub use fiat_shamir::*; \ No newline at end of file diff --git a/src/rng/sponge/mod.rs b/src/rng/sponge/mod.rs deleted file mode 100644 index 978a566..0000000 --- a/src/rng/sponge/mod.rs +++ /dev/null @@ -1,62 +0,0 @@ -use ark_ff::PrimeField; -use ark_r1cs_std::fields::nonnative::{params::OptimizationType, NonNativeFieldVar}; -use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; -use ark_crypto_primitives::sponge::{constraints::CryptographicSpongeVar, CryptographicSponge}; - -pub mod poseidon; - -pub trait CryptographicSpongeParameters { - fn from_rate(rate: usize) -> Self; -} - -pub trait CryptographicSpongeWithRate: 
CryptographicSponge -where - ::Config: CryptographicSpongeParameters, -{ - fn default_rate() -> usize; - - fn with_default_rate() -> Self { - let rate = Self::default_rate(); - - Self::from_rate(rate) - } - - fn from_rate(rate: usize) -> Self { - let params = - <::Config as CryptographicSpongeParameters>::from_rate( - rate, - ); - - ::new(¶ms) - } -} - -pub trait CryptographicSpongeVarNonNative: - CryptographicSpongeVar -where - >::Parameters: CryptographicSpongeParameters, -{ - fn default_rate() -> usize; - - fn with_default_rate(cs: ConstraintSystemRef) -> Self { - let rate = Self::default_rate(); - - Self::from_rate(cs, rate) - } - - fn from_rate(cs: ConstraintSystemRef, rate: usize) -> Self { - let params = - <>::Parameters as CryptographicSpongeParameters>::from_rate( - rate, - ); - - >::new(cs, ¶ms) - } - - /// Absorb non native elements - fn absorb_nonnative( - &mut self, - input: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError>; -} diff --git a/src/rng/sponge/poseidon.rs b/src/rng/sponge/poseidon.rs deleted file mode 100644 index 4c265ea..0000000 --- a/src/rng/sponge/poseidon.rs +++ /dev/null @@ -1,303 +0,0 @@ -use core::marker::PhantomData; - -use ark_ff::{FpConfig, PrimeField}; -use ark_r1cs_std::fields::nonnative::{ - params::OptimizationType, AllocatedNonNativeFieldVar, NonNativeFieldVar, -}; -use ark_r1cs_std::fields::fp::FpVar; -use ark_r1cs_std::prelude::UInt8; -use ark_r1cs_std::{alloc::AllocVar, boolean::Boolean}; -use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; -use ark_crypto_primitives::sponge::constraints::AbsorbGadget; -use ark_crypto_primitives::sponge::{ - constraints::CryptographicSpongeVar, - poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig, PoseidonSponge}, - CryptographicSponge, -}; - -use super::{CryptographicSpongeParameters, CryptographicSpongeVarNonNative}; -use crate::rng::CryptographicSpongeWithRate; -use crate::overhead; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub 
struct PoseidonArguments { - pub prime_bits: u64, - pub full_rounds: u32, - pub partial_rounds: u32, - pub skip_matrices: u64, - - _field: PhantomData, -} - -impl PoseidonArguments { - pub const DEFAULT: Self = Self { - prime_bits: F::Params::MODULUS_BITS as u64, - full_rounds: 8, - partial_rounds: 60, - skip_matrices: 0, - _field: PhantomData, - }; -} - -impl CryptographicSpongeWithRate for PoseidonSponge { - fn default_rate() -> usize { - PoseidonParametersWithDefaultRate::::DEFAULT_RATE - } -} - -impl CryptographicSpongeParameters for PoseidonConfig { - fn from_rate(rate: usize) -> Self { - PoseidonParametersWithDefaultRate::from_rate(rate).params - } -} - -impl - CryptographicSpongeVarNonNative for PoseidonSpongeVar -where - PoseidonSpongeVar: CryptographicSpongeVar, - >::Parameters: CryptographicSpongeParameters, -{ - fn default_rate() -> usize { - PoseidonParametersWithDefaultRate::::DEFAULT_RATE - } - - fn absorb_nonnative( - &mut self, - input: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - let mut src_limbs: Vec<(FpVar, CF)> = Vec::new(); - - for elem in input.iter() { - match elem { - NonNativeFieldVar::Constant(c) => { - let v = AllocatedNonNativeFieldVar::::new_constant(self.cs(), c)?; - - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - NonNativeFieldVar::Var(v) => { - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - } - } - - let capacity = CF::size_in_bits() - 1; - let mut dest_limbs = Vec::>::new(); - - if !src_limbs.is_empty() { - let params = - 
ark_r1cs_std::fields::nonnative::params::get_params(F::size_in_bits(), CF::size_in_bits(), ty); - - let adjustment_factor_lookup_table = { - let mut table = Vec::::new(); - - let mut cur = CF::one(); - for _ in 1..=capacity { - table.push(cur); - cur.double_in_place(); - } - - table - }; - - let mut i: usize = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { - Some(&src_limbs[i + 1]) - } else { - None - }; - - let first_max_bits_per_limb = - params.bits_per_limb + overhead!(first.1 + &CF::one()); - let second_max_bits_per_limb = if second.is_some() { - params.bits_per_limb + overhead!(second.unwrap().1 + &CF::one()) - } else { - 0 - }; - - if second.is_some() - && first_max_bits_per_limb + second_max_bits_per_limb <= capacity - { - let adjustment_factor = - &adjustment_factor_lookup_table[second_max_bits_per_limb]; - - dest_limbs.push(&first.0 * *adjustment_factor + &second.unwrap().0); - i += 2; - } else { - dest_limbs.push(first.0.clone()); - i += 1; - } - } - } - - self.absorb(&dest_limbs)?; - - Ok(()) - } -} - -/// Parameters and RNG used -#[derive(Clone, Debug)] -pub struct PoseidonParametersWithDefaultRate { - pub params: PoseidonConfig, -} - -impl PoseidonParametersWithDefaultRate { - /// Default rate for poseidon - pub const DEFAULT_RATE: usize = 4; -} - -impl From> for PoseidonParametersWithDefaultRate { - fn from(params: PoseidonConfig) -> Self { - Self { params } - } -} - -impl CryptographicSpongeParameters for PoseidonParametersWithDefaultRate { - fn from_rate(rate: usize) -> Self { - let PoseidonArguments { - prime_bits, - full_rounds, - partial_rounds, - skip_matrices, - .. 
- } = PoseidonArguments::::DEFAULT; - - // TODO consume the arguments - let capacity = 1; - let alpha = 5; - let _ = (rate, prime_bits, skip_matrices); - - // TODO generate secure constants - let ark = F::one(); - let ark = vec![ark; 3]; - let ark = vec![ark; (full_rounds + partial_rounds) as usize]; - - // TODO generate secure matrix - let mds = F::one(); - let mds = vec![mds; rate + capacity]; - let mds = vec![mds; rate + capacity]; - - PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, ark).into() - } -} - -#[derive(Clone)] -/// Wrapper for [`PoseidonSponge`] -pub struct PoseidonSpongeWithDefaultRate { - pub s: PoseidonSponge, -} - -impl From> for PoseidonSpongeWithDefaultRate { - fn from(s: PoseidonSponge) -> Self { - Self { s } - } -} - -impl CryptographicSponge for PoseidonSpongeWithDefaultRate { - type Config = PoseidonParametersWithDefaultRate; - - fn new(p: &Self::Config) -> Self { - PoseidonSponge::new(&p.params).into() - } - - fn absorb(&mut self, input: &impl ark_crypto_primitives::sponge::Absorb) { - self.s.absorb(input) - } - - fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { - self.s.squeeze_bytes(num_bytes) - } - - fn squeeze_bits(&mut self, num_bits: usize) -> Vec { - self.s.squeeze_bits(num_bits) - } -} - -impl CryptographicSpongeWithRate for PoseidonSpongeWithDefaultRate { - fn default_rate() -> usize { - PoseidonParametersWithDefaultRate::::DEFAULT_RATE - } -} - -#[derive(Clone)] -/// Wrapper for [`PoseidonSpongeVar`] -pub struct PoseidonSpongeVarWithDefaultRate { - pub s: PoseidonSpongeVar, -} - -impl From> for PoseidonSpongeVarWithDefaultRate { - fn from(s: PoseidonSpongeVar) -> Self { - Self { s } - } -} - -impl CryptographicSpongeVar - for PoseidonSpongeVarWithDefaultRate -{ - type Parameters = PoseidonParametersWithDefaultRate; - - fn new(cs: ConstraintSystemRef, p: &Self::Parameters) -> Self { - PoseidonSpongeVar::new(cs, &p.params).into() - } - - fn cs(&self) -> ConstraintSystemRef { - self.s.cs() - } - - fn absorb(&mut 
self, input: &impl AbsorbGadget) -> Result<(), SynthesisError> { - self.s.absorb(input) - } - - fn squeeze_bytes(&mut self, num_bytes: usize) -> Result>, SynthesisError> { - self.s.squeeze_bytes(num_bytes) - } - - fn squeeze_bits(&mut self, num_bits: usize) -> Result>, SynthesisError> { - self.s.squeeze_bits(num_bits) - } - - fn squeeze_field_elements( - &mut self, - num_elements: usize, - ) -> Result>, SynthesisError> { - self.s.squeeze_field_elements(num_elements) - } -} - -impl - CryptographicSpongeVarNonNative for PoseidonSpongeVarWithDefaultRate -{ - fn default_rate() -> usize { - PoseidonParametersWithDefaultRate::::DEFAULT_RATE - } - - fn absorb_nonnative( - &mut self, - input: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - self.s.absorb_nonnative(input, ty) - } -} diff --git a/src/test.rs b/src/test.rs index 9346d12..ea2a4da 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,18 +115,19 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::{Marlin, rng::FiatShamirChaChaRng}; + use crate::{Marlin, rng::SimpleHashFiatShamirRng}; use ark_bls12_381::{Bls12_381, Fr}; use ark_ff::UniformRand; - use ark_poly::univariate::DensePolynomial; + use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; - use rand_chacha::ChaChaRng; + use ark_ec::pairing::Pairing; - type FS = FiatShamirChaChaRng; - type MultiPC = MarlinKZG10, FS>; - type MarlinInst = Marlin; + type BF = ::BaseField; + type FS = SimpleHashFiatShamirRng; + type MultiPC = MarlinKZG10::ScalarField>, FS>; + type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); From 9bdd8a2ab18cf41b485facb3b2797959a792cced Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 10:29:28 +0100 Subject: [PATCH 07/40] readd old rng mod --- src/rng.rs | 88 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 
file changed, 88 insertions(+) create mode 100644 src/rng.rs diff --git a/src/rng.rs b/src/rng.rs new file mode 100644 index 0000000..c4bdb0a --- /dev/null +++ b/src/rng.rs @@ -0,0 +1,88 @@ +use crate::Vec; +use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::convert::From; +use ark_std::marker::PhantomData; +use ark_std::rand::{RngCore, SeedableRng}; +use digest::Digest; + +/// An RNG suitable for Fiat-Shamir transforms +pub trait FiatShamirRng: RngCore +CryptographicSponge { + /// Create a new `Self` with an initial input + fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self{ + ::new(initial_input) + } + /// Absorb new inputs into state + fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T){ + ::absorb(&mut self, new_input); + } +} + +/// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed +/// and the new seed material. + +#[derive(Clone)] +pub struct SimpleHashFiatShamirRng { + r: R, + seed: [u8; 32], + #[doc(hidden)] + digest: PhantomData, +} + +impl RngCore for SimpleHashFiatShamirRng { + #[inline] + fn next_u32(&mut self) -> u32 { + self.r.next_u32() + } + + #[inline] + fn next_u64(&mut self) -> u64 { + self.r.next_u64() + } + + #[inline] + fn fill_bytes(&mut self, dest: &mut [u8]) { + self.r.fill_bytes(dest); + } + + #[inline] + fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { + Ok(self.r.fill_bytes(dest)) + } +}; + +impl FiatShamirRng for SimpleHashFiatShamirRng{}; + +impl CryptographicSponge for SimpleHashFiatShamirRng { + /// Create a new `Self` by initializing with a fresh seed. + /// `self.seed = H(initial_input)`. 
+ #[inline] + fn new<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self { + let mut bytes = Vec::new(); + initial_input + .write(&mut bytes) + .expect("failed to convert to bytes"); + let seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); + let r = R::from_seed(::from(seed)); + Self { + r, + seed: seed, + digest: PhantomData, + } + } + + /// Refresh `self.seed` with new material. Achieved by setting + /// `self.seed = H(new_input || self.seed)`. + #[inline] + fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) { + let mut bytes = Vec::new(); + new_input + .write(&mut bytes) + .expect("failed to convert to bytes"); + bytes.extend_from_slice(&self.seed); + self.seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); + self.r = R::from_seed(::from(self.seed)); + } + +} + From 71a603b107e135774297dad7e97ab87a45a3e529 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 11:10:48 +0100 Subject: [PATCH 08/40] remove pedanticerrors temporarily --- src/lib.rs | 9 ++++----- src/test.rs | 10 ++++++---- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 43fd005..9373030 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -10,7 +10,7 @@ #![deny(unused_import_braces, unused_qualifications, trivial_casts)] #![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] -#![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] +// #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] @@ -86,15 +86,14 @@ use ahp::EvaluationsProvider; mod test; /// The compiled argument system.FiatShamiRng -pub struct Marlin, FS>, FS: FiatShamirRng>( +pub struct Marlin, FS>, FS: FiatShamirRng>( #[doc(hidden)] PhantomData, 
- #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -impl, FS>, FS: FiatShamirRng> - Marlin +impl, FS>, FS: FiatShamirRng> + Marlin { /// The personalization string for this protocol. Used to personalize the /// Fiat-Shamir rng. diff --git a/src/test.rs b/src/test.rs index ea2a4da..60b45e7 100644 --- a/src/test.rs +++ b/src/test.rs @@ -122,12 +122,14 @@ mod marlin { use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; - use ark_ec::pairing::Pairing; + use ark_ec::{pairing::Pairing, bls12::Bls12}; + use blake2::Blake2s; + use rand_chacha::ChaChaRng; type BF = ::BaseField; - type FS = SimpleHashFiatShamirRng; - type MultiPC = MarlinKZG10::ScalarField>, FS>; - type MarlinInst = Marlin; + type FS = SimpleHashFiatShamirRng; + type MultiPC = MarlinKZG10, FS>; + type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); From c3dbc051bbf9914d6d5b5dc0f2de2a5b949ef63c Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 11:53:13 +0100 Subject: [PATCH 09/40] updated realtive to poly-commit-v0.4 --- Cargo.toml | 2 +- src/data_structures.rs | 4 ++-- src/lib.rs | 9 +++++---- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8371137..b4eafec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ keywords = ["cryptography", "commitments", "zkSNARK"] categories = ["cryptography"] include = ["Cargo.toml", "src", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] license = "MIT/Apache-2.0" -edition = "2018" +edition = "2021" [dependencies] ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] } diff --git a/src/data_structures.rs b/src/data_structures.rs index 2251675..f223d74 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -95,7 +95,7 @@ pub struct Proof,S /// The field elements sent by the prover. 
pub prover_messages: Vec>, /// An evaluation proof from the polynomial commitment. - pub pc_proof: BatchLCProof>, + pub pc_proof: BatchLCProof, } impl,S>,S:CryptographicSponge> Proof { @@ -104,7 +104,7 @@ impl,S>,S:Cryptogr commitments: Vec>, evaluations: Vec, prover_messages: Vec>, - pc_proof: BatchLCProof>, + pc_proof: BatchLCProof, ) -> Self { Self { commitments, diff --git a/src/lib.rs b/src/lib.rs index 9373030..71dcd89 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,6 +21,7 @@ extern crate ark_std; use ark_ff::{PrimeField, UniformRand}; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::Evaluations; +use ark_poly_commit::challenge::ChallengeGenerator; use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment}; use ark_relations::r1cs::ConstraintSynthesizer; use ark_std::rand::RngCore; @@ -312,7 +313,7 @@ impl, FS>, FS: Fia end_timer!(eval_time); fs_rng.absorb(&evaluations); - let opening_challenge: F = u128::rand(&mut fs_rng).into(); + let mut opening_challenge = ChallengeGenerator::new_univariate(&mut fs_rng); let pc_proof = PC::open_combinations( &index_pk.committer_key, @@ -320,7 +321,7 @@ impl, FS>, FS: Fia polynomials, &labeled_comms, &query_set, - opening_challenge, + &mut opening_challenge, &comm_rands, Some(zk_rng), ) @@ -413,7 +414,7 @@ impl, FS>, FS: Fia AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); fs_rng.absorb(&proof.evaluations); - let opening_challenge: F = u128::rand(&mut fs_rng).into(); + let mut opening_challenge = ChallengeGenerator::new_univariate(&mut fs_rng); let mut evaluations = Evaluations::new(); let mut evaluation_labels = Vec::new(); @@ -442,7 +443,7 @@ impl, FS>, FS: Fia &query_set, &evaluations, &proof.pc_proof, - opening_challenge, + &mut opening_challenge, rng, ) .map_err(Error::from_pc_err)?; From cbf5a4ac822aad51211340ffeef4cc9da77f7e0b Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 13:51:18 +0100 Subject: [PATCH 10/40] 
implemented poseidon as rng --- src/rng.rs | 69 +++++++----------------------------------------------- 1 file changed, 9 insertions(+), 60 deletions(-) diff --git a/src/rng.rs b/src/rng.rs index c4bdb0a..19f726b 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,88 +1,37 @@ use crate::Vec; use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; +use ark_ff::{Field, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::convert::From; use ark_std::marker::PhantomData; use ark_std::rand::{RngCore, SeedableRng}; use digest::Digest; -/// An RNG suitable for Fiat-Shamir transforms -pub trait FiatShamirRng: RngCore +CryptographicSponge { - /// Create a new `Self` with an initial input - fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self{ - ::new(initial_input) - } - /// Absorb new inputs into state - fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T){ - ::absorb(&mut self, new_input); - } -} - /// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed /// and the new seed material. 
+/// Exposes a particular instantiation of the Poseidon sponge -#[derive(Clone)] -pub struct SimpleHashFiatShamirRng { - r: R, - seed: [u8; 32], - #[doc(hidden)] - digest: PhantomData, -} +pub struct SimplePoseidonRng(PoseidonSponge); -impl RngCore for SimpleHashFiatShamirRng { +impl RngCore for SimplePoseidonRng { #[inline] fn next_u32(&mut self) -> u32 { - self.r.next_u32() + self.0.squeeze_bits(32).iter().rev().fold(0, |acc, &bit| (acc << 1) | (bit as u32)) } #[inline] fn next_u64(&mut self) -> u64 { - self.r.next_u64() + self.0.squeeze_bits(64).iter().rev().fold(0, |acc, &bit| (acc << 1) | (bit as u64)) } #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { - self.r.fill_bytes(dest); + self.0.squeeze_bytes(dest.len()).iter().enumerate().map(|(i, x)| dest[i] = *x ); } #[inline] fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - Ok(self.r.fill_bytes(dest)) + Ok(self.fill_bytes(dest)) } -}; - -impl FiatShamirRng for SimpleHashFiatShamirRng{}; - -impl CryptographicSponge for SimpleHashFiatShamirRng { - /// Create a new `Self` by initializing with a fresh seed. - /// `self.seed = H(initial_input)`. - #[inline] - fn new<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self { - let mut bytes = Vec::new(); - initial_input - .write(&mut bytes) - .expect("failed to convert to bytes"); - let seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); - let r = R::from_seed(::from(seed)); - Self { - r, - seed: seed, - digest: PhantomData, - } - } - - /// Refresh `self.seed` with new material. Achieved by setting - /// `self.seed = H(new_input || self.seed)`. 
- #[inline] - fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) { - let mut bytes = Vec::new(); - new_input - .write(&mut bytes) - .expect("failed to convert to bytes"); - bytes.extend_from_slice(&self.seed); - self.seed = CanonicalDeserialize::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]"); - self.r = R::from_seed(::from(self.seed)); - } - } - From d860a915a26a70b91f925a19411830126fccf40c Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 14:05:59 +0100 Subject: [PATCH 11/40] added default and spong methods --- src/rng.rs | 733 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 732 insertions(+), 1 deletion(-) diff --git a/src/rng.rs b/src/rng.rs index 19f726b..44be0d7 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,5 +1,5 @@ use crate::Vec; -use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::{CryptographicSponge, Absorb}; use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; use ark_ff::{Field, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; @@ -12,6 +12,7 @@ use digest::Digest; /// and the new seed material. 
/// Exposes a particular instantiation of the Poseidon sponge +#[derive(Clone)] pub struct SimplePoseidonRng(PoseidonSponge); impl RngCore for SimplePoseidonRng { @@ -35,3 +36,733 @@ impl RngCore for SimplePoseidonRng { Ok(self.fill_bytes(dest)) } } + +impl CryptographicSponge for SimplePoseidonRng { + type Config = PoseidonConfig; + + fn new(params: &Self::Config) -> Self { + Self(PoseidonSponge::new(params)) + } + + fn absorb(&mut self, input: &impl Absorb) { + self.0.absorb(input); + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + self.0.squeeze_bytes(num_bytes) + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + self.0.squeeze_bits(num_bits) + } +} + +impl Default for SimplePoseidonRng { + fn default() -> Self { + Self(PoseidonSponge::new(&poseidon_parameters_for_test())) + } +} + + +/// Generate default parameters (bls381-fr-only) for alpha = 17, state-size = 8 +fn poseidon_parameters_for_test() -> PoseidonConfig { + let alpha = 17; + let mds = vec![ + vec![ + F::from_str( + "43228725308391137369947362226390319299014033584574058394339561338097152657858", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20729134655727743386784826341366384914431326428651109729494295849276339718592", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "14275792724825301816674509766636153429127896752891673527373812580216824074377", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "3039440043015681380498693766234886011876841428799441709991632635031851609481", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6678863357926068615342013496680930722082156498064457711885464611323928471101", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "37355038393562575053091209735467454314247378274125943833499651442997254948957", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "26481612700543967643159862864328231943993263806649000633819754663276818191580", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"30103264397473155564098369644643015994024192377175707604277831692111219371047", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5712721806190262694719203887224391960978962995663881615739647362444059585747", + ) + .map_err(|_| ()) + .unwrap(), + ], + ]; + let ark = vec![ + vec![ + F::from_str( + "44595993092652566245296379427906271087754779418564084732265552598173323099784", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23298463296221002559050231199021122673158929708101049474262017406235785365706", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34212491019164671611180318500074499609633402631511849759183986060951187784466", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "19098051134080182375553680073525644187968170656591203562523489333616681350367", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7027675418691353855077049716619550622043312043660992344940177187528247727783", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47642753235356257928619065424282314733361764347085604019867862722762702755609", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "24281836129477728386327945482863886685457469794572168729834072693507088619997", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "12624893078331920791384400430193929292743809612452779381349824703573823883410", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "22654862987689323504199204643771547606936339944127455903448909090318619188561", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "27229172992560143399715985732065737093562061782414043625359531774550940662372", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "13224952063922250960936823741448973692264041750100990569445192064567307041002", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40380869235216625717296601204704413215735530626882135230693823362552484855508", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + 
"4245751157938905689397184705633683893932492370323323780371834663438472308145", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "8252156875535418429533049587170755750275631534314711502253775796882240991261", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "32910829712934971129644416249914075073083903821282503505466324428991624789936", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "49412601297460128335642438246716127241669915737656789613664349252868389975962", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "841661305510340459373323516098909074520942972558284146843779636353111592117", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "37926489020263024391336570420006226544461516787280929232555625742588667303947", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "18433043696013996573551852847056868761017170818820490351056924728720017242180", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45376910275288438312773930242803223482318753992595269901397542214841496212310", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47854349410014339708332226068958253098964727682486278458389508597930796651514", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32638426693771251366613055506166587312642876874690861030672730491779486904360", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "19105439281696418043426755774110765432959446684037017837894045255490581318047", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "13484299981373196201166722380389594773562113262309564134825386266765751213853", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "63360321133852659797114062808297090090814531427710842859827725871241144161", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42427543035537409467993338717379268954936885184662765745740070438835506287271", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "149101987103211771991327927827692640556911620408176100290586418839323044234", 
+ ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "8341764062226826803887898710015561861526081583071950015446833446251359696930", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45635980415044299013530304465786867101223925975971912073759959440335364441441", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49833261156201520743834327917353893365097424877680239796845398698940689734850", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "26764715016591436228000634284249890185894507497739511725029482580508707525029", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25054530812095491217523557726611612265064441619646263299990388543372685322499", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47654590955096246997622155031169641628093104787883934397920286718814889326452", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "16463825890556752307085325855351334996898686633642574805918056141310194135796", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17473961341633494489168064889016732306117097771640351649096482400214968053040", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49914603434867854893558366922996753035832008639512305549839666311012232077468", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "17122578514152308432111470949473865420090463026624297565504381163777697818362", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34870689836420861427379101859113225049736283485335674111421609473028315711541", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "4622082908476410083286670201138165773322781640914243047922441301693321472984", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "6079244375752010013798561155333454682564824861645642293573415833483620500976", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2635090520059500019661864086615522409798872905401305311748231832709078452746", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"19070766579582338321241892986615538320421651429118757507174186491084617237586", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "12622420533971517050761060317049369208980632120901481436392835424625664738526", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "8965101225657199137904506150282256568170501907667138404080397024857524386266", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27085091008069524593196374148553176565775450537072498305327481366756159319838", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "45929056591150668409624595495643698205830429971690813312608217341940499221218", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "50361689160518167880500080025023064746137161030119436080957023803101861300846", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6722586346537620732668048024627882970582133613352245923413730968378696371065", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "7340485916200743279276570085958556798507770452421357119145466906520506506342", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25946733168219652706630789514519162148860502996914241011500280690204368174083", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9962367658743163006517635070396368828381757404628822422306438427554934645464", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "7221669722700687417346373353960536661883467014204005276831020252277657076044", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21487980358388383563030903293359140836304488103090321183948009095669344637431", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44389482047246878765773958430749333249729101516826571588063797358040130313157", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32887270862917330820874162842519225370447850172085449103568878409533683733185", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"15453393396765207016379045014101989306173462885430532298601655955681532648226", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5478929644476681096437469958231489102974161353940993351588559414552523375472", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "41981370411247590312677561209178363054744730805951096631186178388981705304138", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "3474136981645476955784428843999869229067282976757744542648188369810577298585", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26251477770740399889956219915654371915771248171098220204692699710414817081869", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "51916561889718854106125837319509539220778634838409949714061033196765117231752", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25355145802812435959748831835587713214179184608408449220418373832038339021974", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31950684570730625275416731570246297947385359051792335826965013637877068017530", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "40966378914980473680181850710703295982197782082391794594149984057481543436879", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1141315130963422417761731263662398620858625339733452795772225916965481730059", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9812100862165422922235757591915383485338044715409891361026651619010947646011", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "25276091996614379065765602410190790163396484122487585763380676888280427744737", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "18512694312063606403196469408971540495273694846641903978723927656359350642619", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5791584766415439694303685437881192048262049244830616851865505314899699012588", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + 
"34501536331706470927069149344450300773777486993504673779438188495686129846168", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10797737565565774079718466476236831116206064650762676383469703413649447678207", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42599392747310354323136214835734307933597896695637215127297036595538235868368", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "1336670998775417133322626564820911986969949054454812685145275612519924150700", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2630141283339761901081411552890260088516693208402906795133548756078952896770", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5206688943117414740600380377278238268309952400341418217132724749372435975215", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "10739264253827005683370721104077252560524362323422172665530191908848354339715", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48010640624945719826344492755710886355389194986527731603685956726907395779674", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47880724693177306044229143357252697148359033158394459365791331000715957339701", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "51658938856669444737833983076793759752280196674149218924101718974926964118996", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27558055650076329657496888512074319504342606463881203707330358472954748913263", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38886981777859313701520424626728402175860609948757992393598285291689196608037", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "17152756165118461969542990684402410297675979513690903033350206658079448802479", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43766946932033687220387514221943418338304186408056458476301583041390483707207", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"24324495647041812436929170644873622904287038078113808264580396461953421400343", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "6935839211798937659784055008131602708847374430164859822530563797964932598700", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42126767398190942911395299419182514513368023621144776598842282267908712110039", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5702364486091252903915715761606014714345316580946072019346660327857498603375", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "28184981699552917714085740963279595942132561155181044254318202220270242523053", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27078204494010940048327822707224393686245007379331357330801926151074766130790", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "5004172841233947987988267535285080365124079140142987718231874743202918551203", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "7974360962120296064882769128577382489451060235999590492215336103105134345602", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48062035869818179910046292951628308709251170031813126950740044942870578526376", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26361151154829600651603985995297072258262605598910254660032612019129606811983", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "46973867849986280770641828877435510444176572688208439836496241838832695841519", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1219439673853113792340300173186247996249367102884530407862469123523013083971", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "8063356002935671186275773257019749639571745240775941450161086349727882957042", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "8815571992701260640209942886673939234666734294275300852283020522390608544536", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"36384568984671043678320545346945893232044626942887414733675890845013312931948", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7493936589040764830842760521372106574503511314427857201860148571929278344956", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "26516538878265871822073279450474977673130300973488209984756372331392531193948", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "3872858659373466814413243601289105962248870842202907364656526273784217311104", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "8291822807524000248589997648893671538524566700364221355689839490238724479848", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32842548776827046388198955038089826231531188946525483251252938248379132381248", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10749428410907700061565796335489079278748501945557710351216806276547834974736", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43342287917341177925402357903832370099402579088513884654598017447701677948416", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "29658571352070370791360499299098360881857072189358092237807807261478461425147", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7805182565862454238315452208989152534554369855020544477885853141626690738363", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30699555847500141715826240743138908521140760599479365867708690318477369178275", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "1231951350103545216624376889222508148537733140742167414518514908719103925687", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "24784260089125933876714702247471508077514206350883487938806451152907502751770", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "36563542611079418454711392295126742705798573252480028863133394504154697924536", + ) + .map_err(|_| ()) + .unwrap(), + ], + ]; + let full_rounds = 8; + let total_rounds = 37; + let partial_rounds = total_rounds 
- full_rounds; + let capacity = 1; + let rate = 2; + PoseidonConfig { + full_rounds, + partial_rounds, + alpha, + ark, + mds, + rate, + capacity, + } +} \ No newline at end of file From 66a6786fb3cdf3d5c6aeb37c87e5b92c420fb5ee Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 16:54:58 +0100 Subject: [PATCH 12/40] LabelledCommitment does not implement CanonicalSerialize --- src/lib.rs | 47 +++++++++++++++++++++++++---------------------- src/rng.rs | 4 ++++ src/test.rs | 9 +++++---- 3 files changed, 34 insertions(+), 26 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 71dcd89..7fb561c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,6 +18,7 @@ #[macro_use] extern crate ark_std; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{PrimeField, UniformRand}; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::Evaluations; @@ -35,7 +36,7 @@ use ark_std::{ vec::Vec, }; use ark_serialize::CanonicalSerialize; -use crate::rng::FiatShamirRng; +use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonDefaultConfig, PoseidonConfig}; #[cfg(not(feature = "std"))] macro_rules! eprintln { @@ -70,7 +71,8 @@ macro_rules! push_to_vec { /// Implements a Fiat-Shamir based Rng that allows one to incrementally update /// the seed based on new messages in the proof transcript. pub mod rng; -// pub use rng::*; +pub use rng::*; + mod error; pub use error::*; @@ -87,14 +89,14 @@ use ahp::EvaluationsProvider; mod test; /// The compiled argument system.FiatShamiRng -pub struct Marlin, FS>, FS: FiatShamirRng>( +pub struct Marlin,S>, S: DefaultSpongeRNG>( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, - #[doc(hidden)] PhantomData, + #[doc(hidden)] PhantomData, ); -impl, FS>, FS: FiatShamirRng> - Marlin +impl, S>, S: DefaultSpongeRNG> + Marlin { /// The personalization string for this protocol. Used to personalize the /// Fiat-Shamir rng. 
@@ -107,7 +109,7 @@ impl, FS>, FS: Fia num_variables: usize, num_non_zero: usize, rng: &mut R, - ) -> Result, Error> { + ) -> Result, Error> { let max_degree = AHPForR1CS::::max_degree(num_constraints, num_variables, num_non_zero)?; let setup_time = start_timer!(|| { format!( @@ -124,9 +126,9 @@ impl, FS>, FS: Fia /// Generate the index-specific (i.e., circuit-specific) prover and verifier /// keys. This is a deterministic algorithm that anyone can rerun. pub fn index>( - srs: &UniversalSRS, + srs: &UniversalSRS, c: C, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); // TODO: Add check that c is in the correct mode. @@ -175,18 +177,19 @@ impl, FS>, FS: Fia /// Create a zkSNARK asserting that the constraint system is satisfied. pub fn prove, R: RngCore>( - index_pk: &IndexProverKey, + index_pk: &IndexProverKey, c: C, - zk_rng: &mut R, - ) -> Result, Error> { + zk_rng: &mut R + ) -> Result, Error> { let prover_time = start_timer!(|| "Marlin::Prover"); // Add check that c is in the correct mode. 
let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); - let mut fs_rng = FS::initialize( - &to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap(), - ); + let init_bytes =&to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap(); + let mut fs_rng = S::default(); + fs_rng.absorb(init_bytes); + // -------------------------------------------------------------------- // First round @@ -312,8 +315,8 @@ impl, FS>, FS: Fia let evaluations = evaluations.into_iter().map(|x| x.1).collect::>(); end_timer!(eval_time); - fs_rng.absorb(&evaluations); - let mut opening_challenge = ChallengeGenerator::new_univariate(&mut fs_rng); + fs_rng.absorb(&to_bytes![&evaluations].unwrap()); + let mut opening_challenge = ChallengeGenerator::::new_multivariate(fs_rng); let pc_proof = PC::open_combinations( &index_pk.committer_key, @@ -339,9 +342,9 @@ impl, FS>, FS: Fia /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. 
pub fn verify( - index_vk: &IndexVerifierKey, + index_vk: &IndexVerifierKey, public_input: &[F], - proof: &Proof, + proof: &Proof, rng: &mut R, ) -> Result> { let verifier_time = start_timer!(|| "Marlin::Verify"); @@ -358,8 +361,8 @@ impl, FS>, FS: Fia unpadded_input }; - let mut fs_rng = - FS::initialize(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); + let mut fs_rng = S::default(); + fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); // -------------------------------------------------------------------- // First round @@ -413,7 +416,7 @@ impl, FS>, FS: Fia let (query_set, verifier_state) = AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); - fs_rng.absorb(&proof.evaluations); + fs_rng.absorb(&to_bytes![&proof.evaluations].unwrap()); let mut opening_challenge = ChallengeGenerator::new_univariate(&mut fs_rng); let mut evaluations = Evaluations::new(); diff --git a/src/rng.rs b/src/rng.rs index 44be0d7..d45cfe9 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -63,6 +63,10 @@ impl Default for SimplePoseidonRng { } } +pub trait DefaultSpongeRNG : Default + CryptographicSponge + RngCore{} + +impl DefaultSpongeRNG for SimplePoseidonRng {} + /// Generate default parameters (bls381-fr-only) for alpha = 17, state-size = 8 fn poseidon_parameters_for_test() -> PoseidonConfig { diff --git a/src/test.rs b/src/test.rs index 60b45e7..944af8d 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,7 +115,8 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::{Marlin, rng::SimpleHashFiatShamirRng}; + use crate::Marlin; + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_bls12_381::{Bls12_381, Fr}; use ark_ff::UniformRand; @@ -127,9 +128,9 @@ mod marlin { use rand_chacha::ChaChaRng; type BF = ::BaseField; - type FS = SimpleHashFiatShamirRng; - type MultiPC = MarlinKZG10, FS>; - type MarlinInst = Marlin; + type S = PoseidonSponge; + type MultiPC = MarlinKZG10, S>; + 
type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); From 9ab0b3e885342ea90aa678caff07511ca175ac10 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 12 Sep 2023 17:07:45 +0100 Subject: [PATCH 13/40] builds! --- Cargo.toml | 3 ++- src/ahp/prover.rs | 2 +- src/rng.rs | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b4eafec..3c7af32 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,7 +26,8 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0", default-features = false } ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } -ark-poly-commit = { version = "^0.4.0", default-features = false } +#ark-poly-commit = { version = "^0.4.0", default-features = false } +ark-poly-commit = {path = "../poly-commit"} ark-r1cs-std = { version = "^0.4.0", default-features = false } ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = [ "r1cs" ] } diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 843a8c5..bfae1db 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -80,7 +80,7 @@ impl CanonicalSerialize for ProverMsg { ProverMsg::EmptyMessage => None, ProverMsg::FieldElements(v) => Some(v.clone()), }; - res.serialize_with_mode(writer, compress); + res.serialize_with_mode(writer, compress)?; Ok(()) } diff --git a/src/rng.rs b/src/rng.rs index d45cfe9..0bdab60 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -28,7 +28,7 @@ impl RngCore for SimplePoseidonRng { #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { - self.0.squeeze_bytes(dest.len()).iter().enumerate().map(|(i, x)| dest[i] = *x ); + dest.copy_from_slice(self.0.squeeze_bytes(dest.len()).as_slice()); } #[inline] From e85c8fc25237f99e11ee33f8378faadff428852e Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 18 Sep 2023 12:40:30 +0100 Subject: [PATCH 14/40] tests building --- 
src/ahp/prover.rs | 4 ++-- src/lib.rs | 11 +++++------ src/rng.rs | 10 +++++----- src/test.rs | 3 ++- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index bfae1db..3459d2c 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -75,7 +75,7 @@ pub enum ProverMsg { } impl CanonicalSerialize for ProverMsg { - fn serialize_with_mode(&self, mut writer: W, compress: Compress) -> Result<(), SerializationError> { + fn serialize_with_mode(&self, writer: W, compress: Compress) -> Result<(), SerializationError> { let res = match self { ProverMsg::EmptyMessage => None, ProverMsg::FieldElements(v) => Some(v.clone()), @@ -102,7 +102,7 @@ impl Valid for ProverMsg{ } } impl CanonicalDeserialize for ProverMsg { - fn deserialize_with_mode(mut reader: R, compress:Compress, validate: Validate) -> Result { + fn deserialize_with_mode(reader: R, compress:Compress, validate: Validate) -> Result { let res = Option::>::deserialize_with_mode(reader, compress, validate)?; if let Some(res) = res { Ok(ProverMsg::FieldElements(res)) diff --git a/src/lib.rs b/src/lib.rs index 7fb561c..1b29c10 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,7 +7,7 @@ //! is the same as the number of constraints (i.e., where the constraint //! matrices are square). Furthermore, Marlin only supports instances where the //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1). 
-#![deny(unused_import_braces, unused_qualifications, trivial_casts)] +// #![deny(unused_import_braces, unused_qualifications, trivial_casts)] #![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] // #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] @@ -19,7 +19,7 @@ extern crate ark_std; use ark_crypto_primitives::sponge::CryptographicSponge; -use ark_ff::{PrimeField, UniformRand}; +use ark_ff::{PrimeField}; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::Evaluations; use ark_poly_commit::challenge::ChallengeGenerator; @@ -36,7 +36,7 @@ use ark_std::{ vec::Vec, }; use ark_serialize::CanonicalSerialize; -use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonDefaultConfig, PoseidonConfig}; + #[cfg(not(feature = "std"))] macro_rules! eprintln { @@ -186,9 +186,8 @@ impl, S>, S: Defau let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); - let init_bytes =&to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap(); let mut fs_rng = S::default(); - fs_rng.absorb(init_bytes); + fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap()); // -------------------------------------------------------------------- @@ -368,7 +367,7 @@ impl, S>, S: Defau // First round let first_comms = &proof.commitments[0]; - fs_rng.absorb(&to_bytes![first_comms, proof.prover_messages[0]].unwrap()); + fs_rng.absorb(&to_bytes![first_comms.to_owned(), proof.prover_messages[0]].unwrap()); let (_, verifier_state) = AHPForR1CS::verifier_first_round(index_vk.index_info, &mut fs_rng)?; diff --git a/src/rng.rs b/src/rng.rs index 0bdab60..3b2481d 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,11 +1,11 @@ use crate::Vec; use ark_crypto_primitives::sponge::{CryptographicSponge, Absorb}; use 
ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; -use ark_ff::{Field, PrimeField}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::convert::From; -use ark_std::marker::PhantomData; -use ark_std::rand::{RngCore, SeedableRng}; +use ark_ff::{PrimeField}; + + + +use ark_std::rand::{RngCore}; use digest::Digest; /// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed diff --git a/src/test.rs b/src/test.rs index 944af8d..b98ed95 100644 --- a/src/test.rs +++ b/src/test.rs @@ -116,6 +116,7 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; use crate::Marlin; + use crate::rng::{DefaultSpongeRNG, SimplePoseidonRng}; use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_bls12_381::{Bls12_381, Fr}; @@ -128,7 +129,7 @@ mod marlin { use rand_chacha::ChaChaRng; type BF = ::BaseField; - type S = PoseidonSponge; + type S = SimplePoseidonRng; type MultiPC = MarlinKZG10, S>; type MarlinInst = Marlin; From d805506af8d13a6e2af0988feb816175401a9f07 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 18 Sep 2023 14:44:16 +0100 Subject: [PATCH 15/40] to unlabeled commit --- src/lib.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 1b29c10..af9241e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -204,8 +204,9 @@ impl, S>, S: Defau ) .map_err(Error::from_pc_err)?; end_timer!(first_round_comm_time); + let fcinput = first_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); - fs_rng.absorb(&to_bytes![first_comms, prover_first_msg].unwrap()); + fs_rng.absorb(&to_bytes![fcinput, prover_first_msg].unwrap()); let (verifier_first_msg, verifier_state) = AHPForR1CS::verifier_first_round(index_pk.index_vk.index_info, &mut fs_rng)?; @@ -226,7 +227,8 @@ impl, S>, S: Defau .map_err(Error::from_pc_err)?; end_timer!(second_round_comm_time); - fs_rng.absorb(&to_bytes![second_comms, 
prover_second_msg].unwrap()); + let scinput = second_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); + fs_rng.absorb(&to_bytes![scinput, prover_second_msg].unwrap()); let (verifier_second_msg, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); @@ -246,7 +248,9 @@ impl, S>, S: Defau .map_err(Error::from_pc_err)?; end_timer!(third_round_comm_time); - fs_rng.absorb(&to_bytes![third_comms, prover_third_msg].unwrap()); + + let tcinput = third_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); + fs_rng.absorb(&to_bytes![tcinput, prover_third_msg].unwrap()); let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- @@ -376,7 +380,7 @@ impl, S>, S: Defau // -------------------------------------------------------------------- // Second round let second_comms = &proof.commitments[1]; - fs_rng.absorb(&to_bytes![second_comms, proof.prover_messages[1]].unwrap()); + fs_rng.absorb(&to_bytes![second_comms.to_owned(), proof.prover_messages[1]].unwrap()); let (_, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- @@ -384,7 +388,7 @@ impl, S>, S: Defau // -------------------------------------------------------------------- // Third round let third_comms = &proof.commitments[2]; - fs_rng.absorb(&to_bytes![third_comms, proof.prover_messages[2]].unwrap()); + fs_rng.absorb(&to_bytes![third_comms.to_owned(), proof.prover_messages[2]].unwrap()); let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- From 1df70b324b67090058749bd92f6213704b371118 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 18 Sep 2023 16:08:30 +0100 Subject: [PATCH 16/40] outlinign test pass --- src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/src/lib.rs b/src/lib.rs index af9241e..4baf16f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -420,7 +420,7 @@ impl, S>, S: Defau AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); fs_rng.absorb(&to_bytes![&proof.evaluations].unwrap()); - let mut opening_challenge = ChallengeGenerator::new_univariate(&mut fs_rng); + let mut opening_challenge = ChallengeGenerator::new_multivariate(fs_rng); let mut evaluations = Evaluations::new(); let mut evaluation_labels = Vec::new(); From 8a91943e6ef693b12456c695c0702139a6da760b Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 12:08:21 +0100 Subject: [PATCH 17/40] revert to ark poly commit --- Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 3c7af32..b4eafec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,8 +26,7 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0", default-features = false } ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } -#ark-poly-commit = { version = "^0.4.0", default-features = false } -ark-poly-commit = {path = "../poly-commit"} +ark-poly-commit = { version = "^0.4.0", default-features = false } ark-r1cs-std = { version = "^0.4.0", default-features = false } ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = [ "r1cs" ] } From d1b8e51d6f91e68ee83b0c5ea4db69ebfd53f6e1 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 12:31:26 +0100 Subject: [PATCH 18/40] bench building --- benches/bench.rs | 61 +++++++++++++++++++++++++++--------------------- 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 8e3218a..14c63a5 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -4,7 +4,7 @@ use ark_bls12_381::{Bls12_381, Fr as BlsFr}; use ark_ff::PrimeField; -use ark_marlin::{Marlin, SimpleHashFiatShamirRng}; +use ark_marlin::{Marlin, 
SimplePoseidonRng}; use ark_mnt4_298::{Fr as MNT4Fr, MNT4_298}; use ark_mnt4_753::{Fr as MNT4BigFr, MNT4_753}; use ark_mnt6_298::{Fr as MNT6Fr, MNT6_298}; @@ -18,6 +18,7 @@ use ark_relations::{ use ark_std::{ops::Mul, UniformRand}; use blake2::Blake2s; use rand_chacha::ChaChaRng; +use ark_ec::pairing::Pairing; const NUM_PROVE_REPEATITIONS: usize = 10; const NUM_VERIFY_REPEATITIONS: usize = 50; @@ -76,27 +77,31 @@ macro_rules! marlin_prove_bench { num_constraints: 65536, }; + let srs = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::universal_setup(65536, 65536, 3 * 65536, rng) - .unwrap(); + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::universal_setup(65536, 65536, 3 * 65536, rng) + .unwrap(); let (pk, _) = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::index(&srs, c) - .unwrap(); + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::index(&srs, c) + .unwrap(); let start = ark_std::time::Instant::now(); for _ in 0..NUM_PROVE_REPEATITIONS { let _ = Marlin::< - $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::prove(&pk, c.clone(), rng) + $bench_field, + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::prove(&pk, c.clone(), rng) .unwrap(); } @@ -120,21 +125,24 @@ macro_rules! 
marlin_verify_bench { let srs = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::universal_setup(65536, 65536, 3 * 65536, rng) + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::universal_setup(65536, 65536, 3 * 65536, rng) .unwrap(); let (pk, vk) = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::index(&srs, c) + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::index(&srs, c) .unwrap(); let proof = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::prove(&pk, c.clone(), rng) + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::prove(&pk, c.clone(), rng) .unwrap(); let v = c.a.unwrap().mul(c.b.unwrap()); @@ -143,10 +151,11 @@ macro_rules! 
marlin_verify_bench { for _ in 0..NUM_VERIFY_REPEATITIONS { let _ = Marlin::< - $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>>, - SimpleHashFiatShamirRng, - >::verify(&vk, &vec![v], &proof, rng) + $bench_field, + SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> + ::verify(&vk, &vec![v], &proof, rng) .unwrap(); } From b5071fcd719cbed88a4f12eeea1c48142c361a84 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 12:33:53 +0100 Subject: [PATCH 19/40] fmt and fix --- .gitignore | 1 + benches/bench.rs | 92 +++++++++++++++++++++++++----------------- src/ahp/indexer.rs | 6 +-- src/ahp/mod.rs | 14 ++----- src/ahp/prover.rs | 22 ++++++---- src/data_structures.rs | 40 +++++++++++++----- src/lib.rs | 48 +++++++++++++--------- src/rng.rs | 39 ++++++++++-------- src/test.rs | 10 ++--- 9 files changed, 164 insertions(+), 108 deletions(-) diff --git a/.gitignore b/.gitignore index be1aec0..9a11628 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ Cargo.lock *.pyc *.sage.py params +rngNEW/ \ No newline at end of file diff --git a/benches/bench.rs b/benches/bench.rs index 14c63a5..10e5542 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -3,6 +3,7 @@ // where N is the number of threads you want to use (N = 1 for single-thread). use ark_bls12_381::{Bls12_381, Fr as BlsFr}; +use ark_ec::pairing::Pairing; use ark_ff::PrimeField; use ark_marlin::{Marlin, SimplePoseidonRng}; use ark_mnt4_298::{Fr as MNT4Fr, MNT4_298}; @@ -16,9 +17,8 @@ use ark_relations::{ r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}, }; use ark_std::{ops::Mul, UniformRand}; -use blake2::Blake2s; -use rand_chacha::ChaChaRng; -use ark_ec::pairing::Pairing; + + const NUM_PROVE_REPEATITIONS: usize = 10; const NUM_VERIFY_REPEATITIONS: usize = 50; @@ -77,31 +77,39 @@ macro_rules! 
marlin_prove_bench { num_constraints: 65536, }; - let srs = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::universal_setup(65536, 65536, 3 * 65536, rng) - .unwrap(); + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::universal_setup(65536, 65536, 3 * 65536, rng) + .unwrap(); let (pk, _) = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::index(&srs, c) - .unwrap(); + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::index(&srs, c) + .unwrap(); let start = ark_std::time::Instant::now(); for _ in 0..NUM_PROVE_REPEATITIONS { let _ = Marlin::< - $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::prove(&pk, c.clone(), rng) + $bench_field, + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::prove(&pk, c.clone(), rng) .unwrap(); } @@ -125,24 +133,33 @@ macro_rules! 
marlin_verify_bench { let srs = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::universal_setup(65536, 65536, 3 * 65536, rng) + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::universal_setup(65536, 65536, 3 * 65536, rng) .unwrap(); let (pk, vk) = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::index(&srs, c) + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::index(&srs, c) .unwrap(); let proof = Marlin::< $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::prove(&pk, c.clone(), rng) + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::prove(&pk, c.clone(), rng) .unwrap(); let v = c.a.unwrap().mul(c.b.unwrap()); @@ -151,11 +168,14 @@ macro_rules! 
marlin_verify_bench { for _ in 0..NUM_VERIFY_REPEATITIONS { let _ = Marlin::< - $bench_field, - SonicKZG10<$bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>> - ::verify(&vk, &vec![v], &proof, rng) + $bench_field, + SonicKZG10< + $bench_pairing_engine, + DensePolynomial<$bench_field>, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >, + SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + >::verify(&vk, &vec![v], &proof, rng) .unwrap(); } diff --git a/src/ahp/indexer.rs b/src/ahp/indexer.rs index 00bd59e..6aec219 100644 --- a/src/ahp/indexer.rs +++ b/src/ahp/indexer.rs @@ -7,7 +7,7 @@ use crate::ahp::{ AHPForR1CS, Error, LabeledPolynomial, }; use crate::Vec; -use ark_ff::{PrimeField, Field}; +use ark_ff::{Field, PrimeField}; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, SynthesisMode, @@ -25,7 +25,7 @@ use crate::ahp::constraint_systems::{ /// entries in any of the constraint matrices. #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Clone(bound = ""), Copy(bound = ""))] -pub struct IndexInfo { +pub struct IndexInfo { /// The total number of variables in the constraint system. pub num_variables: usize, /// The number of constraints. 
@@ -39,7 +39,7 @@ pub struct IndexInfo { f: PhantomData, } -impl IndexInfo { +impl IndexInfo { /// Construct a new index info pub fn new( num_variables: usize, diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs index 35b9154..9c785fa 100644 --- a/src/ahp/mod.rs +++ b/src/ahp/mod.rs @@ -334,7 +334,7 @@ mod tests { use ark_ff::{One, UniformRand, Zero}; use ark_poly::{ univariate::{DenseOrSparsePolynomial, DensePolynomial}, - Polynomial, DenseUVPolynomial, + DenseUVPolynomial, Polynomial, }; #[test] @@ -414,11 +414,7 @@ mod tests { divisor .coeffs .iter() - .filter_map(|f| if !f.is_zero() { - Some(f) - } else { - None - }) + .filter_map(|f| if !f.is_zero() { Some(f) } else { None }) .collect::>() ); @@ -446,11 +442,7 @@ mod tests { quotient .coeffs .iter() - .filter_map(|f| if !f.is_zero() { - Some(f) - } else { - None - }) + .filter_map(|f| if !f.is_zero() { Some(f) } else { None }) .collect::>() ); diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 3459d2c..60a6352 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -10,8 +10,8 @@ use crate::ahp::constraint_systems::{ use crate::{ToString, Vec}; use ark_ff::{Field, PrimeField, Zero}; use ark_poly::{ - univariate::DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOnDomain, - GeneralEvaluationDomain, Polynomial, DenseUVPolynomial, + univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, + Evaluations as EvaluationsOnDomain, GeneralEvaluationDomain, Polynomial, }; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, @@ -75,7 +75,11 @@ pub enum ProverMsg { } impl CanonicalSerialize for ProverMsg { - fn serialize_with_mode(&self, writer: W, compress: Compress) -> Result<(), SerializationError> { + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { let res = match self { ProverMsg::EmptyMessage => None, ProverMsg::FieldElements(v) => Some(v.clone()), @@ -90,10 +94,10 @@ impl 
CanonicalSerialize for ProverMsg { ProverMsg::FieldElements(v) => Some(v.clone()), }; res.serialized_size(compress) -} + } } -impl Valid for ProverMsg{ +impl Valid for ProverMsg { fn check(&self) -> Result<(), SerializationError> { match self { ProverMsg::EmptyMessage => Ok(()), @@ -102,14 +106,18 @@ impl Valid for ProverMsg{ } } impl CanonicalDeserialize for ProverMsg { - fn deserialize_with_mode(reader: R, compress:Compress, validate: Validate) -> Result { + fn deserialize_with_mode( + reader: R, + compress: Compress, + validate: Validate, + ) -> Result { let res = Option::>::deserialize_with_mode(reader, compress, validate)?; if let Some(res) = res { Ok(ProverMsg::FieldElements(res)) } else { Ok(ProverMsg::EmptyMessage) } -} + } } /// The first set of prover oracles. diff --git a/src/data_structures.rs b/src/data_structures.rs index f223d74..1051e33 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -1,11 +1,11 @@ use crate::ahp::indexer::*; use crate::ahp::prover::ProverMsg; use crate::Vec; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{BatchLCProof, PolynomialCommitment}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_crypto_primitives::sponge::CryptographicSponge; use ark_std::format; /* ************************************************************************* */ @@ -13,7 +13,8 @@ use ark_std::format; /* ************************************************************************* */ /// The universal public parameters for the argument system. 
-pub type UniversalSRS = ,S>>::UniversalParams; +pub type UniversalSRS = + , S>>::UniversalParams; /* ************************************************************************* */ /* ************************************************************************* */ @@ -21,7 +22,11 @@ pub type UniversalSRS = , S>> { +pub struct IndexVerifierKey< + F: PrimeField, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, +> { /// Stores information about the size of the index, as well as its field of /// definition. pub index_info: IndexInfo, @@ -31,8 +36,8 @@ pub struct IndexVerifierKey, S>, S:CryptographicSponge> Clone - for IndexVerifierKey +impl, S>, S: CryptographicSponge> + Clone for IndexVerifierKey { fn clone(&self) -> Self { Self { @@ -43,7 +48,9 @@ impl, S>, S:Crypto } } -impl,S>, S:CryptographicSponge> IndexVerifierKey { +impl, S>, S: CryptographicSponge> + IndexVerifierKey +{ /// Iterate over the commitments to indexed polynomials in `self`. pub fn iter(&self) -> impl Iterator { self.index_comms.iter() @@ -56,9 +63,13 @@ impl,S>, S:Cryptog /// Proving key for a specific index (i.e., R1CS matrices). #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct IndexProverKey,S>,S:CryptographicSponge> { +pub struct IndexProverKey< + F: PrimeField, + PC: PolynomialCommitment, S>, + S: CryptographicSponge, +> { /// The index verifier key. - pub index_vk: IndexVerifierKey, + pub index_vk: IndexVerifierKey, /// The randomness for the index polynomial commitments. pub index_comm_rands: Vec, /// The index itself. @@ -67,7 +78,8 @@ pub struct IndexProverKey,S>,S:CryptographicSponge> Clone for IndexProverKey +impl, S>, S: CryptographicSponge> + Clone for IndexProverKey where PC::Commitment: Clone, { @@ -87,7 +99,11 @@ where /// A zkSNARK proof. 
#[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct Proof,S>,S:CryptographicSponge> { +pub struct Proof< + F: PrimeField, + PC: PolynomialCommitment, S>, + S: CryptographicSponge, +> { /// Commitments to the polynomials produced by the AHP prover. pub commitments: Vec>, /// Evaluations of these polynomials. @@ -98,7 +114,9 @@ pub struct Proof,S pub pc_proof: BatchLCProof, } -impl,S>,S:CryptographicSponge> Proof { +impl, S>, S: CryptographicSponge> + Proof +{ /// Construct a new proof. pub fn new( commitments: Vec>, diff --git a/src/lib.rs b/src/lib.rs index 4baf16f..84385f6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,10 +7,10 @@ //! is the same as the number of constraints (i.e., where the constraint //! matrices are square). Furthermore, Marlin only supports instances where the //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1). -// #![deny(unused_import_braces, unused_qualifications, trivial_casts)] +#![deny(unused_import_braces, unused_qualifications, trivial_casts)] #![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] -// #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] +#![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] @@ -19,14 +19,15 @@ extern crate ark_std; use ark_crypto_primitives::sponge::CryptographicSponge; -use ark_ff::{PrimeField}; +use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; -use ark_poly_commit::Evaluations; use ark_poly_commit::challenge::ChallengeGenerator; +use ark_poly_commit::Evaluations; use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment}; use ark_relations::r1cs::ConstraintSynthesizer; use ark_std::rand::RngCore; +use ark_serialize::CanonicalSerialize; use 
ark_std::{ collections::BTreeMap, format, @@ -35,8 +36,6 @@ use ark_std::{ vec, vec::Vec, }; -use ark_serialize::CanonicalSerialize; - #[cfg(not(feature = "std"))] macro_rules! eprintln { @@ -73,7 +72,6 @@ macro_rules! push_to_vec { pub mod rng; pub use rng::*; - mod error; pub use error::*; @@ -89,7 +87,11 @@ use ahp::EvaluationsProvider; mod test; /// The compiled argument system.FiatShamiRng -pub struct Marlin,S>, S: DefaultSpongeRNG>( +pub struct Marlin< + F: PrimeField, + PC: PolynomialCommitment, S>, + S: DefaultSpongeRNG, +>( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, @@ -128,7 +130,7 @@ impl, S>, S: Defau pub fn index>( srs: &UniversalSRS, c: C, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); // TODO: Add check that c is in the correct mode. @@ -177,9 +179,9 @@ impl, S>, S: Defau /// Create a zkSNARK asserting that the constraint system is satisfied. pub fn prove, R: RngCore>( - index_pk: &IndexProverKey, + index_pk: &IndexProverKey, c: C, - zk_rng: &mut R + zk_rng: &mut R, ) -> Result, Error> { let prover_time = start_timer!(|| "Marlin::Prover"); // Add check that c is in the correct mode. 
@@ -188,7 +190,6 @@ impl, S>, S: Defau let public_input = prover_init_state.public_input(); let mut fs_rng = S::default(); fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap()); - // -------------------------------------------------------------------- // First round @@ -204,7 +205,11 @@ impl, S>, S: Defau ) .map_err(Error::from_pc_err)?; end_timer!(first_round_comm_time); - let fcinput = first_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); + let fcinput = first_comms + .clone() + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); fs_rng.absorb(&to_bytes![fcinput, prover_first_msg].unwrap()); @@ -227,7 +232,11 @@ impl, S>, S: Defau .map_err(Error::from_pc_err)?; end_timer!(second_round_comm_time); - let scinput = second_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); + let scinput = second_comms + .clone() + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); fs_rng.absorb(&to_bytes![scinput, prover_second_msg].unwrap()); let (verifier_second_msg, verifier_state) = @@ -248,8 +257,11 @@ impl, S>, S: Defau .map_err(Error::from_pc_err)?; end_timer!(third_round_comm_time); - - let tcinput = third_comms.clone().iter().map(|p| p.commitment().clone()).collect::>(); + let tcinput = third_comms + .clone() + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); fs_rng.absorb(&to_bytes![tcinput, prover_third_msg].unwrap()); let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); @@ -345,9 +357,9 @@ impl, S>, S: Defau /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. 
pub fn verify( - index_vk: &IndexVerifierKey, + index_vk: &IndexVerifierKey, public_input: &[F], - proof: &Proof, + proof: &Proof, rng: &mut R, ) -> Result> { let verifier_time = start_timer!(|| "Marlin::Verify"); diff --git a/src/rng.rs b/src/rng.rs index 3b2481d..190f07b 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,29 +1,35 @@ use crate::Vec; -use ark_crypto_primitives::sponge::{CryptographicSponge, Absorb}; -use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; -use ark_ff::{PrimeField}; +use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge}; +use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; +use ark_ff::PrimeField; - - -use ark_std::rand::{RngCore}; +use ark_std::rand::RngCore; use digest::Digest; /// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed /// and the new seed material. -/// Exposes a particular instantiation of the Poseidon sponge +/// Exposes a particular instantiation of the Poseidon sponge #[derive(Clone)] -pub struct SimplePoseidonRng(PoseidonSponge); +pub struct SimplePoseidonRng(PoseidonSponge); -impl RngCore for SimplePoseidonRng { +impl RngCore for SimplePoseidonRng { #[inline] fn next_u32(&mut self) -> u32 { - self.0.squeeze_bits(32).iter().rev().fold(0, |acc, &bit| (acc << 1) | (bit as u32)) + self.0 + .squeeze_bits(32) + .iter() + .rev() + .fold(0, |acc, &bit| (acc << 1) | (bit as u32)) } #[inline] fn next_u64(&mut self) -> u64 { - self.0.squeeze_bits(64).iter().rev().fold(0, |acc, &bit| (acc << 1) | (bit as u64)) + self.0 + .squeeze_bits(64) + .iter() + .rev() + .fold(0, |acc, &bit| (acc << 1) | (bit as u64)) } #[inline] @@ -37,7 +43,7 @@ impl RngCore for SimplePoseidonRng { } } -impl CryptographicSponge for SimplePoseidonRng { +impl CryptographicSponge for SimplePoseidonRng { type Config = PoseidonConfig; fn new(params: &Self::Config) -> Self { @@ -57,16 +63,15 @@ impl CryptographicSponge for SimplePoseidonRng { } } -impl Default for 
SimplePoseidonRng { +impl Default for SimplePoseidonRng { fn default() -> Self { Self(PoseidonSponge::new(&poseidon_parameters_for_test())) } } -pub trait DefaultSpongeRNG : Default + CryptographicSponge + RngCore{} - -impl DefaultSpongeRNG for SimplePoseidonRng {} +pub trait DefaultSpongeRNG: Default + CryptographicSponge + RngCore {} +impl DefaultSpongeRNG for SimplePoseidonRng {} /// Generate default parameters (bls381-fr-only) for alpha = 17, state-size = 8 fn poseidon_parameters_for_test() -> PoseidonConfig { @@ -769,4 +774,4 @@ fn poseidon_parameters_for_test() -> PoseidonConfig { rate, capacity, } -} \ No newline at end of file +} diff --git a/src/test.rs b/src/test.rs index b98ed95..ca407bc 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,18 +115,18 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; + use crate::rng::{SimplePoseidonRng}; use crate::Marlin; - use crate::rng::{DefaultSpongeRNG, SimplePoseidonRng}; - use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; + use ark_bls12_381::{Bls12_381, Fr}; + use ark_ec::{pairing::Pairing}; use ark_ff::UniformRand; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; - use ark_ec::{pairing::Pairing, bls12::Bls12}; - use blake2::Blake2s; - use rand_chacha::ChaChaRng; + + type BF = ::BaseField; type S = SimplePoseidonRng; From fed7d03026d73917f83d53900b8f2f2f2dc90ee2 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 12:39:26 +0100 Subject: [PATCH 20/40] imports --- src/lib.rs | 1 - src/rng.rs | 3 ++- src/test.rs | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 84385f6..31deedf 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,7 +18,6 @@ #[macro_use] extern crate ark_std; -use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use 
ark_poly_commit::challenge::ChallengeGenerator; diff --git a/src/rng.rs b/src/rng.rs index 190f07b..adacb7b 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -4,7 +4,6 @@ use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ff::PrimeField; use ark_std::rand::RngCore; -use digest::Digest; /// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed /// and the new seed material. @@ -63,12 +62,14 @@ impl CryptographicSponge for SimplePoseidonRng { } } +/// Instantiate Poseidon sponge with default parameters impl Default for SimplePoseidonRng { fn default() -> Self { Self(PoseidonSponge::new(&poseidon_parameters_for_test())) } } +/// Mock trait for use in Marlin prover pub trait DefaultSpongeRNG: Default + CryptographicSponge + RngCore {} impl DefaultSpongeRNG for SimplePoseidonRng {} diff --git a/src/test.rs b/src/test.rs index ca407bc..25a0e2e 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,12 +115,12 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::rng::{SimplePoseidonRng}; + use crate::rng::SimplePoseidonRng; use crate::Marlin; use ark_bls12_381::{Bls12_381, Fr}; - use ark_ec::{pairing::Pairing}; + use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; From 0c5728561febd1f1ce349d32d83bd248454f4e30 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 12:45:18 +0100 Subject: [PATCH 21/40] unmodified .gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 9a11628..be1aec0 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,3 @@ Cargo.lock *.pyc *.sage.py params -rngNEW/ \ No newline at end of file From f07a64d83c35a70b0ae265a8767b4de0cd64a211 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 13:07:11 +0100 Subject: [PATCH 22/40] updated changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 6b4d067..591e52d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,11 @@ ### Bug fixes +## v0.4.0 + +- Change dependency to version `0.4.0` of other arkworks-rs crates. +- Fiat-Shamir transformation for the AHP uses the Poseidon sponge function. + ## v0.3.0 - Change dependency to version `0.3.0` of other arkworks-rs crates. From 8b55468050f00efcc84fc677cf261834330734ae Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 13:12:16 +0100 Subject: [PATCH 23/40] removed unnecessary r1cs-std dependency --- Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index b4eafec..198f4d6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,7 +27,6 @@ ark-std = { version = "^0.4.0", default-features = false } ark-poly = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false } ark-poly-commit = { version = "^0.4.0", default-features = false } -ark-r1cs-std = { version = "^0.4.0", default-features = false } ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = [ "r1cs" ] } rayon = { version = "1", optional = true } From a976a897a533b6f9ab2362a7f98671039b953171 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 13:16:47 +0100 Subject: [PATCH 24/40] removed extraneous tracing dep --- Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 198f4d6..6a67d67 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,10 +32,9 @@ ark-crypto-primitives = { version = "^0.4.0", default-features = false, features rayon = { version = "1", optional = true } digest = { version = "0.9" } derivative = { version = "2", features = ["use_core"] } -rand_chacha = { version = "^0.3.1", default-features = false } -tracing = { version = "0.1", default-features = false, features = [ "attributes" ] } [dev-dependencies] +rand_chacha = { version = "^0.3.0", default-features = false } blake2 = { version =
"0.9", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-mnt4-298 = { version = "^0.4.0", default-features = false, features = ["r1cs", "curve"] } From 0e070834598ac4eddc327e5065f5fa0f5d4167b1 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 19 Sep 2023 13:51:21 +0100 Subject: [PATCH 25/40] changed order of generics for consistency --- src/data_structures.rs | 8 ++++---- src/lib.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/data_structures.rs b/src/data_structures.rs index 1051e33..955b244 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -24,8 +24,8 @@ pub type UniversalSRS = #[derive(CanonicalSerialize, CanonicalDeserialize)] pub struct IndexVerifierKey< F: PrimeField, - S: CryptographicSponge, PC: PolynomialCommitment, S>, + S: CryptographicSponge, > { /// Stores information about the size of the index, as well as its field of /// definition. @@ -37,7 +37,7 @@ pub struct IndexVerifierKey< } impl, S>, S: CryptographicSponge> - Clone for IndexVerifierKey + Clone for IndexVerifierKey { fn clone(&self) -> Self { Self { @@ -49,7 +49,7 @@ impl, S>, S: Crypt } impl, S>, S: CryptographicSponge> - IndexVerifierKey + IndexVerifierKey { /// Iterate over the commitments to indexed polynomials in `self`. pub fn iter(&self) -> impl Iterator { @@ -69,7 +69,7 @@ pub struct IndexProverKey< S: CryptographicSponge, > { /// The index verifier key. - pub index_vk: IndexVerifierKey, + pub index_vk: IndexVerifierKey, /// The randomness for the index polynomial commitments. pub index_comm_rands: Vec, /// The index itself. 
diff --git a/src/lib.rs b/src/lib.rs index 31deedf..d6081ea 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -129,7 +129,7 @@ impl, S>, S: Defau pub fn index>( srs: &UniversalSRS, c: C, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); // TODO: Add check that c is in the correct mode. @@ -356,7 +356,7 @@ impl, S>, S: Defau /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. pub fn verify( - index_vk: &IndexVerifierKey, + index_vk: &IndexVerifierKey, public_input: &[F], proof: &Proof, rng: &mut R, From f03a658136f9e568d55e5a56acaa3463ad452a43 Mon Sep 17 00:00:00 2001 From: AB Date: Wed, 20 Sep 2023 16:19:51 +0100 Subject: [PATCH 26/40] easy changes --- src/ahp/mod.rs | 4 ++-- src/lib.rs | 3 --- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs index 9c785fa..24e6532 100644 --- a/src/ahp/mod.rs +++ b/src/ahp/mod.rs @@ -414,7 +414,7 @@ mod tests { divisor .coeffs .iter() - .filter_map(|f| if !f.is_zero() { Some(f) } else { None }) + .filter(|f| !f.is_zero()) .collect::>() ); @@ -442,7 +442,7 @@ mod tests { quotient .coeffs .iter() - .filter_map(|f| if !f.is_zero() { Some(f) } else { None }) + .filter(|f| !f.is_zero()) .collect::>() ); diff --git a/src/lib.rs b/src/lib.rs index d6081ea..bdf3cd0 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -205,7 +205,6 @@ impl, S>, S: Defau .map_err(Error::from_pc_err)?; end_timer!(first_round_comm_time); let fcinput = first_comms - .clone() .iter() .map(|p| p.commitment().clone()) .collect::>(); @@ -232,7 +231,6 @@ impl, S>, S: Defau end_timer!(second_round_comm_time); let scinput = second_comms - .clone() .iter() .map(|p| p.commitment().clone()) .collect::>(); @@ -257,7 +255,6 @@ impl, S>, S: Defau end_timer!(third_round_comm_time); let tcinput = third_comms - .clone() .iter() .map(|p| p.commitment().clone()) 
.collect::>(); From 127dc2fdcc1773bae2bccfd2d888094e3cef1196 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 26 Sep 2023 10:22:45 +0100 Subject: [PATCH 27/40] refactor RNG initialization --- src/rng.rs | 736 ++--------------------------------------------------- 1 file changed, 27 insertions(+), 709 deletions(-) diff --git a/src/rng.rs b/src/rng.rs index adacb7b..58ada05 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,7 +1,9 @@ +use core::default; + use crate::Vec; -use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge}; +use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonDefaultConfigField, PoseidonSponge, PoseidonDefaultConfigEntry, find_poseidon_ark_and_mds}; use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; -use ark_ff::PrimeField; +use ark_ff::{PrimeField,Fp, Field}; use ark_std::rand::RngCore; @@ -9,6 +11,7 @@ use ark_std::rand::RngCore; /// and the new seed material. /// Exposes a particular instantiation of the Poseidon sponge + #[derive(Clone)] pub struct SimplePoseidonRng(PoseidonSponge); @@ -62,717 +65,32 @@ impl CryptographicSponge for SimplePoseidonRng { } } -/// Instantiate Poseidon sponge with default parameters -impl Default for SimplePoseidonRng { - fn default() -> Self { - Self(PoseidonSponge::new(&poseidon_parameters_for_test())) - } -} - /// Mock trait for use in Marlin prover pub trait DefaultSpongeRNG: Default + CryptographicSponge + RngCore {} impl DefaultSpongeRNG for SimplePoseidonRng {} - -/// Generate default parameters (bls381-fr-only) for alpha = 17, state-size = 8 -fn poseidon_parameters_for_test() -> PoseidonConfig { - let alpha = 17; - let mds = vec![ - vec![ - F::from_str( - "43228725308391137369947362226390319299014033584574058394339561338097152657858", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "20729134655727743386784826341366384914431326428651109729494295849276339718592", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"14275792724825301816674509766636153429127896752891673527373812580216824074377", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "3039440043015681380498693766234886011876841428799441709991632635031851609481", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "6678863357926068615342013496680930722082156498064457711885464611323928471101", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "37355038393562575053091209735467454314247378274125943833499651442997254948957", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "26481612700543967643159862864328231943993263806649000633819754663276818191580", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "30103264397473155564098369644643015994024192377175707604277831692111219371047", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5712721806190262694719203887224391960978962995663881615739647362444059585747", - ) - .map_err(|_| ()) - .unwrap(), - ], - ]; - let ark = vec![ - vec![ - F::from_str( - "44595993092652566245296379427906271087754779418564084732265552598173323099784", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "23298463296221002559050231199021122673158929708101049474262017406235785365706", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "34212491019164671611180318500074499609633402631511849759183986060951187784466", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "19098051134080182375553680073525644187968170656591203562523489333616681350367", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "7027675418691353855077049716619550622043312043660992344940177187528247727783", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "47642753235356257928619065424282314733361764347085604019867862722762702755609", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "24281836129477728386327945482863886685457469794572168729834072693507088619997", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"12624893078331920791384400430193929292743809612452779381349824703573823883410", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "22654862987689323504199204643771547606936339944127455903448909090318619188561", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "27229172992560143399715985732065737093562061782414043625359531774550940662372", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "13224952063922250960936823741448973692264041750100990569445192064567307041002", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "40380869235216625717296601204704413215735530626882135230693823362552484855508", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "4245751157938905689397184705633683893932492370323323780371834663438472308145", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "8252156875535418429533049587170755750275631534314711502253775796882240991261", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "32910829712934971129644416249914075073083903821282503505466324428991624789936", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "49412601297460128335642438246716127241669915737656789613664349252868389975962", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "841661305510340459373323516098909074520942972558284146843779636353111592117", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "37926489020263024391336570420006226544461516787280929232555625742588667303947", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "18433043696013996573551852847056868761017170818820490351056924728720017242180", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "45376910275288438312773930242803223482318753992595269901397542214841496212310", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "47854349410014339708332226068958253098964727682486278458389508597930796651514", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - 
"32638426693771251366613055506166587312642876874690861030672730491779486904360", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "19105439281696418043426755774110765432959446684037017837894045255490581318047", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "13484299981373196201166722380389594773562113262309564134825386266765751213853", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "63360321133852659797114062808297090090814531427710842859827725871241144161", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "42427543035537409467993338717379268954936885184662765745740070438835506287271", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "149101987103211771991327927827692640556911620408176100290586418839323044234", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "8341764062226826803887898710015561861526081583071950015446833446251359696930", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "45635980415044299013530304465786867101223925975971912073759959440335364441441", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "49833261156201520743834327917353893365097424877680239796845398698940689734850", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "26764715016591436228000634284249890185894507497739511725029482580508707525029", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "25054530812095491217523557726611612265064441619646263299990388543372685322499", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "47654590955096246997622155031169641628093104787883934397920286718814889326452", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "16463825890556752307085325855351334996898686633642574805918056141310194135796", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "17473961341633494489168064889016732306117097771640351649096482400214968053040", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"49914603434867854893558366922996753035832008639512305549839666311012232077468", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "17122578514152308432111470949473865420090463026624297565504381163777697818362", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "34870689836420861427379101859113225049736283485335674111421609473028315711541", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "4622082908476410083286670201138165773322781640914243047922441301693321472984", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "6079244375752010013798561155333454682564824861645642293573415833483620500976", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "2635090520059500019661864086615522409798872905401305311748231832709078452746", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "19070766579582338321241892986615538320421651429118757507174186491084617237586", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "12622420533971517050761060317049369208980632120901481436392835424625664738526", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "8965101225657199137904506150282256568170501907667138404080397024857524386266", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "27085091008069524593196374148553176565775450537072498305327481366756159319838", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "45929056591150668409624595495643698205830429971690813312608217341940499221218", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "50361689160518167880500080025023064746137161030119436080957023803101861300846", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "6722586346537620732668048024627882970582133613352245923413730968378696371065", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "7340485916200743279276570085958556798507770452421357119145466906520506506342", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"25946733168219652706630789514519162148860502996914241011500280690204368174083", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "9962367658743163006517635070396368828381757404628822422306438427554934645464", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "7221669722700687417346373353960536661883467014204005276831020252277657076044", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "21487980358388383563030903293359140836304488103090321183948009095669344637431", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "44389482047246878765773958430749333249729101516826571588063797358040130313157", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "32887270862917330820874162842519225370447850172085449103568878409533683733185", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "15453393396765207016379045014101989306173462885430532298601655955681532648226", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5478929644476681096437469958231489102974161353940993351588559414552523375472", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "41981370411247590312677561209178363054744730805951096631186178388981705304138", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "3474136981645476955784428843999869229067282976757744542648188369810577298585", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "26251477770740399889956219915654371915771248171098220204692699710414817081869", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "51916561889718854106125837319509539220778634838409949714061033196765117231752", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "25355145802812435959748831835587713214179184608408449220418373832038339021974", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "31950684570730625275416731570246297947385359051792335826965013637877068017530", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - 
"40966378914980473680181850710703295982197782082391794594149984057481543436879", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "1141315130963422417761731263662398620858625339733452795772225916965481730059", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "9812100862165422922235757591915383485338044715409891361026651619010947646011", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "25276091996614379065765602410190790163396484122487585763380676888280427744737", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "18512694312063606403196469408971540495273694846641903978723927656359350642619", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5791584766415439694303685437881192048262049244830616851865505314899699012588", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "34501536331706470927069149344450300773777486993504673779438188495686129846168", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "10797737565565774079718466476236831116206064650762676383469703413649447678207", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "42599392747310354323136214835734307933597896695637215127297036595538235868368", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "1336670998775417133322626564820911986969949054454812685145275612519924150700", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "2630141283339761901081411552890260088516693208402906795133548756078952896770", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5206688943117414740600380377278238268309952400341418217132724749372435975215", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "10739264253827005683370721104077252560524362323422172665530191908848354339715", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "48010640624945719826344492755710886355389194986527731603685956726907395779674", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"47880724693177306044229143357252697148359033158394459365791331000715957339701", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "51658938856669444737833983076793759752280196674149218924101718974926964118996", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "27558055650076329657496888512074319504342606463881203707330358472954748913263", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "38886981777859313701520424626728402175860609948757992393598285291689196608037", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "17152756165118461969542990684402410297675979513690903033350206658079448802479", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "43766946932033687220387514221943418338304186408056458476301583041390483707207", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "24324495647041812436929170644873622904287038078113808264580396461953421400343", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "6935839211798937659784055008131602708847374430164859822530563797964932598700", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "42126767398190942911395299419182514513368023621144776598842282267908712110039", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5702364486091252903915715761606014714345316580946072019346660327857498603375", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "28184981699552917714085740963279595942132561155181044254318202220270242523053", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "27078204494010940048327822707224393686245007379331357330801926151074766130790", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "5004172841233947987988267535285080365124079140142987718231874743202918551203", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "7974360962120296064882769128577382489451060235999590492215336103105134345602", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - 
"48062035869818179910046292951628308709251170031813126950740044942870578526376", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "26361151154829600651603985995297072258262605598910254660032612019129606811983", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "46973867849986280770641828877435510444176572688208439836496241838832695841519", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "1219439673853113792340300173186247996249367102884530407862469123523013083971", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "8063356002935671186275773257019749639571745240775941450161086349727882957042", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "8815571992701260640209942886673939234666734294275300852283020522390608544536", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "36384568984671043678320545346945893232044626942887414733675890845013312931948", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "7493936589040764830842760521372106574503511314427857201860148571929278344956", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "26516538878265871822073279450474977673130300973488209984756372331392531193948", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "3872858659373466814413243601289105962248870842202907364656526273784217311104", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "8291822807524000248589997648893671538524566700364221355689839490238724479848", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "32842548776827046388198955038089826231531188946525483251252938248379132381248", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "10749428410907700061565796335489079278748501945557710351216806276547834974736", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "43342287917341177925402357903832370099402579088513884654598017447701677948416", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - 
"29658571352070370791360499299098360881857072189358092237807807261478461425147", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "7805182565862454238315452208989152534554369855020544477885853141626690738363", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "30699555847500141715826240743138908521140760599479365867708690318477369178275", - ) - .map_err(|_| ()) - .unwrap(), - ], - vec![ - F::from_str( - "1231951350103545216624376889222508148537733140742167414518514908719103925687", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "24784260089125933876714702247471508077514206350883487938806451152907502751770", - ) - .map_err(|_| ()) - .unwrap(), - F::from_str( - "36563542611079418454711392295126742705798573252480028863133394504154697924536", - ) - .map_err(|_| ()) - .unwrap(), - ], - ]; - let full_rounds = 8; - let total_rounds = 37; - let partial_rounds = total_rounds - full_rounds; - let capacity = 1; - let rate = 2; - PoseidonConfig { - full_rounds, - partial_rounds, - alpha, +/// Instantiate Poseidon sponge with default parameters +impl Default for SimplePoseidonRng { + fn default() -> Self { + // let default = + // Self(PoseidonSponge::new(&poseidon_parameters_for_test())) + let (alpha, rate, full_rounds, partial_rounds) = (17,2,8,33); + let (ark,mds) = find_poseidon_ark_and_mds( + F::MODULUS_BIT_SIZE as u64, + alpha, + rate, + full_rounds, + partial_rounds, + ); + let config = PoseidonConfig { + full_rounds:full_rounds as usize, + partial_rounds:partial_rounds as usize, + alpha:alpha as u64, ark, mds, - rate, - capacity, + rate:rate as usize, + capacity:2, + }; + SimplePoseidonRng(PoseidonSponge::new(&config)) } -} +} \ No newline at end of file From 6f11b892e4b62a8df4e11b1363d2f4a1e9c0af65 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 26 Sep 2023 10:28:03 +0100 Subject: [PATCH 28/40] before test --- src/rng.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/rng.rs b/src/rng.rs index 58ada05..eea6906 100644 --- 
a/src/rng.rs +++ b/src/rng.rs @@ -1,9 +1,7 @@ -use core::default; - use crate::Vec; -use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonDefaultConfigField, PoseidonSponge, PoseidonDefaultConfigEntry, find_poseidon_ark_and_mds}; +use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge, find_poseidon_ark_and_mds}; use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; -use ark_ff::{PrimeField,Fp, Field}; +use ark_ff::PrimeField; use ark_std::rand::RngCore; From 2a70370628916685ab9ba9f7243afe5e2b59290d Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 26 Sep 2023 11:06:04 +0100 Subject: [PATCH 29/40] removed some hardcoded poseidon config parameters --- src/rng.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/rng.rs b/src/rng.rs index eea6906..8ed317d 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -72,13 +72,13 @@ impl Default for SimplePoseidonRng { fn default() -> Self { // let default = // Self(PoseidonSponge::new(&poseidon_parameters_for_test())) - let (alpha, rate, full_rounds, partial_rounds) = (17,2,8,33); + let (alpha, rate, full_rounds, partial_rounds) = (17,2,8,29); let (ark,mds) = find_poseidon_ark_and_mds( F::MODULUS_BIT_SIZE as u64, - alpha, rate, full_rounds, partial_rounds, + 0, ); let config = PoseidonConfig { full_rounds:full_rounds as usize, @@ -86,8 +86,8 @@ impl Default for SimplePoseidonRng { alpha:alpha as u64, ark, mds, - rate:rate as usize, - capacity:2, + rate, + capacity:1, }; SimplePoseidonRng(PoseidonSponge::new(&config)) } From a2c56bf0c1acad82dc7e9604db417d65f56d46d1 Mon Sep 17 00:00:00 2001 From: AB Date: Tue, 26 Sep 2023 15:33:14 +0100 Subject: [PATCH 30/40] semi implemented sponge methods --- Cargo.toml | 3 ++- src/ahp/constraint_systems.rs | 13 +++++++++---- src/ahp/prover.rs | 12 ++++++++---- src/ahp/verifier.rs | 17 +++++++++-------- src/lib.rs | 13 +++++++------ src/test.rs | 31 +++++++++++++++++++------------ 6 files changed, 54 insertions(+), 35 
deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 6a67d67..e790cb5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ ark-crypto-primitives = { version = "^0.4.0", default-features = false, features rayon = { version = "1", optional = true } digest = { version = "0.9" } derivative = { version = "2", features = ["use_core"] } +itertools = "0.11.0" [dev-dependencies] rand_chacha = { version = "^0.3.0", default-features = false } @@ -50,7 +51,7 @@ debug = true panic = 'abort' [profile.test] -opt-level = 3 +opt-level = 0 debug-assertions = true incremental = true debug = true diff --git a/src/ahp/constraint_systems.rs b/src/ahp/constraint_systems.rs index bee9525..d561e06 100644 --- a/src/ahp/constraint_systems.rs +++ b/src/ahp/constraint_systems.rs @@ -288,13 +288,18 @@ pub(crate) fn make_matrices_square_for_prover(cs: ConstraintSyste #[cfg(test)] mod tests { + use crate::SimplePoseidonRng; + use super::*; + use ark_crypto_primitives::sponge::CryptographicSponge; use ark_relations::r1cs::Matrix; use ark_std::{collections::BTreeMap, UniformRand}; use ark_bls12_381::Fr as F; use ark_ff::{One, Zero}; use ark_poly::EvaluationDomain; + use itertools::Itertools; + use rand_chacha::rand_core::RngCore; fn entry(matrix: &Matrix, row: usize, col: usize) -> F { matrix[row] @@ -369,10 +374,10 @@ mod tests { .zip(output_domain.batch_eval_unnormalized_bivariate_lagrange_poly_with_same_inputs()) .collect(); - let mut rng = ark_std::test_rng(); - let eta_a = F::rand(&mut rng); - let eta_b = F::rand(&mut rng); - let eta_c = F::rand(&mut rng); + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); + let (eta_a, eta_b, eta_c)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); for (k_index, k) in interpolation_domain.elements().enumerate() { let row_val = joint_arith.row.evaluate(&k); let col_val = joint_arith.col.evaluate(&k); diff --git 
a/src/ahp/prover.rs b/src/ahp/prover.rs index 60a6352..44546dc 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -8,6 +8,7 @@ use crate::ahp::constraint_systems::{ make_matrices_square_for_prover, pad_input_for_indexer_and_prover, unformat_public_input, }; use crate::{ToString, Vec}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{Field, PrimeField, Zero}; use ark_poly::{ univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, @@ -24,6 +25,7 @@ use ark_std::{ cfg_into_iter, cfg_iter, cfg_iter_mut, io::{Read, Write}, }; +use itertools::Itertools; /// State for the AHP prover. pub struct ProverState<'a, F: PrimeField> { @@ -271,7 +273,7 @@ impl AHPForR1CS { } /// Output the first round message and the next state. - pub fn prover_first_round<'a, R: RngCore>( + pub fn prover_first_round<'a, R: CryptographicSponge + RngCore>( mut state: ProverState<'a, F>, rng: &mut R, ) -> Result<(ProverMsg, ProverFirstOracles, ProverState<'a, F>), Error> { @@ -312,9 +314,11 @@ impl AHPForR1CS { }) .collect(); + + let (f1,f2,f3)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); let w_poly = &EvaluationsOnDomain::from_vec_and_domain(w_poly_evals, domain_h) .interpolate() - + &(&DensePolynomial::from_coefficients_slice(&[F::rand(rng)]) * &v_H); + + &(&DensePolynomial::from_coefficients_slice(&[f1]) * &v_H); let (w_poly, remainder) = w_poly.divide_by_vanishing_poly(domain_x).unwrap(); assert!(remainder.is_zero()); end_timer!(w_poly_time); @@ -322,13 +326,13 @@ impl AHPForR1CS { let z_a_poly_time = start_timer!(|| "Computing z_A polynomial"); let z_a = state.z_a.clone().unwrap(); let z_a_poly = &EvaluationsOnDomain::from_vec_and_domain(z_a, domain_h).interpolate() - + &(&DensePolynomial::from_coefficients_slice(&[F::rand(rng)]) * &v_H); + + &(&DensePolynomial::from_coefficients_slice(&[f2]) * &v_H); end_timer!(z_a_poly_time); let z_b_poly_time = start_timer!(|| "Computing z_B polynomial"); let z_b = 
state.z_b.clone().unwrap(); let z_b_poly = &EvaluationsOnDomain::from_vec_and_domain(z_b, domain_h).interpolate() - + &(&DensePolynomial::from_coefficients_slice(&[F::rand(rng)]) * &v_H); + + &(&DensePolynomial::from_coefficients_slice(&[f3]) * &v_H); end_timer!(z_b_poly_time); let mask_poly_time = start_timer!(|| "Computing mask polynomial"); diff --git a/src/ahp/verifier.rs b/src/ahp/verifier.rs index 91bb357..9d5a233 100644 --- a/src/ahp/verifier.rs +++ b/src/ahp/verifier.rs @@ -1,7 +1,9 @@ #![allow(non_snake_case)] +use itertools::Itertools; use crate::ahp::indexer::IndexInfo; use crate::ahp::*; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_std::rand::RngCore; use ark_ff::PrimeField; @@ -41,7 +43,7 @@ pub struct VerifierSecondMsg { impl AHPForR1CS { /// Output the first message and next round state. - pub fn verifier_first_round( + pub fn verifier_first_round( index_info: IndexInfo, rng: &mut R, ) -> Result<(VerifierFirstMsg, VerifierState), Error> { @@ -49,16 +51,14 @@ impl AHPForR1CS { return Err(Error::NonSquareMatrix); } - let domain_h = GeneralEvaluationDomain::new(index_info.num_constraints) + let domain_h: GeneralEvaluationDomain = GeneralEvaluationDomain::new(index_info.num_constraints) .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let domain_k = GeneralEvaluationDomain::new(index_info.num_non_zero) .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let alpha = domain_h.sample_element_outside_domain(rng); - let eta_a = F::rand(rng); - let eta_b = F::rand(rng); - let eta_c = F::rand(rng); + let alpha = domain_h.sample_element_outside_domain(rng).to_owned(); + let (eta_a, eta_b, eta_c)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); let msg = VerifierFirstMsg { alpha, @@ -91,11 +91,12 @@ impl AHPForR1CS { } /// Output the third message and next round state. 
- pub fn verifier_third_round( + pub fn verifier_third_round( mut state: VerifierState, rng: &mut R, ) -> VerifierState { - state.gamma = Some(F::rand(rng)); + let gamma = rng.squeeze_field_elements(1).pop(); + state.gamma = gamma; state } diff --git a/src/lib.rs b/src/lib.rs index bdf3cd0..ddf0bf7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -10,7 +10,7 @@ #![deny(unused_import_braces, unused_qualifications, trivial_casts)] #![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] -#![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] +// #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] @@ -18,6 +18,7 @@ #[macro_use] extern crate ark_std; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::challenge::ChallengeGenerator; @@ -89,14 +90,14 @@ mod test; pub struct Marlin< F: PrimeField, PC: PolynomialCommitment, S>, - S: DefaultSpongeRNG, + S: CryptographicSponge, >( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -impl, S>, S: DefaultSpongeRNG> +impl, S>, S: CryptographicSponge+Default+RngCore> Marlin { /// The personalization string for this protocol. Used to personalize the @@ -177,7 +178,7 @@ impl, S>, S: Defau } /// Create a zkSNARK asserting that the constraint system is satisfied. 
- pub fn prove, R: RngCore>( + pub fn prove, R: RngCore+CryptographicSponge>( index_pk: &IndexProverKey, c: C, zk_rng: &mut R, @@ -327,7 +328,7 @@ impl, S>, S: Defau end_timer!(eval_time); fs_rng.absorb(&to_bytes![&evaluations].unwrap()); - let mut opening_challenge = ChallengeGenerator::::new_multivariate(fs_rng); + let mut opening_challenge: ChallengeGenerator<_, S> = ChallengeGenerator::new_multivariate(fs_rng); let pc_proof = PC::open_combinations( &index_pk.committer_key, @@ -428,7 +429,7 @@ impl, S>, S: Defau AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); fs_rng.absorb(&to_bytes![&proof.evaluations].unwrap()); - let mut opening_challenge = ChallengeGenerator::new_multivariate(fs_rng); + let mut opening_challenge: ChallengeGenerator = ChallengeGenerator::new_multivariate(fs_rng); let mut evaluations = Evaluations::new(); let mut evaluation_labels = Vec::new(); diff --git a/src/test.rs b/src/test.rs index 25a0e2e..fb40cec 100644 --- a/src/test.rs +++ b/src/test.rs @@ -116,7 +116,9 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; use crate::rng::SimplePoseidonRng; + use ark_crypto_primitives::sponge::CryptographicSponge; use crate::Marlin; + use itertools::Itertools; use ark_bls12_381::{Bls12_381, Fr}; @@ -125,6 +127,7 @@ mod marlin { use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; + use ark_std::rand::RngCore; @@ -134,13 +137,15 @@ mod marlin { type MarlinInst = Marlin; fn test_circuit(num_constraints: usize, num_variables: usize) { - let rng = &mut ark_std::test_rng(); + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); - let universal_srs = MarlinInst::universal_setup(100, 25, 300, rng).unwrap(); + let universal_srs = MarlinInst::universal_setup(100, 25, 300, &mut rng).unwrap(); for _ in 0..100 { - let a = Fr::rand(rng); - let b = 
Fr::rand(rng); + + let (a,b) = rng.squeeze_field_elements(2).iter().map(|x: &Fr| x.to_owned()).collect_tuple().unwrap(); let mut c = a; c.mul_assign(&b); let mut d = c; @@ -156,13 +161,13 @@ mod marlin { let (index_pk, index_vk) = MarlinInst::index(&universal_srs, circ.clone()).unwrap(); println!("Called index"); - let proof = MarlinInst::prove(&index_pk, circ, rng).unwrap(); + let proof = MarlinInst::prove(&index_pk, circ, &mut rng).unwrap(); println!("Called prover"); - assert!(MarlinInst::verify(&index_vk, &[c, d], &proof, rng).unwrap()); + assert!(MarlinInst::verify(&index_vk, &[c, d], &proof, &mut rng).unwrap()); println!("Called verifier"); println!("\nShould not verify (i.e. verifier messages should print below):"); - assert!(!MarlinInst::verify(&index_vk, &[a, a], &proof, rng).unwrap()); + assert!(!MarlinInst::verify(&index_vk, &[a, a], &proof, &mut rng).unwrap()); } } @@ -209,9 +214,11 @@ mod marlin { #[test] /// Test on a constraint system that will trigger outlining. fn prove_and_test_outlining() { - let rng = &mut ark_std::test_rng(); + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); - let universal_srs = MarlinInst::universal_setup(150, 150, 150, rng).unwrap(); + let universal_srs = MarlinInst::universal_setup(150, 150, 150, &mut rng).unwrap(); let circ = OutlineTestCircuit { field_phantom: PhantomData, @@ -220,15 +227,15 @@ mod marlin { let (index_pk, index_vk) = MarlinInst::index(&universal_srs, circ.clone()).unwrap(); println!("Called index"); - let proof = MarlinInst::prove(&index_pk, circ, rng).unwrap(); + let proof = MarlinInst::prove(&index_pk, circ, &mut rng).unwrap(); println!("Called prover"); let mut inputs = Vec::new(); for i in 0..5 { - inputs.push(Fr::from(i as u128)); + inputs.push(Fr::from(i)); } - assert!(MarlinInst::verify(&index_vk, &inputs, &proof, rng).unwrap()); + assert!(MarlinInst::verify(&index_vk, &inputs, &proof, &mut rng).unwrap()); 
println!("Called verifier"); } } From cfefa0fb933c0ab006c4ef3955bb6721b100ff29 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 10:22:17 +0100 Subject: [PATCH 31/40] native absorption of field elements --- src/lib.rs | 45 +++++++++++++++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index ddf0bf7..309c0fa 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,7 +18,8 @@ #[macro_use] extern crate ark_std; -use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_crypto_primitives::absorb; +use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::challenge::ChallengeGenerator; @@ -83,6 +84,8 @@ pub mod ahp; pub use ahp::AHPForR1CS; use ahp::EvaluationsProvider; +use crate::ahp::prover::ProverMsg; + #[cfg(test)] mod test; @@ -97,7 +100,7 @@ pub struct Marlin< #[doc(hidden)] PhantomData, ); -impl, S>, S: CryptographicSponge+Default+RngCore> +impl, S>, S: CryptographicSponge+Default+RngCore> Marlin { /// The personalization string for this protocol. 
Used to personalize the @@ -189,7 +192,7 @@ impl, S>, S: Crypt let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); let mut fs_rng = S::default(); - fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap()); + absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes!(&index_pk.index_vk).unwrap(), &public_input); // -------------------------------------------------------------------- // First round @@ -210,7 +213,10 @@ impl, S>, S: Crypt .map(|p| p.commitment().clone()) .collect::>(); - fs_rng.absorb(&to_bytes![fcinput, prover_first_msg].unwrap()); + match prover_first_msg { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![fcinput].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![fcinput].unwrap()), + } let (verifier_first_msg, verifier_state) = AHPForR1CS::verifier_first_round(index_pk.index_vk.index_info, &mut fs_rng)?; @@ -235,7 +241,10 @@ impl, S>, S: Crypt .iter() .map(|p| p.commitment().clone()) .collect::>(); - fs_rng.absorb(&to_bytes![scinput, prover_second_msg].unwrap()); + match prover_second_msg { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![scinput].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![scinput].unwrap()), + } let (verifier_second_msg, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); @@ -259,7 +268,10 @@ impl, S>, S: Crypt .iter() .map(|p| p.commitment().clone()) .collect::>(); - fs_rng.absorb(&to_bytes![tcinput, prover_third_msg].unwrap()); + match prover_third_msg { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![tcinput].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![tcinput].unwrap()), + } let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- @@ -327,7 
+339,7 @@ impl, S>, S: Crypt let evaluations = evaluations.into_iter().map(|x| x.1).collect::>(); end_timer!(eval_time); - fs_rng.absorb(&to_bytes![&evaluations].unwrap()); + fs_rng.absorb(&evaluations); let mut opening_challenge: ChallengeGenerator<_, S> = ChallengeGenerator::new_multivariate(fs_rng); let pc_proof = PC::open_combinations( @@ -376,12 +388,15 @@ impl, S>, S: Crypt let mut fs_rng = S::default(); fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); + // -------------------------------------------------------------------- // First round let first_comms = &proof.commitments[0]; - fs_rng.absorb(&to_bytes![first_comms.to_owned(), proof.prover_messages[0]].unwrap()); - + match &proof.prover_messages[0] { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![first_comms].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![first_comms].unwrap()), + } let (_, verifier_state) = AHPForR1CS::verifier_first_round(index_vk.index_info, &mut fs_rng)?; // -------------------------------------------------------------------- @@ -389,7 +404,10 @@ impl, S>, S: Crypt // -------------------------------------------------------------------- // Second round let second_comms = &proof.commitments[1]; - fs_rng.absorb(&to_bytes![second_comms.to_owned(), proof.prover_messages[1]].unwrap()); + match &proof.prover_messages[1] { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![second_comms].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![second_comms].unwrap()), + } let (_, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- @@ -397,7 +415,10 @@ impl, S>, S: Crypt // -------------------------------------------------------------------- // Third round let third_comms = &proof.commitments[2]; - fs_rng.absorb(&to_bytes![third_comms.to_owned(), 
proof.prover_messages[2]].unwrap()); + match &proof.prover_messages[2] { + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![third_comms].unwrap(), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![third_comms].unwrap()), + } let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); // -------------------------------------------------------------------- @@ -428,7 +449,7 @@ impl, S>, S: Crypt let (query_set, verifier_state) = AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); - fs_rng.absorb(&to_bytes![&proof.evaluations].unwrap()); + fs_rng.absorb(&proof.evaluations); let mut opening_challenge: ChallengeGenerator = ChallengeGenerator::new_multivariate(fs_rng); let mut evaluations = Evaluations::new(); From f2152b3edfd58a573fa355612bc08492f91dc3a8 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 10:53:41 +0100 Subject: [PATCH 32/40] tests passing --- src/lib.rs | 4 ++-- src/test.rs | 5 +---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 309c0fa..38ed262 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -214,7 +214,7 @@ impl, S>, S .collect::>(); match prover_first_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![fcinput].unwrap(), elems);}, + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![fcinput].unwrap(), &elems);}, ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![fcinput].unwrap()), } @@ -386,7 +386,7 @@ impl, S>, S }; let mut fs_rng = S::default(); - fs_rng.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); + absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes!(&index_vk).unwrap(), &public_input); // -------------------------------------------------------------------- diff --git a/src/test.rs b/src/test.rs index fb40cec..ac1f9ce 100644 --- a/src/test.rs +++ b/src/test.rs @@ -122,8 +122,6 @@ mod marlin { use ark_bls12_381::{Bls12_381, Fr}; - use 
ark_ec::pairing::Pairing; - use ark_ff::UniformRand; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; @@ -131,8 +129,7 @@ mod marlin { - type BF = ::BaseField; - type S = SimplePoseidonRng; + type S = SimplePoseidonRng; type MultiPC = MarlinKZG10, S>; type MarlinInst = Marlin; From 6b18bba2dd1c12ff10bb0cfd05fce017acc646b7 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 11:08:38 +0100 Subject: [PATCH 33/40] benches running --- benches/bench.rs | 60 +++++++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 26 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 10e5542..d6234b5 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -16,7 +16,9 @@ use ark_relations::{ lc, r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}, }; -use ark_std::{ops::Mul, UniformRand}; +use ark_std::{ops::Mul, UniformRand, rand::RngCore}; +use ark_crypto_primitives::sponge::CryptographicSponge; +use itertools::Itertools; @@ -69,10 +71,13 @@ impl ConstraintSynthesizer for DummyCircuit { macro_rules! marlin_prove_bench { ($bench_name:ident, $bench_field:ty, $bench_pairing_engine:ty) => { - let rng = &mut ark_std::test_rng(); + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng<$bench_field> = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); + let (a,b)= rng.squeeze_field_elements(2).iter().map(|x: &$bench_field| x.to_owned()).collect_tuple().unwrap(); let c = DummyCircuit::<$bench_field> { - a: Some(<$bench_field>::rand(rng)), - b: Some(<$bench_field>::rand(rng)), + a: Some(a), + b: Some(b), num_variables: 10, num_constraints: 65536, }; @@ -82,19 +87,19 @@ macro_rules! 
marlin_prove_bench { SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, - >::universal_setup(65536, 65536, 3 * 65536, rng) + SimplePoseidonRng<$bench_field>, + >::universal_setup(65536, 65536, 3 * 65536, &mut rng) .unwrap(); let (pk, _) = Marlin::< $bench_field, SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >::index(&srs, c) .unwrap(); @@ -106,10 +111,10 @@ macro_rules! marlin_prove_bench { SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, - >::prove(&pk, c.clone(), rng) + SimplePoseidonRng<$bench_field>, + >::prove(&pk, c.clone(), &mut rng) .unwrap(); } @@ -123,10 +128,13 @@ macro_rules! marlin_prove_bench { macro_rules! marlin_verify_bench { ($bench_name:ident, $bench_field:ty, $bench_pairing_engine:ty) => { - let rng = &mut ark_std::test_rng(); + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng<$bench_field> = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); + let (a,b)= rng.squeeze_field_elements(2).iter().map(|x: &$bench_field| x.to_owned()).collect_tuple().unwrap(); let c = DummyCircuit::<$bench_field> { - a: Some(<$bench_field>::rand(rng)), - b: Some(<$bench_field>::rand(rng)), + a: Some(a), + b: Some(b), num_variables: 10, num_constraints: 65536, }; @@ -136,19 +144,19 @@ macro_rules! 
marlin_verify_bench { SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, - >::universal_setup(65536, 65536, 3 * 65536, rng) + SimplePoseidonRng<$bench_field>, + >::universal_setup(65536, 65536, 3 * 65536, &mut rng) .unwrap(); let (pk, vk) = Marlin::< $bench_field, SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >::index(&srs, c) .unwrap(); let proof = Marlin::< @@ -156,10 +164,10 @@ macro_rules! marlin_verify_bench { SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, - >::prove(&pk, c.clone(), rng) + SimplePoseidonRng<$bench_field>, + >::prove(&pk, c.clone(), &mut rng) .unwrap(); let v = c.a.unwrap().mul(c.b.unwrap()); @@ -172,10 +180,10 @@ macro_rules! marlin_verify_bench { SonicKZG10< $bench_pairing_engine, DensePolynomial<$bench_field>, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, + SimplePoseidonRng<$bench_field>, >, - SimplePoseidonRng<<$bench_pairing_engine as Pairing>::BaseField>, - >::verify(&vk, &vec![v], &proof, rng) + SimplePoseidonRng<$bench_field>, + >::verify(&vk, &vec![v], &proof, &mut rng) .unwrap(); } From a3d9ba6f7416411f235cd9cb56650f054090c3bc Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 11:26:24 +0100 Subject: [PATCH 34/40] removed to_bytes! 
--- src/lib.rs | 55 ++++++++++++++++++------------------------------------ 1 file changed, 18 insertions(+), 37 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 38ed262..ae54330 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -44,29 +44,10 @@ macro_rules! eprintln { ($($arg: tt)*) => {}; } -/// Takes as input a sequence of structs, and converts them to a series of -/// bytes. All traits that implement `Bytes` can be automatically converted to -/// bytes in this manner. -#[macro_export] -macro_rules! to_bytes { - ($($x:expr),*) => ({ - let mut buf = $crate::vec![]; - {$crate::push_to_vec!(buf, $($x),*)}.map(|_| buf) - }); -} - -#[doc(hidden)] -#[macro_export] -macro_rules! push_to_vec { - ($buf:expr, $y:expr, $($x:expr),*) => ({ - { - $y.serialize_compressed(&mut $buf) - }.and({$crate::push_to_vec!($buf, $($x),*)}) - }); - - ($buf:expr, $x:expr) => ({ - $x.serialize_compressed(&mut $buf) - }) +fn to_bytes(x: &T) -> Vec { + let mut buf = Vec::new(); + x.serialize_compressed(&mut buf).unwrap(); + buf } /// Implements a Fiat-Shamir based Rng that allows one to incrementally update /// the seed based on new messages in the proof transcript. 
@@ -192,7 +173,7 @@ impl, S>, S let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); let mut fs_rng = S::default(); - absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes!(&index_pk.index_vk).unwrap(), &public_input); + absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, to_bytes(&index_pk.index_vk), &public_input); // -------------------------------------------------------------------- // First round @@ -214,8 +195,8 @@ impl, S>, S .collect::>(); match prover_first_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![fcinput].unwrap(), &elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![fcinput].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&fcinput), &elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&fcinput)), } let (verifier_first_msg, verifier_state) = @@ -242,8 +223,8 @@ impl, S>, S .map(|p| p.commitment().clone()) .collect::>(); match prover_second_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![scinput].unwrap(), elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![scinput].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&scinput), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&scinput)), } let (verifier_second_msg, verifier_state) = @@ -269,8 +250,8 @@ impl, S>, S .map(|p| p.commitment().clone()) .collect::>(); match prover_third_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![tcinput].unwrap(), elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![tcinput].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&tcinput), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&tcinput)), } let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); @@ -386,7 +367,7 @@ impl, S>, S }; let mut fs_rng = 
S::default(); - absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes!(&index_vk).unwrap(), &public_input); + absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes(index_vk), &public_input); // -------------------------------------------------------------------- @@ -394,8 +375,8 @@ impl, S>, S let first_comms = &proof.commitments[0]; match &proof.prover_messages[0] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![first_comms].unwrap(), elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![first_comms].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(first_comms), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(first_comms)), } let (_, verifier_state) = AHPForR1CS::verifier_first_round(index_vk.index_info, &mut fs_rng)?; @@ -405,8 +386,8 @@ impl, S>, S // Second round let second_comms = &proof.commitments[1]; match &proof.prover_messages[1] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![second_comms].unwrap(), elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![second_comms].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(second_comms), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(second_comms)), } let (_, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); @@ -416,8 +397,8 @@ impl, S>, S // Third round let third_comms = &proof.commitments[2]; match &proof.prover_messages[2] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes![third_comms].unwrap(), elems);}, - ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes![third_comms].unwrap()), + ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(third_comms), elems);}, + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(third_comms)), } let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); From 3f886a972c5bfcac7fc5eb262c9ec210259c693f Mon Sep 17 
00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 11:39:24 +0100 Subject: [PATCH 35/40] cargo fix+fmt --- benches/bench.rs | 22 +++++++++----- src/ahp/constraint_systems.rs | 9 ++++-- src/ahp/prover.rs | 8 +++-- src/ahp/verifier.rs | 14 ++++++--- src/lib.rs | 56 +++++++++++++++++++++++++---------- src/rng.rs | 35 +++++++++++----------- src/test.rs | 13 ++++---- 7 files changed, 103 insertions(+), 54 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index d6234b5..c29a6b3 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -3,7 +3,8 @@ // where N is the number of threads you want to use (N = 1 for single-thread). use ark_bls12_381::{Bls12_381, Fr as BlsFr}; -use ark_ec::pairing::Pairing; + +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_marlin::{Marlin, SimplePoseidonRng}; use ark_mnt4_298::{Fr as MNT4Fr, MNT4_298}; @@ -16,12 +17,9 @@ use ark_relations::{ lc, r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}, }; -use ark_std::{ops::Mul, UniformRand, rand::RngCore}; -use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_std::{ops::Mul, rand::RngCore}; use itertools::Itertools; - - const NUM_PROVE_REPEATITIONS: usize = 10; const NUM_VERIFY_REPEATITIONS: usize = 50; @@ -74,7 +72,12 @@ macro_rules! marlin_prove_bench { let mut rng_seed = ark_std::test_rng(); let mut rng: SimplePoseidonRng<$bench_field> = SimplePoseidonRng::default(); rng.absorb(&rng_seed.next_u64()); - let (a,b)= rng.squeeze_field_elements(2).iter().map(|x: &$bench_field| x.to_owned()).collect_tuple().unwrap(); + let (a, b) = rng + .squeeze_field_elements(2) + .iter() + .map(|x: &$bench_field| x.to_owned()) + .collect_tuple() + .unwrap(); let c = DummyCircuit::<$bench_field> { a: Some(a), b: Some(b), @@ -131,7 +134,12 @@ macro_rules! 
marlin_verify_bench { let mut rng_seed = ark_std::test_rng(); let mut rng: SimplePoseidonRng<$bench_field> = SimplePoseidonRng::default(); rng.absorb(&rng_seed.next_u64()); - let (a,b)= rng.squeeze_field_elements(2).iter().map(|x: &$bench_field| x.to_owned()).collect_tuple().unwrap(); + let (a, b) = rng + .squeeze_field_elements(2) + .iter() + .map(|x: &$bench_field| x.to_owned()) + .collect_tuple() + .unwrap(); let c = DummyCircuit::<$bench_field> { a: Some(a), b: Some(b), diff --git a/src/ahp/constraint_systems.rs b/src/ahp/constraint_systems.rs index d561e06..31a0dfb 100644 --- a/src/ahp/constraint_systems.rs +++ b/src/ahp/constraint_systems.rs @@ -293,7 +293,7 @@ mod tests { use super::*; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_relations::r1cs::Matrix; - use ark_std::{collections::BTreeMap, UniformRand}; + use ark_std::collections::BTreeMap; use ark_bls12_381::Fr as F; use ark_ff::{One, Zero}; @@ -377,7 +377,12 @@ mod tests { let mut rng_seed = ark_std::test_rng(); let mut rng: SimplePoseidonRng = SimplePoseidonRng::default(); rng.absorb(&rng_seed.next_u64()); - let (eta_a, eta_b, eta_c)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); + let (eta_a, eta_b, eta_c) = rng + .squeeze_field_elements(3) + .iter() + .map(|x: &F| x.to_owned()) + .collect_tuple() + .unwrap(); for (k_index, k) in interpolation_domain.elements().enumerate() { let row_val = joint_arith.row.evaluate(&k); let col_val = joint_arith.col.evaluate(&k); diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 44546dc..3554da1 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -314,8 +314,12 @@ impl AHPForR1CS { }) .collect(); - - let (f1,f2,f3)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); + let (f1, f2, f3) = rng + .squeeze_field_elements(3) + .iter() + .map(|x: &F| x.to_owned()) + .collect_tuple() + .unwrap(); let w_poly = &EvaluationsOnDomain::from_vec_and_domain(w_poly_evals, 
domain_h) .interpolate() + &(&DensePolynomial::from_coefficients_slice(&[f1]) * &v_H); diff --git a/src/ahp/verifier.rs b/src/ahp/verifier.rs index 9d5a233..e8b739a 100644 --- a/src/ahp/verifier.rs +++ b/src/ahp/verifier.rs @@ -1,10 +1,10 @@ #![allow(non_snake_case)] -use itertools::Itertools; use crate::ahp::indexer::IndexInfo; use crate::ahp::*; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_std::rand::RngCore; +use itertools::Itertools; use ark_ff::PrimeField; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; @@ -51,14 +51,20 @@ impl AHPForR1CS { return Err(Error::NonSquareMatrix); } - let domain_h: GeneralEvaluationDomain = GeneralEvaluationDomain::new(index_info.num_constraints) - .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let domain_h: GeneralEvaluationDomain = + GeneralEvaluationDomain::new(index_info.num_constraints) + .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let domain_k = GeneralEvaluationDomain::new(index_info.num_non_zero) .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let alpha = domain_h.sample_element_outside_domain(rng).to_owned(); - let (eta_a, eta_b, eta_c)= rng.squeeze_field_elements(3).iter().map(|x: &F| x.to_owned()).collect_tuple().unwrap(); + let (eta_a, eta_b, eta_c) = rng + .squeeze_field_elements(3) + .iter() + .map(|x: &F| x.to_owned()) + .collect_tuple() + .unwrap(); let msg = VerifierFirstMsg { alpha, diff --git a/src/lib.rs b/src/lib.rs index ae54330..ae2ceed 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -10,7 +10,7 @@ #![deny(unused_import_braces, unused_qualifications, trivial_casts)] #![deny(trivial_numeric_casts)] #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] -// #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] +#![deny(unused_attributes, unused_imports, unused_mut, missing_docs)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] 
#![forbid(unsafe_code)] @@ -81,8 +81,11 @@ pub struct Marlin< #[doc(hidden)] PhantomData, ); -impl, S>, S: CryptographicSponge+Default+RngCore> - Marlin +impl< + F: PrimeField + Absorb, + PC: PolynomialCommitment, S>, + S: CryptographicSponge + Default + RngCore, + > Marlin { /// The personalization string for this protocol. Used to personalize the /// Fiat-Shamir rng. @@ -162,7 +165,7 @@ impl, S>, S } /// Create a zkSNARK asserting that the constraint system is satisfied. - pub fn prove, R: RngCore+CryptographicSponge>( + pub fn prove, R: RngCore + CryptographicSponge>( index_pk: &IndexProverKey, c: C, zk_rng: &mut R, @@ -173,7 +176,12 @@ impl, S>, S let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); let mut fs_rng = S::default(); - absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, to_bytes(&index_pk.index_vk), &public_input); + absorb!( + &mut fs_rng, + &Self::PROTOCOL_NAME, + to_bytes(&index_pk.index_vk), + &public_input + ); // -------------------------------------------------------------------- // First round @@ -195,7 +203,9 @@ impl, S>, S .collect::>(); match prover_first_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&fcinput), &elems);}, + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(&fcinput), &elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&fcinput)), } @@ -223,7 +233,9 @@ impl, S>, S .map(|p| p.commitment().clone()) .collect::>(); match prover_second_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&scinput), elems);}, + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(&scinput), elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&scinput)), } @@ -250,7 +262,9 @@ impl, S>, S .map(|p| p.commitment().clone()) .collect::>(); match prover_third_msg { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(&tcinput), elems);}, + 
ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(&tcinput), elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&tcinput)), } @@ -321,7 +335,8 @@ impl, S>, S end_timer!(eval_time); fs_rng.absorb(&evaluations); - let mut opening_challenge: ChallengeGenerator<_, S> = ChallengeGenerator::new_multivariate(fs_rng); + let mut opening_challenge: ChallengeGenerator<_, S> = + ChallengeGenerator::new_multivariate(fs_rng); let pc_proof = PC::open_combinations( &index_pk.committer_key, @@ -367,15 +382,21 @@ impl, S>, S }; let mut fs_rng = S::default(); - absorb!(&mut fs_rng, &Self::PROTOCOL_NAME, &to_bytes(index_vk), &public_input); - + absorb!( + &mut fs_rng, + &Self::PROTOCOL_NAME, + &to_bytes(index_vk), + &public_input + ); // -------------------------------------------------------------------- // First round let first_comms = &proof.commitments[0]; match &proof.prover_messages[0] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(first_comms), elems);}, + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(first_comms), elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(first_comms)), } let (_, verifier_state) = @@ -386,7 +407,9 @@ impl, S>, S // Second round let second_comms = &proof.commitments[1]; match &proof.prover_messages[1] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(second_comms), elems);}, + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(second_comms), elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(second_comms)), } @@ -397,7 +420,9 @@ impl, S>, S // Third round let third_comms = &proof.commitments[2]; match &proof.prover_messages[2] { - ProverMsg::FieldElements(ref elems) => {absorb!(&mut fs_rng, &to_bytes(third_comms), elems);}, + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(third_comms), elems); + } ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(third_comms)), } 
@@ -431,7 +456,8 @@ impl, S>, S AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); fs_rng.absorb(&proof.evaluations); - let mut opening_challenge: ChallengeGenerator = ChallengeGenerator::new_multivariate(fs_rng); + let mut opening_challenge: ChallengeGenerator = + ChallengeGenerator::new_multivariate(fs_rng); let mut evaluations = Evaluations::new(); let mut evaluation_labels = Vec::new(); diff --git a/src/rng.rs b/src/rng.rs index 8ed317d..c8fea70 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -1,5 +1,7 @@ use crate::Vec; -use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge, find_poseidon_ark_and_mds}; +use ark_crypto_primitives::sponge::poseidon::{ + find_poseidon_ark_and_mds, PoseidonConfig, PoseidonSponge, +}; use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge}; use ark_ff::PrimeField; @@ -9,7 +11,6 @@ use ark_std::rand::RngCore; /// and the new seed material. /// Exposes a particular instantiation of the Poseidon sponge - #[derive(Clone)] pub struct SimplePoseidonRng(PoseidonSponge); @@ -62,33 +63,31 @@ impl CryptographicSponge for SimplePoseidonRng { self.0.squeeze_bits(num_bits) } } - -/// Mock trait for use in Marlin prover pub trait DefaultSpongeRNG: Default + CryptographicSponge + RngCore {} impl DefaultSpongeRNG for SimplePoseidonRng {} /// Instantiate Poseidon sponge with default parameters impl Default for SimplePoseidonRng { fn default() -> Self { - // let default = + // let default = // Self(PoseidonSponge::new(&poseidon_parameters_for_test())) - let (alpha, rate, full_rounds, partial_rounds) = (17,2,8,29); - let (ark,mds) = find_poseidon_ark_and_mds( + let (alpha, rate, full_rounds, partial_rounds) = (17, 2, 8, 29); + let (ark, mds) = find_poseidon_ark_and_mds( F::MODULUS_BIT_SIZE as u64, rate, full_rounds, partial_rounds, 0, ); - let config = PoseidonConfig { - full_rounds:full_rounds as usize, - partial_rounds:partial_rounds as usize, - alpha:alpha as u64, - ark, - mds, - rate, - capacity:1, - }; - 
SimplePoseidonRng(PoseidonSponge::new(&config)) + let config = PoseidonConfig { + full_rounds: full_rounds as usize, + partial_rounds: partial_rounds as usize, + alpha: alpha as u64, + ark, + mds, + rate, + capacity: 1, + }; + SimplePoseidonRng(PoseidonSponge::new(&config)) } -} \ No newline at end of file +} diff --git a/src/test.rs b/src/test.rs index ac1f9ce..867cf3d 100644 --- a/src/test.rs +++ b/src/test.rs @@ -116,18 +116,15 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; use crate::rng::SimplePoseidonRng; - use ark_crypto_primitives::sponge::CryptographicSponge; use crate::Marlin; + use ark_crypto_primitives::sponge::CryptographicSponge; use itertools::Itertools; - use ark_bls12_381::{Bls12_381, Fr}; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly_commit::marlin_pc::MarlinKZG10; use ark_std::ops::MulAssign; use ark_std::rand::RngCore; - - type S = SimplePoseidonRng; type MultiPC = MarlinKZG10, S>; @@ -141,8 +138,12 @@ mod marlin { let universal_srs = MarlinInst::universal_setup(100, 25, 300, &mut rng).unwrap(); for _ in 0..100 { - - let (a,b) = rng.squeeze_field_elements(2).iter().map(|x: &Fr| x.to_owned()).collect_tuple().unwrap(); + let (a, b) = rng + .squeeze_field_elements(2) + .iter() + .map(|x: &Fr| x.to_owned()) + .collect_tuple() + .unwrap(); let mut c = a; c.mul_assign(&b); let mut d = c; From 11bd76693142c7e6206e0cc8e85f84493a0ba78b Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 11:45:35 +0100 Subject: [PATCH 36/40] removed unnecessary trait --- src/rng.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/rng.rs b/src/rng.rs index c8fea70..5a35eef 100644 --- a/src/rng.rs +++ b/src/rng.rs @@ -63,9 +63,7 @@ impl CryptographicSponge for SimplePoseidonRng { self.0.squeeze_bits(num_bits) } } -pub trait DefaultSpongeRNG: Default + CryptographicSponge + RngCore {} -impl DefaultSpongeRNG for SimplePoseidonRng {} /// Instantiate Poseidon sponge with default parameters impl Default for 
SimplePoseidonRng { fn default() -> Self { From c5ab30b5589a02ac29faac1f1c96e024a876c299 Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 11:57:58 +0100 Subject: [PATCH 37/40] reintroduce optimization of tests after debugging --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e790cb5..1da0b2c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ debug = true panic = 'abort' [profile.test] -opt-level = 0 +opt-level = 3 debug-assertions = true incremental = true debug = true From 788a34e922bcbf4e906ed38a33b2f51b6267d94c Mon Sep 17 00:00:00 2001 From: AB Date: Thu, 28 Sep 2023 12:02:35 +0100 Subject: [PATCH 38/40] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 591e52d..d113ae6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,8 @@ - Change dependency to version `0.4.0` of other arkwork-rs crates. - Fiat-Shamir transformation for the AHP uses the Poseidon sponge function. +- Introduced Absorb trait bound on field elements to facilitate more efficient absorption methods. +- Added RngCore and CryptographicSponge traits for rng sources. ## v0.3.0 From 093fdccf991786ec2771659fd020e278c28902e5 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 9 Oct 2023 15:52:59 +0100 Subject: [PATCH 39/40] added fast_prove and verify methods --- CHANGELOG.md | 2 +- src/lib.rs | 354 ++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 351 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d113ae6..d135ec0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,7 +18,7 @@ - Change dependency to version `0.4.0` of other arkwork-rs crates. - Fiat-Shamir transformation for the AHP uses the Poseidon sponge function. -- Introduced Absorb trait bound on field elements to facilitate more efficient absorption methods. 
+- Introduced fast_prove and fast_verify methods for PrimeFields that also implement the Absorb trait. - Added RngCore and CryptographicSponge traits for rng sources. ## v0.3.0 diff --git a/src/lib.rs b/src/lib.rs index ae2ceed..0992f58 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -70,7 +70,8 @@ use crate::ahp::prover::ProverMsg; #[cfg(test)] mod test; -/// The compiled argument system.FiatShamiRng +/// Marker trait for when we can use fast absord of field elements +/// The compiled argument system. pub struct Marlin< F: PrimeField, PC: PolynomialCommitment, S>, @@ -82,7 +83,7 @@ pub struct Marlin< ); impl< - F: PrimeField + Absorb, + F: PrimeField, PC: PolynomialCommitment, S>, S: CryptographicSponge + Default + RngCore, > Marlin @@ -163,7 +164,6 @@ impl< Ok((index_pk, index_vk)) } - /// Create a zkSNARK asserting that the constraint system is satisfied. pub fn prove, R: RngCore + CryptographicSponge>( index_pk: &IndexProverKey, @@ -173,6 +173,351 @@ impl< let prover_time = start_timer!(|| "Marlin::Prover"); // Add check that c is in the correct mode. 
+ let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; + let public_input = prover_init_state.public_input(); + let mut fs_rng = S::default(); + absorb!( + &mut fs_rng, + &Self::PROTOCOL_NAME, + to_bytes(&index_pk.index_vk), + &to_bytes(&public_input) + ); + + // -------------------------------------------------------------------- + // First round + + let (prover_first_msg, prover_first_oracles, prover_state) = + AHPForR1CS::prover_first_round(prover_init_state, zk_rng)?; + + let first_round_comm_time = start_timer!(|| "Committing to first round polys"); + let (first_comms, first_comm_rands) = PC::commit( + &index_pk.committer_key, + prover_first_oracles.iter(), + Some(zk_rng), + ) + .map_err(Error::from_pc_err)?; + end_timer!(first_round_comm_time); + let fcinput = first_comms + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); + + match prover_first_msg { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(&fcinput), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&fcinput)), + } + + let (verifier_first_msg, verifier_state) = + AHPForR1CS::verifier_first_round(index_pk.index_vk.index_info, &mut fs_rng)?; + // -------------------------------------------------------------------- + + // -------------------------------------------------------------------- + // Second round + + let (prover_second_msg, prover_second_oracles, prover_state) = + AHPForR1CS::prover_second_round(&verifier_first_msg, prover_state, zk_rng); + + let second_round_comm_time = start_timer!(|| "Committing to second round polys"); + let (second_comms, second_comm_rands) = PC::commit( + &index_pk.committer_key, + prover_second_oracles.iter(), + Some(zk_rng), + ) + .map_err(Error::from_pc_err)?; + end_timer!(second_round_comm_time); + + let scinput = second_comms + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); + match prover_second_msg { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, 
&to_bytes(&scinput), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&scinput)), + } + + let (verifier_second_msg, verifier_state) = + AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); + // -------------------------------------------------------------------- + + // -------------------------------------------------------------------- + // Third round + let (prover_third_msg, prover_third_oracles) = + AHPForR1CS::prover_third_round(&verifier_second_msg, prover_state, zk_rng)?; + + let third_round_comm_time = start_timer!(|| "Committing to third round polys"); + let (third_comms, third_comm_rands) = PC::commit( + &index_pk.committer_key, + prover_third_oracles.iter(), + Some(zk_rng), + ) + .map_err(Error::from_pc_err)?; + end_timer!(third_round_comm_time); + + let tcinput = third_comms + .iter() + .map(|p| p.commitment().clone()) + .collect::>(); + match prover_third_msg { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(&tcinput), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(&tcinput)), + } + + let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); + // -------------------------------------------------------------------- + + // Gather prover polynomials in one vector. + let polynomials: Vec<_> = index_pk + .index + .iter() + .chain(prover_first_oracles.iter()) + .chain(prover_second_oracles.iter()) + .chain(prover_third_oracles.iter()) + .collect(); + + // Gather commitments in one vector. 
+ #[rustfmt::skip] + let commitments = vec![ + first_comms.iter().map(|p| p.commitment().clone()).collect(), + second_comms.iter().map(|p| p.commitment().clone()).collect(), + third_comms.iter().map(|p| p.commitment().clone()).collect(), + ]; + let labeled_comms: Vec<_> = index_pk + .index_vk + .iter() + .cloned() + .zip(&AHPForR1CS::::INDEXER_POLYNOMIALS) + .map(|(c, l)| LabeledCommitment::new(l.to_string(), c, None)) + .chain(first_comms.iter().cloned()) + .chain(second_comms.iter().cloned()) + .chain(third_comms.iter().cloned()) + .collect(); + + // Gather commitment randomness together. + let comm_rands: Vec = index_pk + .index_comm_rands + .clone() + .into_iter() + .chain(first_comm_rands) + .chain(second_comm_rands) + .chain(third_comm_rands) + .collect(); + + // Compute the AHP verifier's query set. + let (query_set, verifier_state) = + AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); + let lc_s = AHPForR1CS::construct_linear_combinations( + &public_input, + &polynomials, + &verifier_state, + )?; + + let eval_time = start_timer!(|| "Evaluating linear combinations over query set"); + let mut evaluations = Vec::new(); + for (label, (_, point)) in &query_set { + let lc = lc_s + .iter() + .find(|lc| &lc.label == label) + .ok_or(ahp::Error::MissingEval(label.to_string()))?; + let eval = polynomials.get_lc_eval(&lc, *point)?; + if !AHPForR1CS::::LC_WITH_ZERO_EVAL.contains(&lc.label.as_ref()) { + evaluations.push((label.to_string(), eval)); + } + } + + evaluations.sort_by(|a, b| a.0.cmp(&b.0)); + let evaluations = evaluations.into_iter().map(|x| x.1).collect::>(); + end_timer!(eval_time); + + fs_rng.absorb(&to_bytes(&evaluations)); + let mut opening_challenge: ChallengeGenerator<_, S> = + ChallengeGenerator::new_multivariate(fs_rng); + + let pc_proof = PC::open_combinations( + &index_pk.committer_key, + &lc_s, + polynomials, + &labeled_comms, + &query_set, + &mut opening_challenge, + &comm_rands, + Some(zk_rng), + ) + .map_err(Error::from_pc_err)?; + + 
// Gather prover messages together. + let prover_messages = vec![prover_first_msg, prover_second_msg, prover_third_msg]; + + let proof = Proof::new(commitments, evaluations, prover_messages, pc_proof); + proof.print_size_info(); + end_timer!(prover_time); + Ok(proof) + } + + /// Verify that a proof for the constrain system defined by `C` asserts that + /// all constraints are satisfied. + pub fn verify( + index_vk: &IndexVerifierKey, + public_input: &[F], + proof: &Proof, + rng: &mut R, + ) -> Result> { + let verifier_time = start_timer!(|| "Marlin::Verify"); + + let public_input = { + let domain_x = GeneralEvaluationDomain::::new(public_input.len() + 1).unwrap(); + + let mut unpadded_input = public_input.to_vec(); + unpadded_input.resize( + core::cmp::max(public_input.len(), domain_x.size() - 1), + F::zero(), + ); + + unpadded_input + }; + + let mut fs_rng = S::default(); + absorb!( + &mut fs_rng, + &Self::PROTOCOL_NAME, + &to_bytes(index_vk), + &to_bytes(&public_input) + ); + + // -------------------------------------------------------------------- + // First round + + let first_comms = &proof.commitments[0]; + match &proof.prover_messages[0] { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(first_comms), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(first_comms)), + } + let (_, verifier_state) = + AHPForR1CS::verifier_first_round(index_vk.index_info, &mut fs_rng)?; + // -------------------------------------------------------------------- + + // -------------------------------------------------------------------- + // Second round + let second_comms = &proof.commitments[1]; + match &proof.prover_messages[1] { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(second_comms), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(second_comms)), + } + + let (_, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); + // 
-------------------------------------------------------------------- + + // -------------------------------------------------------------------- + // Third round + let third_comms = &proof.commitments[2]; + match &proof.prover_messages[2] { + ProverMsg::FieldElements(ref elems) => { + absorb!(&mut fs_rng, &to_bytes(third_comms), &to_bytes(elems)); + } + ProverMsg::EmptyMessage => fs_rng.absorb(&to_bytes(third_comms)), + } + + let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); + // -------------------------------------------------------------------- + + // Collect degree bounds for commitments. Indexed polynomials have *no* + // degree bounds because we know the committed index polynomial has the + // correct degree. + let index_info = index_vk.index_info; + let degree_bounds = vec![None; index_vk.index_comms.len()] + .into_iter() + .chain(AHPForR1CS::prover_first_round_degree_bounds(&index_info)) + .chain(AHPForR1CS::prover_second_round_degree_bounds(&index_info)) + .chain(AHPForR1CS::prover_third_round_degree_bounds(&index_info)) + .collect::>(); + + // Gather commitments in one vector. 
+ let commitments: Vec<_> = index_vk + .iter() + .chain(first_comms) + .chain(second_comms) + .chain(third_comms) + .cloned() + .zip(AHPForR1CS::::polynomial_labels()) + .zip(degree_bounds) + .map(|((c, l), d)| LabeledCommitment::new(l, c, d)) + .collect(); + + let (query_set, verifier_state) = + AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng); + + fs_rng.absorb(&to_bytes(&proof.evaluations)); + let mut opening_challenge: ChallengeGenerator = + ChallengeGenerator::new_multivariate(fs_rng); + + let mut evaluations = Evaluations::new(); + let mut evaluation_labels = Vec::new(); + for (poly_label, (_, point)) in query_set.iter().cloned() { + if AHPForR1CS::::LC_WITH_ZERO_EVAL.contains(&poly_label.as_ref()) { + evaluations.insert((poly_label, point), F::zero()); + } else { + evaluation_labels.push((poly_label, point)); + } + } + evaluation_labels.sort_by(|a, b| a.0.cmp(&b.0)); + for (q, eval) in evaluation_labels.into_iter().zip(&proof.evaluations) { + evaluations.insert(q, *eval); + } + + let lc_s = AHPForR1CS::construct_linear_combinations( + &public_input, + &evaluations, + &verifier_state, + )?; + + let evaluations_are_correct = PC::check_combinations( + &index_vk.verifier_key, + &lc_s, + &commitments, + &query_set, + &evaluations, + &proof.pc_proof, + &mut opening_challenge, + rng, + ) + .map_err(Error::from_pc_err)?; + + if !evaluations_are_correct { + eprintln!("PC::Check failed"); + } + end_timer!(verifier_time, || format!( + " PC::Check for AHP Verifier linear equations: {}", + evaluations_are_correct + )); + Ok(evaluations_are_correct) + } +} + +impl< + F: PrimeField + Absorb, + PC: PolynomialCommitment, S>, + S: CryptographicSponge + Default + RngCore, + > Marlin +{ + /// Create a zkSNARK asserting that the constraint system is satisfied. 
+ /// Uses fast absorption of field elements into sponge + pub fn fast_prove, R: RngCore + CryptographicSponge>( + index_pk: &IndexProverKey, + c: C, + zk_rng: &mut R, + ) -> Result, Error> { + let prover_time = start_timer!(|| "Marlin::Prover"); + // Add check that c is in the correct mode. + let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); let mut fs_rng = S::default(); @@ -361,7 +706,8 @@ impl< /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. - pub fn verify( + /// Uses fast absorption of field elements into sponge + pub fn fast_verify( index_vk: &IndexVerifierKey, public_input: &[F], proof: &Proof, From 6a071f7e7932e6a2a2256f53d3b85127f4abc675 Mon Sep 17 00:00:00 2001 From: AB Date: Mon, 9 Oct 2023 16:11:18 +0100 Subject: [PATCH 40/40] added test of fast_proof and fast_verify --- src/test.rs | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/test.rs b/src/test.rs index 867cf3d..5fa1791 100644 --- a/src/test.rs +++ b/src/test.rs @@ -169,6 +169,7 @@ mod marlin { } } + #[test] fn prove_and_verify_with_tall_matrix_big() { let num_constraints = 100; @@ -236,4 +237,45 @@ mod marlin { assert!(MarlinInst::verify(&index_vk, &inputs, &proof, &mut rng).unwrap()); println!("Called verifier"); } + + #[test] + /// Test fast proof and verify + fn fast_prove_and_test() { + let mut rng_seed = ark_std::test_rng(); + let mut rng: SimplePoseidonRng = SimplePoseidonRng::default(); + rng.absorb(&rng_seed.next_u64()); + + let universal_srs = MarlinInst::universal_setup(150, 150, 150, &mut rng).unwrap(); + + for _ in 0..100 { + let (a, b) = rng + .squeeze_field_elements(2) + .iter() + .map(|x: &Fr| x.to_owned()) + .collect_tuple() + .unwrap(); + let mut c = a; + c.mul_assign(&b); + let mut d = c; + d.mul_assign(&b); + + let circ = Circuit { + a: Some(a), + b: Some(b), + num_constraints:20, + 
num_variables:100, + }; + + let (index_pk, index_vk) = MarlinInst::index(&universal_srs, circ.clone()).unwrap(); + println!("Called index"); + + let proof = MarlinInst::prove(&index_pk, circ, &mut rng).unwrap(); + println!("Called prover"); + + assert!(MarlinInst::verify(&index_vk, &[c, d], &proof, &mut rng).unwrap()); + println!("Called verifier"); + println!("\nShould not verify (i.e. verifier messages should print below):"); + assert!(!MarlinInst::verify(&index_vk, &[a, a], &proof, &mut rng).unwrap()); + }; + } }