Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 7 additions & 5 deletions saffron/src/blob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _}
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use tracing::{debug, instrument};
use tracing::{debug, debug_span, instrument};

// A FieldBlob<F> represents the encoding of a Vec<u8> as a list of polynomials over F,
// where F is a prime field. The polynomials are represented in the monomial basis.
Expand Down Expand Up @@ -43,10 +43,12 @@ impl<G: CommitmentCurve> FieldBlob<G> {
let field_elements = encode_for_domain(&domain, bytes);
let domain_size = domain.size();

let data: Vec<DensePolynomial<G::ScalarField>> = field_elements
.par_iter()
.map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
.collect();
let data: Vec<DensePolynomial<G::ScalarField>> = debug_span!("fft").in_scope(|| {
field_elements
.par_iter()
.map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
.collect()
});

let commitments = commit_to_blob_data(srs, &data);

Expand Down
50 changes: 48 additions & 2 deletions saffron/src/cli.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,23 @@
use clap::{arg, Parser};
use std::{fmt::Display, str::FromStr};

/// Newtype over raw bytes that round-trips through hex strings:
/// parsed via `FromStr` (accepting an optional "0x" prefix) and
/// printed via `Display` with a "0x" prefix.
#[derive(Debug, Clone)]
pub struct HexString(pub Vec<u8>);

impl FromStr for HexString {
    type Err = hex::FromHexError;

    /// Parses a hex string into bytes; a leading "0x" prefix is optional.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let digits = match s.strip_prefix("0x") {
            Some(rest) => rest,
            None => s,
        };
        let bytes = hex::decode(digits)?;
        Ok(HexString(bytes))
    }
}

impl Display for HexString {
    /// Writes the bytes as a "0x"-prefixed hex string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let encoded = hex::encode(&self.0);
        f.write_str("0x")?;
        f.write_str(&encoded)
    }
}

#[derive(Parser)]
pub struct EncodeFileArgs {
Expand All @@ -16,8 +35,12 @@ pub struct EncodeFileArgs {
#[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
pub srs_cache: Option<String>,

#[arg(long = "assert-commitment", value_name = "COMMITMENT")]
pub assert_commitment: Option<String>,
#[arg(
long = "assert-commitment",
value_name = "COMMITMENT",
help = "hash of commitments (hex encoded)"
)]
pub assert_commitment: Option<HexString>,
}

#[derive(Parser)]
Expand Down Expand Up @@ -46,6 +69,27 @@ pub struct ComputeCommitmentArgs {
pub srs_cache: Option<String>,
}

// CLI arguments for the `storage-proof` subcommand.
#[derive(Parser)]
pub struct StorageProofArgs {
    // Path to the file holding the previously encoded blob.
    #[arg(
        long,
        short = 'i',
        value_name = "FILE",
        help = "input file (encoded as field elements)"
    )]
    pub input: String,

    // Optional path to a cached SRS file.
    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
    pub srs_cache: Option<String>,

    // Hex-encoded challenge bytes; mapped to a field element to pick
    // the evaluation point for the proof.
    // Fix: help text previously read "challenge (hex encoded" with an
    // unbalanced parenthesis.
    #[arg(
        long = "challenge",
        value_name = "CHALLENGE",
        help = "challenge (hex encoded)"
    )]
    pub challenge: HexString,
}

#[derive(Parser)]
#[command(
name = "saffron",
Expand All @@ -59,4 +103,6 @@ pub enum Commands {
Decode(DecodeFileArgs),
#[command(name = "compute-commitment")]
ComputeCommitment(ComputeCommitmentArgs),
#[command(name = "storage-proof")]
StorageProof(StorageProofArgs),
}
78 changes: 54 additions & 24 deletions saffron/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,27 @@
use anyhow::Result;
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use clap::Parser;
use mina_curves::pasta::{Fp, Vesta};
use poly_commitment::{ipa::SRS, SRS as _};
use saffron::{blob::FieldBlob, cli, commitment, env, utils};
use kimchi::groupmap::GroupMap;
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, SRS as _};
use rand::rngs::OsRng;
use saffron::{
blob::FieldBlob,
cli::{self, HexString},
commitment, env, proof, utils,
};
use sha3::{Digest, Sha3_256};
use std::{
fs::File,
io::{Read, Write},
};
use tracing::debug;
use tracing::{debug, debug_span};

const DEFAULT_SRS_SIZE: usize = 1 << 16;

fn get_srs(cache: Option<String>) -> (SRS<Vesta>, Radix2EvaluationDomain<Fp>) {
match cache {
let res = match cache {
Some(cache) => {
let srs = env::get_srs_from_cache(cache);
let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
Expand All @@ -28,11 +35,16 @@ fn get_srs(cache: Option<String>) -> (SRS<Vesta>, Radix2EvaluationDomain<Fp>) {
let domain_size = DEFAULT_SRS_SIZE;
let srs = SRS::create(domain_size);
let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
srs.get_lagrange_basis(domain_fp);
debug!("SRS created successfully");
(srs, domain_fp)
}
}
};

debug_span!("get_lagrange_basis", basis_size = res.0.size()).in_scope(|| {
res.0.get_lagrange_basis(res.1);
});

res
}

fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
Expand Down Expand Up @@ -66,12 +78,12 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
.into_iter()
.for_each(|asserted_commitment| {
let bytes = rmp_serde::to_vec(&blob.commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize();
let computed_commitment = hex::encode(hash);
if asserted_commitment != computed_commitment {
let hash = Sha3_256::new().chain_update(bytes).finalize().to_vec();
if asserted_commitment.0 != hash {
panic!(
"commitment hash mismatch: asserted {}, computed {}",
asserted_commitment, computed_commitment
asserted_commitment,
HexString(hash)
);
}
});
Expand All @@ -81,16 +93,34 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
Ok(())
}

pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<String> {
pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<HexString> {
let (srs, domain_fp) = get_srs(args.srs_cache);
let mut file = File::open(args.input)?;
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let field_elems = utils::encode_for_domain(&domain_fp, &buf);
let commitments = commitment::commit_to_field_elems(&srs, domain_fp, field_elems);
let bytes = rmp_serde::to_vec(&commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize();
Ok(hex::encode(hash))
let hash = Sha3_256::new().chain_update(bytes).finalize().to_vec();
Ok(HexString(hash))
}

/// Reads an encoded blob from `args.input`, produces a storage proof at the
/// field element derived from the hex-encoded challenge, and returns the
/// MessagePack-serialized proof wrapped as a `HexString`.
pub fn storage_proof(args: cli::StorageProofArgs) -> Result<HexString> {
    let file = File::open(args.input)?;
    let blob: FieldBlob<Vesta> = rmp_serde::decode::from_read(file)?;
    let proof = {
        let (srs, _) = get_srs(args.srs_cache);
        let group_map = <Vesta as CommitmentCurve>::Map::setup();
        let mut rng = OsRng;
        // Map the raw challenge bytes onto a scalar field element.
        let evaluation_point = utils::encode(&args.challenge.0);
        proof::storage_proof::<Vesta, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(
            &srs,
            &group_map,
            blob,
            evaluation_point,
            &mut rng,
        )
    };
    // Propagate serialization failures through the Result instead of
    // panicking (the original used `.unwrap()`).
    let bytes = rmp_serde::to_vec(&proof)?;
    Ok(HexString(bytes))
}

pub fn main() -> Result<()> {
Expand All @@ -99,15 +129,15 @@ pub fn main() -> Result<()> {
match args {
cli::Commands::Encode(args) => encode_file(args),
cli::Commands::Decode(args) => decode_file(args),
cli::Commands::ComputeCommitment(args) => match compute_commitment(args) {
Ok(c) => {
println!("{}", c);
Ok(())
}
Err(e) => {
eprintln!("{}", e);
Err(e)
}
},
cli::Commands::ComputeCommitment(args) => {
let commitment = compute_commitment(args)?;
println!("{}", commitment);
Ok(())
}
cli::Commands::StorageProof(args) => {
let proof = storage_proof(args)?;
println!("{}", proof);
Ok(())
}
}
}
41 changes: 27 additions & 14 deletions saffron/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,26 +2,38 @@ use crate::blob::FieldBlob;
use ark_ec::AffineRepr;
use ark_ff::{One, PrimeField, Zero};
use ark_poly::{univariate::DensePolynomial, Polynomial, Radix2EvaluationDomain as D};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use kimchi::curve::KimchiCurve;
use mina_poseidon::FqSponge;
use o1_utils::ExtendedDensePolynomial;
use poly_commitment::{
commitment::{absorb_commitment, BatchEvaluationProof, Evaluation},
commitment::{absorb_commitment, BatchEvaluationProof, CommitmentCurve, Evaluation},
ipa::{OpeningProof, SRS},
utils::DensePolynomialOrEvaluations,
PolyComm,
};
use rand::rngs::OsRng;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use tracing::instrument;

/// Proof that the blob's committed data evaluates to `evaluation` at a
/// challenge point, bundled with the IPA opening proof. Produced by
/// `storage_proof` and consumed by `verify_storage_proof`.
#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
#[serde(bound = "G::ScalarField : CanonicalDeserialize + CanonicalSerialize")]
pub struct StorageProof<G: CommitmentCurve> {
    // Claimed evaluation at the challenge point; also fed to the verifier
    // as the combined inner product of the batch evaluation.
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub evaluation: G::ScalarField,
    // IPA opening proof attesting to `evaluation`.
    pub opening_proof: OpeningProof<G>,
}

#[instrument(skip_all, level = "debug")]
pub fn storage_proof<G: KimchiCurve, EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>>(
srs: &SRS<G>,
group_map: &G::Map,
blob: FieldBlob<G>,
evaluation_point: G::ScalarField,
rng: &mut OsRng,
) -> (G::ScalarField, OpeningProof<G>)
) -> StorageProof<G>
where
G::BaseField: PrimeField,
{
Expand All @@ -47,7 +59,7 @@ where
sponge.absorb_fr(&[evaluation]);
sponge
};
let proof = srs.open(
let opening_proof = srs.open(
group_map,
&[(
DensePolynomialOrEvaluations::<<G as AffineRepr>::ScalarField, D<G::ScalarField>> ::DensePolynomial(
Expand All @@ -63,7 +75,10 @@ where
opening_proof_sponge,
rng,
);
(evaluation, proof)
StorageProof {
evaluation,
opening_proof,
}
}

#[instrument(skip_all, level = "debug")]
Expand All @@ -75,15 +90,14 @@ pub fn verify_storage_proof<
group_map: &G::Map,
commitment: PolyComm<G>,
evaluation_point: G::ScalarField,
evaluation: G::ScalarField,
opening_proof: &OpeningProof<G>,
proof: &StorageProof<G>,
rng: &mut OsRng,
) -> bool
where
G::BaseField: PrimeField,
{
let mut opening_proof_sponge = EFqSponge::new(G::other_curve_sponge_params());
opening_proof_sponge.absorb_fr(&[evaluation]);
opening_proof_sponge.absorb_fr(&[proof.evaluation]);

srs.verify(
group_map,
Expand All @@ -94,10 +108,10 @@ where
evalscale: G::ScalarField::one(),
evaluations: vec![Evaluation {
commitment,
evaluations: vec![vec![evaluation]],
evaluations: vec![vec![proof.evaluation]],
}],
opening: opening_proof,
combined_inner_product: evaluation,
opening: &proof.opening_proof,
combined_inner_product: proof.evaluation,
}],
rng,
)
Expand Down Expand Up @@ -150,16 +164,15 @@ mod tests {
};
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &data);
let evaluation_point = Fp::rand(&mut rng);
let (evaluation, proof) = storage_proof::<
Vesta,
DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>,
let proof = storage_proof::<
Vesta, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>

>(&*SRS, &*GROUP_MAP, blob, evaluation_point, &mut rng);
let res = verify_storage_proof::<Vesta, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(
&*SRS,
&*GROUP_MAP,
commitment,
evaluation_point,
evaluation,
&proof,
&mut rng,
);
Expand Down
2 changes: 1 addition & 1 deletion saffron/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use ark_poly::EvaluationDomain;

// For injectivity, you can only use this on inputs of length at most
// 'F::MODULUS_BIT_SIZE / 8', e.g. for Vesta this is 31.
fn encode<Fp: PrimeField>(bytes: &[u8]) -> Fp {
pub fn encode<Fp: PrimeField>(bytes: &[u8]) -> Fp {
Fp::from_be_bytes_mod_order(bytes)
}

Expand Down
13 changes: 13 additions & 0 deletions saffron/test-encoding.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,19 @@ if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE
exit 1
fi

# Generate 32-byte random challenge as hex string
echo "Generating random challenge..."
CHALLENGE=$(head -c 32 /dev/urandom | xxd -p -c 32)
echo "Challenge: $CHALLENGE"

# Generate storage proof and capture proof output.
# Capture cargo's output directly so its exit status is what we test;
# the previous `… | tee /dev/stderr | tail -n 1` pipeline made `$?`
# report tail's status, so a failed proof generation was never caught.
echo "Generating storage proof..."
if ! PROOF_OUTPUT=$(cargo run --release --bin saffron storage-proof -i "$ENCODED_FILE" --challenge "$CHALLENGE" $SRS_ARG); then
  echo "Storage proof generation failed"
  exit 1
fi
# Mirror the full output to stderr (as `tee /dev/stderr` did), then keep
# the last line as the proof itself.
echo "$PROOF_OUTPUT" >&2
PROOF=$(printf '%s\n' "$PROOF_OUTPUT" | tail -n 1)

# Run decode
echo "Decoding $ENCODED_FILE to $DECODED_FILE"
if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE" $SRS_ARG; then
Expand Down
Loading