diff --git a/curdleproofs/curdleproofs/commitment.py b/curdleproofs/curdleproofs/commitment.py index e91be10..e16dbe6 100644 --- a/curdleproofs/curdleproofs/commitment.py +++ b/curdleproofs/curdleproofs/commitment.py @@ -1,55 +1,50 @@ from typing import Type, TypeVar from curdleproofs.util import ( - PointProjective, - Fr, point_projective_from_json, point_projective_to_json, BufReader, g1_to_bytes, ) -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, - eq, -) +from py_arkworks_bls12381 import G1Point, Scalar + T_GroupCommitment = TypeVar("T_GroupCommitment", bound="GroupCommitment") class GroupCommitment: - T_1: PointProjective - T_2: PointProjective + T_1: G1Point + T_2: G1Point - def __init__(self, T_1: PointProjective, T_2: PointProjective) -> None: + def __init__(self, T_1: G1Point, T_2: G1Point) -> None: self.T_1 = T_1 self.T_2 = T_2 @classmethod def new( cls: Type[T_GroupCommitment], - crs_G: PointProjective, - crs_H: PointProjective, - T: PointProjective, - r: Fr, + crs_G: G1Point, + crs_H: G1Point, + T: G1Point, + r: Scalar, ) -> T_GroupCommitment: - return cls(multiply(crs_G, int(r)), add(T, multiply(crs_H, int(r)))) + return cls(crs_G * r, T + crs_H * r) def __add__(self: T_GroupCommitment, other: object) -> T_GroupCommitment: if not isinstance(other, GroupCommitment): return NotImplemented - return type(self)(add(self.T_1, other.T_1), add(self.T_2, other.T_2)) + return type(self)(self.T_1 + other.T_1, self.T_2 + other.T_2) def __mul__(self: T_GroupCommitment, other: object) -> T_GroupCommitment: - if not isinstance(other, Fr): + if not isinstance(other, Scalar): return NotImplemented return type(self)( - multiply(self.T_1, int(other)), multiply(self.T_2, int(other)) + self.T_1 * other, self.T_2 * other ) def __eq__(self: T_GroupCommitment, __o: object) -> bool: if not isinstance(__o, GroupCommitment): return NotImplemented - return eq(self.T_1, __o.T_1) and eq(self.T_2, __o.T_2) + return self.T_1 == __o.T_1 and self.T_2 == 
__o.T_2 def to_json(self): return { diff --git a/curdleproofs/curdleproofs/crs.py b/curdleproofs/curdleproofs/crs.py index 9827323..f4e8e20 100644 --- a/curdleproofs/curdleproofs/crs.py +++ b/curdleproofs/curdleproofs/crs.py @@ -1,18 +1,16 @@ from functools import reduce import json from typing import List, Type, TypeVar -from py_ecc.optimized_bls12_381.optimized_curve import ( - add, - Z1 -) from curdleproofs.util import ( - PointProjective, get_random_point, point_projective_from_json, point_projective_to_json, BufReader, g1_to_bytes, + Z1 ) +from py_arkworks_bls12381 import G1Point + T_CurdleproofsCrs = TypeVar("T_CurdleproofsCrs", bound="CurdleproofsCrs") @@ -20,13 +18,13 @@ class CurdleproofsCrs: def __init__( self, - vec_G: List[PointProjective], - vec_H: List[PointProjective], - H: PointProjective, - G_t: PointProjective, - G_u: PointProjective, - G_sum: PointProjective, - H_sum: PointProjective, + vec_G: List[G1Point], + vec_H: List[G1Point], + H: G1Point, + G_t: G1Point, + G_u: G1Point, + G_sum: G1Point, + H_sum: G1Point, ) -> None: self.vec_G = vec_G self.vec_H = vec_H @@ -41,11 +39,11 @@ def new( cls: Type[T_CurdleproofsCrs], ell: int, n_blinders: int ) -> T_CurdleproofsCrs: count = ell + n_blinders + 3 - points: List[PointProjective] = [get_random_point() for _ in range(0, count)] + points: List[G1Point] = [get_random_point() for _ in range(0, count)] return cls.from_random_points(ell, n_blinders, points) @classmethod - def from_random_points(cls: Type[T_CurdleproofsCrs], ell: int, n_blinders: int, points: List[PointProjective]) -> T_CurdleproofsCrs: + def from_random_points(cls: Type[T_CurdleproofsCrs], ell: int, n_blinders: int, points: List[G1Point]) -> T_CurdleproofsCrs: vec_G = points[0:ell] vec_H = points[ell:ell + n_blinders] return cls( @@ -54,8 +52,8 @@ def from_random_points(cls: Type[T_CurdleproofsCrs], ell: int, n_blinders: int, H=points[ell + n_blinders + 0], G_t=points[ell + n_blinders + 1], G_u=points[ell + n_blinders + 2], - 
G_sum=reduce(add, vec_G, Z1), - H_sum=reduce(add, vec_H, Z1), + G_sum=reduce(lambda a, b: a + b, vec_G, Z1), + H_sum=reduce(lambda a, b: a + b, vec_H, Z1), ) def to_json(self) -> str: diff --git a/curdleproofs/curdleproofs/curdleproofs.py b/curdleproofs/curdleproofs/curdleproofs.py index fa8a359..28ff0eb 100644 --- a/curdleproofs/curdleproofs/curdleproofs.py +++ b/curdleproofs/curdleproofs/curdleproofs.py @@ -1,5 +1,4 @@ import json -import random from curdleproofs.crs import CurdleproofsCrs from curdleproofs.ipa import generate_blinders from curdleproofs.util import ( @@ -9,25 +8,18 @@ points_projective_to_bytes, BufReader, g1_to_bytes, + random_scalar, + Z1, ) from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List, Tuple, Type, TypeVar -from curdleproofs.util import ( - PointProjective, - Fr, - get_permutation, -) +from curdleproofs.util import get_permutation, g1_is_inf from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, - Z1, - is_inf, -) from curdleproofs.same_perm import SamePermutationProof from curdleproofs.same_msm import SameMSMProof from curdleproofs.same_scalar import SameScalarProof from curdleproofs.commitment import GroupCommitment +from py_arkworks_bls12381 import G1Point, Scalar N_BLINDERS = 4 @@ -37,11 +29,11 @@ class CurdleProofsProof: def __init__( self, - A: PointProjective, + A: G1Point, cm_T: GroupCommitment, cm_U: GroupCommitment, - R: PointProjective, - S: PointProjective, + R: G1Point, + S: G1Point, same_perm_proof: SamePermutationProof, same_scalar_proof: SameScalarProof, same_msm_proof: SameMSMProof, @@ -59,14 +51,14 @@ def __init__( def new( cls: Type[T_CurdleProofsProof], crs: CurdleproofsCrs, - vec_R: List[PointProjective], - vec_S: List[PointProjective], - vec_T: List[PointProjective], - vec_U: List[PointProjective], - M: PointProjective, + vec_R: List[G1Point], + vec_S: List[G1Point], + vec_T: 
List[G1Point], + vec_U: List[G1Point], + M: G1Point, permutation: List[int], - k: Fr, - vec_m_blinders: List[Fr], + k: Scalar, + vec_m_blinders: List[Scalar], ) -> T_CurdleProofsProof: ell = len(vec_R) @@ -79,13 +71,10 @@ def new( vec_a = transcript.get_and_append_challenges(b"curdleproofs_vec_a", ell) vec_a_blinders = generate_blinders(N_BLINDERS - 2) - vec_r_a_prime = vec_a_blinders + [Fr.zero(), Fr.zero()] + vec_r_a_prime = vec_a_blinders + [Scalar(0), Scalar(0)] vec_a_permuted = get_permutation(vec_a, permutation) - A = add( - compute_MSM(crs.vec_G, vec_a_permuted), - compute_MSM(crs.vec_H, vec_r_a_prime), - ) + A = compute_MSM(crs.vec_G, vec_a_permuted) + compute_MSM(crs.vec_H, vec_r_a_prime) same_perm_proof = SamePermutationProof.new( crs_G_vec=crs.vec_G, @@ -100,16 +89,16 @@ def new( transcript=transcript, ) - r_t = Fr(random.randint(1, Fr.field_modulus)) - r_u = Fr(random.randint(1, Fr.field_modulus)) + r_t = random_scalar() + r_u = random_scalar() R = compute_MSM(vec_R, vec_a) S = compute_MSM(vec_S, vec_a) cm_T: GroupCommitment = GroupCommitment.new( - crs.G_t, crs.H, multiply(R, int(k)), r_t + crs.G_t, crs.H, R * k, r_t ) cm_U: GroupCommitment = GroupCommitment.new( - crs.G_u, crs.H, multiply(S, int(k)), r_u + crs.G_u, crs.H, S * k, r_u ) same_scalar_proof = SameScalarProof.new( @@ -126,7 +115,7 @@ def new( transcript=transcript, ) - A_prime = add(add(A, cm_T.T_1), cm_U.T_1) + A_prime = A + cm_T.T_1 + cm_U.T_1 vec_G_with_blinders = ( crs.vec_G + crs.vec_H[: (N_BLINDERS - 2)] + [crs.G_t, crs.G_u] @@ -173,18 +162,18 @@ def new( def verify( self, crs: CurdleproofsCrs, - vec_R: List[PointProjective], - vec_S: List[PointProjective], - vec_T: List[PointProjective], - vec_U: List[PointProjective], - M: PointProjective, + vec_R: List[G1Point], + vec_S: List[G1Point], + vec_T: List[G1Point], + vec_U: List[G1Point], + M: G1Point, ): ell = len(vec_R) transcript = CurdleproofsTranscript(b"curdleproofs") msm_accumulator = MSMAccumulator() - if is_inf(vec_T[0]): + if 
g1_is_inf(vec_T[0]): raise Exception("vec_T[0] is infinity") transcript.append_list( @@ -218,7 +207,7 @@ def verify( transcript=transcript, ) - A_prime = add(add(self.A, self.cm_T.T_1), self.cm_U.T_1) + A_prime = self.A + self.cm_T.T_1 + self.cm_U.T_1 vec_G_with_blinders = ( crs.vec_G + crs.vec_H[: (N_BLINDERS - 2)] + [crs.G_t, crs.G_u] @@ -311,23 +300,23 @@ def from_bytes(cls: Type[T_CurdleProofsProof], b: BufReader, n: int) -> T_Curdle def shuffle_permute_and_commit_input( crs: CurdleproofsCrs, - vec_R: List[PointProjective], - vec_S: List[PointProjective], + vec_R: List[G1Point], + vec_S: List[G1Point], permutation: List[int], - k: Fr, -) -> Tuple[List[PointProjective], List[PointProjective], PointProjective, List[Fr]]: + k: Scalar, +) -> Tuple[List[G1Point], List[G1Point], G1Point, List[Scalar]]: ell = len(crs.vec_G) - vec_T = [multiply(R, int(k)) for R in vec_R] - vec_U = [multiply(S, int(k)) for S in vec_S] + vec_T = [R * k for R in vec_R] + vec_U = [S * k for S in vec_S] vec_T = get_permutation(vec_T, permutation) vec_U = get_permutation(vec_U, permutation) - range_as_fr = [Fr(i) for i in range(ell)] + range_as_fr = [Scalar(i) for i in range(ell)] sigma_ell = get_permutation(range_as_fr, permutation) vec_m_blinders = generate_blinders(N_BLINDERS) - M = add(compute_MSM(crs.vec_G, sigma_ell), compute_MSM(crs.vec_H, vec_m_blinders)) + M = compute_MSM(crs.vec_G, sigma_ell) + compute_MSM(crs.vec_H, vec_m_blinders) return vec_T, vec_U, M, vec_m_blinders @@ -338,11 +327,11 @@ def shuffle_permute_and_commit_input( class VerifierInput: def __init__( self, - vec_R: List[PointProjective], - vec_S: List[PointProjective], - vec_T: List[PointProjective], - vec_U: List[PointProjective], - M: PointProjective, + vec_R: List[G1Point], + vec_S: List[G1Point], + vec_T: List[G1Point], + vec_U: List[G1Point], + M: G1Point, ) -> None: self.vec_R = vec_R self.vec_S = vec_S diff --git a/curdleproofs/curdleproofs/curdleproofs_transcript.py 
b/curdleproofs/curdleproofs/curdleproofs_transcript.py index 393ad79..e37917a 100644 --- a/curdleproofs/curdleproofs/curdleproofs_transcript.py +++ b/curdleproofs/curdleproofs/curdleproofs_transcript.py @@ -1,9 +1,7 @@ from typing import List -from py_ecc.secp256k1.secp256k1 import bytes_to_int -from py_ecc.optimized_bls12_381.optimized_curve import curve_order from merlin import MerlinTranscript - -from curdleproofs.util import Fr +from py_arkworks_bls12381 import Scalar +from curdleproofs.util import CURVE_ORDER class CurdleproofsTranscript(MerlinTranscript): @@ -14,18 +12,19 @@ def append_list(self, label: bytes, items: List[bytes]) -> None: for item in items: self.append_message(label, item) - def get_and_append_challenge(self, label: bytes) -> Fr: + def get_and_append_challenge(self, label: bytes) -> Scalar: while True: challenge_bytes = self.challenge_bytes(label, 32) - challenge_int = bytes_to_int(challenge_bytes) - if challenge_int >= curve_order: + challenge_int = int.from_bytes(challenge_bytes, byteorder='little') + if challenge_int >= CURVE_ORDER: continue - f = Fr(challenge_int) - if f != Fr.zero(): + # `Scalar.from_le_bytes` will error if value is >= CURVE_ORDER + f = Scalar.from_le_bytes(challenge_bytes) + if not f.is_zero(): self.append(label, challenge_bytes) return f - def get_and_append_challenges(self, label: bytes, n: int) -> List[Fr]: + def get_and_append_challenges(self, label: bytes, n: int) -> List[Scalar]: return [self.get_and_append_challenge(label) for _ in range(0, n)] diff --git a/curdleproofs/curdleproofs/grand_prod.py b/curdleproofs/curdleproofs/grand_prod.py index 162c8f4..e1d881f 100644 --- a/curdleproofs/curdleproofs/grand_prod.py +++ b/curdleproofs/curdleproofs/grand_prod.py @@ -9,27 +9,19 @@ BufReader, g1_to_bytes, fr_to_bytes, + scalar_pow, ) from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List, TypeVar, Type -from curdleproofs.util import ( - PointProjective, - Fr, - field_to_bytes, -)
+from curdleproofs.util import field_to_bytes from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, - neg, - eq, -) +from py_arkworks_bls12381 import G1Point, Scalar T_GrandProductProof = TypeVar("T_GrandProductProof", bound="GrandProductProof") class GrandProductProof: - def __init__(self, C: PointProjective, r_p: Fr, ipa_proof: IPA): + def __init__(self, C: G1Point, r_p: Scalar, ipa_proof: IPA): self.C = C self.r_p = r_p self.ipa_proof = ipa_proof @@ -37,13 +29,13 @@ def __init__(self, C: PointProjective, r_p: Fr, ipa_proof: IPA): @classmethod def new( cls: Type[T_GrandProductProof], - crs_G_vec: List[PointProjective], - crs_H_vec: List[PointProjective], - crs_U: PointProjective, - B: PointProjective, - gprod_result: Fr, - vec_b: List[Fr], - vec_b_blinders: List[Fr], + crs_G_vec: List[G1Point], + crs_H_vec: List[G1Point], + crs_U: G1Point, + B: G1Point, + gprod_result: Scalar, + vec_b: List[Scalar], + vec_b_blinders: List[Scalar], transcript: CurdleproofsTranscript, ) -> T_GrandProductProof: n_blinders = len(vec_b_blinders) @@ -54,12 +46,12 @@ def new( alpha = transcript.get_and_append_challenge(b"gprod_alpha") # Step 2 - vec_c = [Fr.one()] + vec_c = [Scalar(1)] for i in range(0, ell - 1): vec_c.append(vec_c[i] * vec_b[i]) vec_c_blinders = generate_blinders(n_blinders) - C = add(compute_MSM(crs_G_vec, vec_c), compute_MSM(crs_H_vec, vec_c_blinders)) + C = compute_MSM(crs_G_vec, vec_c) + compute_MSM(crs_H_vec, vec_c_blinders) vec_r_b_plus_alpha = [r_b_i + alpha for r_b_i in vec_b_blinders] r_p = inner_product(vec_r_b_plus_alpha, vec_c_blinders) @@ -70,40 +62,37 @@ def new( beta_inv = invert(beta) pow_beta_inv = beta_inv - vec_G_prime: List[PointProjective] = [] + vec_G_prime: List[G1Point] = [] for G_i in crs_G_vec: - G_prime = multiply(G_i, int(pow_beta_inv)) + G_prime = G_i * pow_beta_inv vec_G_prime.append(G_prime) pow_beta_inv *= beta_inv - vec_H_prime = [multiply(H_i, 
int(beta_inv ** (ell + 1))) for H_i in crs_H_vec] + vec_H_prime = [H_i * scalar_pow(beta_inv, ell + 1) for H_i in crs_H_vec] - vec_b_prime: List[Fr] = [] + vec_b_prime: List[Scalar] = [] pow_beta = beta for b_i in vec_b: vec_b_prime.append(b_i * pow_beta) pow_beta *= beta - vec_d: List[Fr] = [] - pow_beta = Fr.one() - vec_beta_powers: List[Fr] = [] + vec_d: List[Scalar] = [] + pow_beta = Scalar(1) + vec_beta_powers: List[Scalar] = [] for b_prime_i in vec_b_prime: vec_d.append(b_prime_i - pow_beta) vec_beta_powers.append(pow_beta) pow_beta *= beta - vec_d_blinders = [(beta ** (ell + 1)) * r_b_i for r_b_i in vec_r_b_plus_alpha] + vec_d_blinders = [scalar_pow(beta, ell + 1) * r_b_i for r_b_i in vec_r_b_plus_alpha] - vec_alphabeta = [alpha * (beta ** (ell + 1)) for _ in range(n_blinders)] - D = add( - add(B, neg(compute_MSM(vec_G_prime, vec_beta_powers))), - compute_MSM(vec_H_prime, vec_alphabeta), - ) + vec_alphabeta = [alpha * scalar_pow(beta, ell + 1) for _ in range(n_blinders)] + D = B - compute_MSM(vec_G_prime, vec_beta_powers) + compute_MSM(vec_H_prime, vec_alphabeta) vec_G = crs_G_vec + crs_H_vec vec_G_prime += vec_H_prime - inner_prod = r_p * (beta ** (ell + 1)) + gprod_result * (beta**ell) - Fr.one() + inner_prod = r_p * scalar_pow(beta, ell + 1) + gprod_result * scalar_pow(beta, ell) - Scalar(1) vec_c += vec_c_blinders vec_d += vec_d_blinders @@ -112,8 +101,8 @@ def new( # print("computed", inner_product(vec_c, vec_d)) assert inner_product(vec_c, vec_d) == inner_prod - assert eq(compute_MSM(vec_G, vec_c), C) - assert eq(compute_MSM(vec_G_prime, vec_d), D) + assert compute_MSM(vec_G, vec_c) == C + assert compute_MSM(vec_G_prime, vec_d) == D ipa_proof = IPA.new( crs_G_vec=vec_G, @@ -131,13 +120,13 @@ def new( def verify( self, - crs_G_vec: List[PointProjective], - crs_H_vec: List[PointProjective], - crs_U: PointProjective, - crs_G_sum: PointProjective, - crs_H_sum: PointProjective, - B: PointProjective, - gprod_result: Fr, + crs_G_vec: List[G1Point], + 
crs_H_vec: List[G1Point], + crs_U: G1Point, + crs_G_sum: G1Point, + crs_H_sum: G1Point, + B: G1Point, + gprod_result: Scalar, n_blinders: int, transcript: CurdleproofsTranscript, msm_accumulator: MSMAccumulator, @@ -157,26 +146,23 @@ def verify( # Step 3 # Build `vec_u` for the optimization trick - vec_u: List[Fr] = [] + vec_u: List[Scalar] = [] pow_beta_inv = beta_inv for _ in range(0, ell): vec_u.append(pow_beta_inv) pow_beta_inv *= beta_inv - vec_u.extend([beta_inv ** (ell + 1) for _ in range(0, n_blinders)]) + vec_u.extend([scalar_pow(beta_inv, ell + 1) for _ in range(0, n_blinders)]) # Compute D - D = add( - add(B, neg(multiply(crs_G_sum, int(beta_inv)))), - multiply(crs_H_sum, int(alpha)), - ) + D = B - crs_G_sum * beta_inv + crs_H_sum * alpha # Step 4 # Build G vec_G = crs_G_vec + crs_H_vec inner_prod = ( - self.r_p * (beta ** (ell + 1)) + gprod_result * (beta**ell) - Fr.one() + self.r_p * scalar_pow(beta, ell + 1) + gprod_result * scalar_pow(beta, ell) - Scalar(1) ) self.ipa_proof.verify( @@ -201,7 +187,7 @@ def to_json(self): def from_json(cls: Type[T_GrandProductProof], json) -> T_GrandProductProof: return cls( C=point_projective_from_json(json["C"]), - r_p=field_from_json(json["r_p"], Fr), + r_p=field_from_json(json["r_p"]), ipa_proof=IPA.from_json(json["ipa_proof"]), ) diff --git a/curdleproofs/curdleproofs/ipa.py b/curdleproofs/curdleproofs/ipa.py index af505ef..e8dadbc 100644 --- a/curdleproofs/curdleproofs/ipa.py +++ b/curdleproofs/curdleproofs/ipa.py @@ -14,8 +14,6 @@ from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List, Tuple, Type, TypeVar from curdleproofs.util import ( - PointProjective, - Fr, field_to_bytes, invert, generate_blinders, @@ -23,13 +21,10 @@ get_verification_scalars_bitstring, ) from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, -) +from py_arkworks_bls12381 import G1Point, Scalar -def 
generate_ipa_blinders(c: List[Fr], d: List[Fr]) -> Tuple[List[Fr], List[Fr]]: +def generate_ipa_blinders(c: List[Scalar], d: List[Scalar]) -> Tuple[List[Scalar], List[Scalar]]: n = len(c) r = generate_blinders(n) @@ -47,8 +42,8 @@ def generate_ipa_blinders(c: List[Fr], d: List[Fr]) -> Tuple[List[Fr], List[Fr]] z += [penultimate_z, last_z] - assert inner_product(r, d) + inner_product(z, c) == Fr.zero() - assert inner_product(r, z) == Fr.zero() + assert inner_product(r, d) + inner_product(z, c) == Scalar(0) + assert inner_product(r, z) == Scalar(0) return (r, z) @@ -59,14 +54,14 @@ def generate_ipa_blinders(c: List[Fr], d: List[Fr]) -> Tuple[List[Fr], List[Fr]] class IPA: def __init__( self, - B_c: PointProjective, - B_d: PointProjective, - vec_L_C: List[PointProjective], - vec_R_C: List[PointProjective], - vec_L_D: List[PointProjective], - vec_R_D: List[PointProjective], - c_final: Fr, - d_final: Fr, + B_c: G1Point, + B_d: G1Point, + vec_L_C: List[G1Point], + vec_R_C: List[G1Point], + vec_L_D: List[G1Point], + vec_R_D: List[G1Point], + c_final: Scalar, + d_final: Scalar, ) -> None: self.B_c = B_c self.B_d = B_d @@ -80,14 +75,14 @@ def __init__( @classmethod def new( cls: Type[T_IPA], - crs_G_vec: List[PointProjective], - crs_G_prime_vec: List[PointProjective], - crs_H: PointProjective, - C: PointProjective, - D: PointProjective, - z: Fr, - vec_c: List[Fr], - vec_d: List[Fr], + crs_G_vec: List[G1Point], + crs_G_prime_vec: List[G1Point], + crs_H: G1Point, + C: G1Point, + D: G1Point, + z: Scalar, + vec_c: List[Scalar], + vec_d: List[Scalar], transcript: CurdleproofsTranscript, ) -> T_IPA: n = len(vec_c) @@ -112,12 +107,12 @@ def new( for i in range(0, n): vec_c[i] = vec_r_c[i] + alpha * vec_c[i] vec_d[i] = vec_r_d[i] + alpha * vec_d[i] - H = multiply(crs_H, int(beta)) + H = crs_H * beta - vec_L_C: List[PointProjective] = [] - vec_R_C: List[PointProjective] = [] - vec_L_D: List[PointProjective] = [] - vec_R_D: List[PointProjective] = [] + vec_L_C: List[G1Point] = [] + 
vec_R_C: List[G1Point] = [] + vec_L_D: List[G1Point] = [] + vec_R_D: List[G1Point] = [] while len(vec_c) > 1: n //= 2 @@ -128,9 +123,9 @@ def new( G_L, G_R = crs_G_vec[:n], crs_G_vec[n:] G_prime_L, G_prime_R = crs_G_prime_vec[:n], crs_G_prime_vec[n:] - L_C = add(compute_MSM(G_R, c_L), multiply(H, int(inner_product(c_L, d_R)))) + L_C = compute_MSM(G_R, c_L) + H * inner_product(c_L, d_R) L_D = compute_MSM(G_prime_L, d_R) - R_C = add(compute_MSM(G_L, c_R), multiply(H, int(inner_product(c_R, d_L)))) + R_C = compute_MSM(G_L, c_R) + H * inner_product(c_R, d_L) R_D = compute_MSM(G_prime_R, d_L) vec_L_C.append(L_C) @@ -147,8 +142,8 @@ def new( for i in range(0, n): c_L[i] += gamma_inv * c_R[i] d_L[i] += gamma * d_R[i] - G_L[i] = add(G_L[i], multiply(G_R[i], int(gamma))) - G_prime_L[i] = add(G_prime_L[i], multiply(G_prime_R[i], int(gamma_inv))) + G_L[i] = G_L[i] + G_R[i] * gamma + G_prime_L[i] = G_prime_L[i] + G_prime_R[i] * gamma_inv vec_c = c_L vec_d = d_L @@ -159,7 +154,7 @@ def new( def verification_scalars( self, n: int, transcript: CurdleproofsTranscript - ) -> Tuple[List[Fr], List[Fr], List[Fr], List[Fr]]: + ) -> Tuple[List[Scalar], List[Scalar], List[Scalar], List[Scalar]]: lg_n = len(self.vec_L_C) if lg_n >= 32: raise Exception("vec_L_C too large") @@ -168,7 +163,7 @@ def verification_scalars( verification_scalars_bitstring = get_verification_scalars_bitstring(n, lg_n) - challenges: List[Fr] = [] + challenges: List[Scalar] = [] for i in range(0, lg_n): transcript.append_list( b"ipa_loop", @@ -180,9 +175,9 @@ def verification_scalars( challenges_inv = [invert(c) for c in challenges] - vec_s: List[Fr] = [] + vec_s: List[Scalar] = [] for i in range(0, n): - vec_s.append(Fr.one()) + vec_s.append(Scalar(1)) for j in verification_scalars_bitstring[i]: vec_s[i] = vec_s[i] * challenges[j] @@ -192,12 +187,12 @@ def verification_scalars( def verify( self, - crs_G_vec: List[PointProjective], - crs_H: PointProjective, - C: PointProjective, - D: PointProjective, - inner_prod: 
Fr, - vec_u: List[Fr], + crs_G_vec: List[G1Point], + crs_H: G1Point, + C: G1Point, + D: G1Point, + inner_prod: Scalar, + vec_u: List[Scalar], transcript: CurdleproofsTranscript, msm_accumulator: MSMAccumulator, ): @@ -222,16 +217,10 @@ def verify( vec_rhs_scalars = vec_c_times_s + [self.c_final * self.d_final * beta] vec_G_H = crs_G_vec + [crs_H] - H = multiply(crs_H, int(beta)) - C_a: PointProjective = add( - add(self.B_c, multiply(C, int(alpha))), - multiply(H, int(alpha * alpha * inner_prod)), - ) + H = crs_H * beta + C_a = self.B_c + C * alpha + H * (alpha * alpha * inner_prod) - point_lhs = add( - add(compute_MSM(self.vec_L_C, vec_gamma), C_a), - compute_MSM(self.vec_R_C, vec_gamma_inv), - ) + point_lhs = compute_MSM(self.vec_L_C, vec_gamma) + C_a + compute_MSM(self.vec_R_C, vec_gamma_inv) msm_accumulator.accumulate_check(point_lhs, vec_G_H, vec_rhs_scalars) @@ -239,11 +228,8 @@ def verify( self.d_final * (s_inv_i * u_i) for (s_inv_i, u_i) in zip(vec_s_inv, vec_u) ] - D_a = add(self.B_d, multiply(D, int(alpha))) - point_lhs = add( - add(compute_MSM(self.vec_L_D, vec_gamma), D_a), - compute_MSM(self.vec_R_D, vec_gamma_inv), - ) + D_a = self.B_d + D * alpha + point_lhs = compute_MSM(self.vec_L_D, vec_gamma) + D_a + compute_MSM(self.vec_R_D, vec_gamma_inv) msm_accumulator.accumulate_check(point_lhs, crs_G_vec, vec_d_div_s) def to_json(self): @@ -267,8 +253,8 @@ def from_json(cls: Type[T_IPA], json) -> T_IPA: vec_R_C=[point_projective_from_json(p) for p in json["vec_R_C"]], vec_L_D=[point_projective_from_json(p) for p in json["vec_L_D"]], vec_R_D=[point_projective_from_json(p) for p in json["vec_R_D"]], - c_final=field_from_json(json["c_final"], Fr), - d_final=field_from_json(json["d_final"], Fr), + c_final=field_from_json(json["c_final"]), + d_final=field_from_json(json["d_final"]), ) def to_bytes(self) -> bytes: diff --git a/curdleproofs/curdleproofs/msm_accumulator.py b/curdleproofs/curdleproofs/msm_accumulator.py index 268804c..28263b3 100644 --- 
a/curdleproofs/curdleproofs/msm_accumulator.py +++ b/curdleproofs/curdleproofs/msm_accumulator.py @@ -1,23 +1,14 @@ -import random -from curdleproofs.util import PointProjective, Fr, affine_to_projective -from typing import Dict, List, Tuple, Union -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - normalize, - add, - Z1, - eq, - FQ, - is_inf, -) +from curdleproofs.util import random_scalar, g1_is_inf, Z1, point_projective_to_bytes +from typing import Dict, List, Tuple +from py_arkworks_bls12381 import G1Point, Scalar def compute_MSM( - bases: List[PointProjective], scalars: Union[List[Fr], List[int]] -) -> PointProjective: - current = Z1 # zero + bases: List[G1Point], scalars: List[Scalar] +) -> G1Point: + current = G1Point.identity() # zero for (base, scalar) in zip(bases, scalars): - current = add(current, multiply(base, int(scalar))) # type: ignore + current = current + (base * scalar) # type: ignore return current @@ -41,38 +32,37 @@ def compute_MSM( class MSMAccumulator: def __init__(self) -> None: self.A_c = Z1 - self.base_scalar_map: Dict[Tuple[int, int], Fr] = {} + self.base_scalar_map: Dict[bytes, Scalar] = {} def accumulate_check( self, - C: PointProjective, - bases: List[PointProjective], - scalars: Union[List[Fr], List[int]], + C: G1Point, + bases: List[G1Point], + scalars: List[Scalar], ) -> None: - random_factor = Fr(random.randint(1, Fr.field_modulus)) + random_factor = random_scalar() - self.A_c = add(self.A_c, multiply(C, int(random_factor))) + self.A_c = self.A_c + C * random_factor for (base, scalar) in zip(bases, scalars): - # print("base", base) - if is_inf(base): + # note: optimization, zero bases contribute nothing to the MSM + if g1_is_inf(base): continue - base_affine_int_untyped = tuple(map(int, normalize(base))) - base_affine_int = (base_affine_int_untyped[0], base_affine_int_untyped[1]) + + # Note: G1Point is not hashable so a different representation is necessary to index base_scalar_map + # TODO: Compressing and 
decompressing each point is unnecessary, find a different hashable representation + base_comp = point_projective_to_bytes(base) # print("base_affine_int", base_affine_int) - if base_affine_int not in self.base_scalar_map: - self.base_scalar_map[base_affine_int] = Fr.zero() - self.base_scalar_map[base_affine_int] = self.base_scalar_map[ - base_affine_int - ] + random_factor * Fr(scalar) + if base_comp not in self.base_scalar_map: + self.base_scalar_map[base_comp] = Scalar(0) + self.base_scalar_map[base_comp] = self.base_scalar_map[base_comp] + random_factor * scalar def verify(self): bases: List[Tuple[int, int]] - scalars: List[Fr] + scalars: List[Scalar] bases, scalars = map(list, zip(*self.base_scalar_map.items())) # type: ignore computed = compute_MSM( - list(map(lambda t: affine_to_projective((FQ(t[0]), FQ(t[1]))), bases)), - list(map(int, scalars)), + list(map(lambda t: G1Point.from_compressed_bytes_unchecked(t), bases)), + scalars, ) - # print("bases", bases, "scalars", scalars, "computed", normalize(computed), "expected", normalize(self.A_c), "eq", eq(computed, self.A_c)) - assert eq(computed, self.A_c) + assert computed == self.A_c diff --git a/curdleproofs/curdleproofs/opening.py b/curdleproofs/curdleproofs/opening.py index 1e2d670..f265306 100644 --- a/curdleproofs/curdleproofs/opening.py +++ b/curdleproofs/curdleproofs/opening.py @@ -2,8 +2,6 @@ from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from curdleproofs.util import ( - Fr, - PointProjective, field_from_json, field_to_json, generate_blinders, @@ -13,13 +11,9 @@ BufReader, g1_to_bytes, fr_to_bytes, -) -from py_ecc.optimized_bls12_381.optimized_curve import ( G1, - multiply, - add, - eq, ) +from py_arkworks_bls12381 import G1Point, Scalar T_TrackerOpeningProof = TypeVar("T_TrackerOpeningProof", bound="TrackerOpeningProof") @@ -28,9 +22,9 @@ class TrackerOpeningProof: def __init__( self, - A: PointProjective, - B: PointProjective, - s: Fr,
) -> None: self.A = A self.B = B @@ -39,15 +33,15 @@ def __init__( @classmethod def new( cls: Type[T_TrackerOpeningProof], - k_r_G: PointProjective, - r_G: PointProjective, - k_G: PointProjective, - k: Fr, + k_r_G: G1Point, + r_G: G1Point, + k_G: G1Point, + k: Scalar, transcript: CurdleproofsTranscript, ) -> T_TrackerOpeningProof: blinder = generate_blinders(1)[0] - A = multiply(G1, int(blinder)) - B = multiply(r_G, int(blinder)) + A = G1 * blinder + B = r_G * blinder transcript.append_list( b"tracker_opening_proof", @@ -64,9 +58,9 @@ def new( def verify( self, transcript: CurdleproofsTranscript, - k_r_G: PointProjective, - r_G: PointProjective, - k_G: PointProjective, + k_r_G: G1Point, + r_G: G1Point, + k_G: G1Point, ): transcript.append_list( b"tracker_opening_proof", @@ -76,10 +70,10 @@ def verify( b"tracker_opening_proof_challenge" ) - Aprime = add(multiply(G1, int(self.s)), multiply(k_G, int(challenge))) - Bprime = add(multiply(r_G, int(self.s)), multiply(k_r_G, int(challenge))) + Aprime = G1 * self.s + k_G * challenge + Bprime = r_G * self.s + k_r_G * challenge - assert eq(Aprime, self.A) and eq(Bprime, self.B) + assert Aprime == self.A and Bprime == self.B def to_json(self): return { @@ -93,7 +87,7 @@ def from_json(cls: Type[T_TrackerOpeningProof], json) -> T_TrackerOpeningProof: return cls( point_projective_from_json(json["A"]), point_projective_from_json(json["B"]), - field_from_json(json["s"], Fr), + field_from_json(json["s"]), ) def to_bytes(self) -> bytes: diff --git a/curdleproofs/curdleproofs/same_msm.py b/curdleproofs/curdleproofs/same_msm.py index 6c85ff6..5f4740a 100644 --- a/curdleproofs/curdleproofs/same_msm.py +++ b/curdleproofs/curdleproofs/same_msm.py @@ -15,12 +15,9 @@ ) from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List, Tuple, Type, TypeVar -from curdleproofs.util import PointProjective, Fr, invert +from curdleproofs.util import invert from curdleproofs.msm_accumulator import MSMAccumulator, 
compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, -) +from py_arkworks_bls12381 import G1Point, Scalar T_SameMSMProof = TypeVar("T_SameMSMProof", bound="SameMSMProof") @@ -28,16 +25,16 @@ class SameMSMProof: def __init__( self, - B_a: PointProjective, - B_t: PointProjective, - B_u: PointProjective, - vec_L_A: List[PointProjective], - vec_L_T: List[PointProjective], - vec_L_U: List[PointProjective], - vec_R_A: List[PointProjective], - vec_R_T: List[PointProjective], - vec_R_U: List[PointProjective], - x_final: Fr, + B_a: G1Point, + B_t: G1Point, + B_u: G1Point, + vec_L_A: List[G1Point], + vec_L_T: List[G1Point], + vec_L_U: List[G1Point], + vec_R_A: List[G1Point], + vec_R_T: List[G1Point], + vec_R_U: List[G1Point], + x_final: Scalar, ) -> None: self.B_a = B_a self.B_t = B_t @@ -53,25 +50,25 @@ def __init__( @classmethod def new( cls: Type[T_SameMSMProof], - crs_G_vec: List[PointProjective], - A: PointProjective, - Z_t: PointProjective, - Z_u: PointProjective, - vec_T: List[PointProjective], - vec_U: List[PointProjective], - vec_x: List[Fr], + crs_G_vec: List[G1Point], + A: G1Point, + Z_t: G1Point, + Z_u: G1Point, + vec_T: List[G1Point], + vec_U: List[G1Point], + vec_x: List[Scalar], transcript: CurdleproofsTranscript, ) -> T_SameMSMProof: n = len(vec_x) lg_n = int(log2(n)) assert 2**lg_n == n - vec_L_T: List[PointProjective] = [] - vec_R_T: List[PointProjective] = [] - vec_L_U: List[PointProjective] = [] - vec_R_U: List[PointProjective] = [] - vec_L_A: List[PointProjective] = [] - vec_R_A: List[PointProjective] = [] + vec_L_T: List[G1Point] = [] + vec_R_T: List[G1Point] = [] + vec_L_U: List[G1Point] = [] + vec_R_U: List[G1Point] = [] + vec_L_A: List[G1Point] = [] + vec_R_A: List[G1Point] = [] vec_r = generate_blinders(n) @@ -124,9 +121,9 @@ def new( for i in range(0, n): x_L[i] += gamma_inv * x_R[i] - T_L[i] = add(T_L[i], multiply(T_R[i], int(gamma))) - U_L[i] = add(U_L[i], multiply(U_R[i], int(gamma))) - G_L[i] = add(G_L[i], 
multiply(G_R[i], int(gamma))) + T_L[i] = T_L[i] + T_R[i] * gamma + U_L[i] = U_L[i] + U_R[i] * gamma + G_L[i] = G_L[i] + G_R[i] * gamma vec_x = x_L vec_T = T_L @@ -148,7 +145,7 @@ def new( def verification_scalars( self, n: int, transcript: CurdleproofsTranscript - ) -> Tuple[List[Fr], List[Fr], List[Fr]]: + ) -> Tuple[List[Scalar], List[Scalar], List[Scalar]]: lg_n = len(self.vec_L_A) if lg_n >= 32: raise Exception("lg_n >= 32") @@ -157,7 +154,7 @@ def verification_scalars( bitstring = get_verification_scalars_bitstring(n, lg_n) - challenges: List[Fr] = [] + challenges: List[Scalar] = [] for i in range(0, lg_n): transcript.append_list( b"same_msm_loop", @@ -176,9 +173,9 @@ def verification_scalars( challenges_inv = list(map(invert, challenges)) - vec_s: List[Fr] = [] + vec_s: List[Scalar] = [] for i in range(0, n): - vec_s.append(Fr.one()) + vec_s.append(Scalar(1)) for j in bitstring[i]: vec_s[i] *= challenges[j] @@ -186,12 +183,12 @@ def verification_scalars( def verify( self, - crs_G_vec: List[PointProjective], - A: PointProjective, - Z_t: PointProjective, - Z_u: PointProjective, - vec_T: List[PointProjective], - vec_U: List[PointProjective], + crs_G_vec: List[G1Point], + A: G1Point, + Z_t: G1Point, + Z_u: G1Point, + vec_T: List[G1Point], + vec_U: List[G1Point], transcript: CurdleproofsTranscript, msm_accumulator: MSMAccumulator, ): @@ -215,26 +212,17 @@ def verify( vec_x_times_s = [self.x_final * s_i for s_i in vec_s] - A_a = add(self.B_a, multiply(A, int(alpha))) - Z_t_a = add(self.B_t, multiply(Z_t, int(alpha))) - Z_u_a = add(self.B_u, multiply(Z_u, int(alpha))) + A_a = self.B_a + A * alpha + Z_t_a = self.B_t + Z_t * alpha + Z_u_a = self.B_u + Z_u * alpha - point_lhs = add( - add(compute_MSM(self.vec_L_A, vec_gamma), A_a), - compute_MSM(self.vec_R_A, vec_gamma_inv), - ) + point_lhs = compute_MSM(self.vec_L_A, vec_gamma) + A_a + compute_MSM(self.vec_R_A, vec_gamma_inv) msm_accumulator.accumulate_check(point_lhs, crs_G_vec, vec_x_times_s) - point_lhs = add( - 
add(compute_MSM(self.vec_L_T, vec_gamma), Z_t_a), - compute_MSM(self.vec_R_T, vec_gamma_inv), - ) + point_lhs = compute_MSM(self.vec_L_T, vec_gamma) + Z_t_a + compute_MSM(self.vec_R_T, vec_gamma_inv) msm_accumulator.accumulate_check(point_lhs, vec_T, vec_x_times_s) - point_lhs = add( - add(compute_MSM(self.vec_L_U, vec_gamma), Z_u_a), - compute_MSM(self.vec_R_U, vec_gamma_inv), - ) + point_lhs = compute_MSM(self.vec_L_U, vec_gamma) + Z_u_a + compute_MSM(self.vec_R_U, vec_gamma_inv) msm_accumulator.accumulate_check(point_lhs, vec_U, vec_x_times_s) def to_json(self): @@ -263,7 +251,7 @@ def from_json(cls: Type[T_SameMSMProof], json) -> T_SameMSMProof: vec_R_A=[point_projective_from_json(R_A) for R_A in json["vec_R_A"]], vec_R_T=[point_projective_from_json(R_T) for R_T in json["vec_R_T"]], vec_R_U=[point_projective_from_json(R_U) for R_U in json["vec_R_U"]], - x_final=field_from_json(json["x_final"], Fr), + x_final=field_from_json(json["x_final"]), ) def to_bytes(self) -> bytes: diff --git a/curdleproofs/curdleproofs/same_perm.py b/curdleproofs/curdleproofs/same_perm.py index 548f9d5..390d61c 100644 --- a/curdleproofs/curdleproofs/same_perm.py +++ b/curdleproofs/curdleproofs/same_perm.py @@ -3,8 +3,6 @@ from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List, Type, TypeVar from curdleproofs.util import ( - PointProjective, - Fr, field_to_bytes, point_projective_from_json, point_projective_to_json, @@ -14,33 +12,29 @@ g1_to_bytes, ) from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( - multiply, - add, - neg, -) from operator import mul as op_mul +from py_arkworks_bls12381 import G1Point, Scalar T_SAME_PERM_PROOF = TypeVar("T_SAME_PERM_PROOF", bound="SamePermutationProof") class SamePermutationProof: - def __init__(self, B: PointProjective, grand_prod_proof: GrandProductProof) -> None: + def __init__(self, B: G1Point, grand_prod_proof: GrandProductProof) 
-> None: self.B = B self.grand_prod_proof = grand_prod_proof @classmethod def new( cls: Type[T_SAME_PERM_PROOF], - crs_G_vec: List[PointProjective], - crs_H_vec: List[PointProjective], - crs_U: PointProjective, - A: PointProjective, - M: PointProjective, - vec_a: List[Fr], + crs_G_vec: List[G1Point], + crs_H_vec: List[G1Point], + crs_U: G1Point, + A: G1Point, + M: G1Point, + vec_a: List[Scalar], permutation: List[int], - vec_a_blinders: List[Fr], - vec_m_blinders: List[Fr], + vec_a_blinders: List[Scalar], + vec_m_blinders: List[Scalar], transcript: CurdleproofsTranscript, ) -> T_SAME_PERM_PROOF: n_blinders = len(vec_a_blinders) @@ -53,14 +47,12 @@ def new( vec_a_permuted = get_permutation(vec_a, permutation) permuted_polynomial_factors = [ - a + Fr(m) * alpha + beta for (a, m) in zip(vec_a_permuted, permutation) + a + Scalar(m) * alpha + beta for (a, m) in zip(vec_a_permuted, permutation) ] - gprod_result = reduce(op_mul, permuted_polynomial_factors, Fr.one()) + gprod_result = reduce(op_mul, permuted_polynomial_factors, Scalar(1)) vec_beta_repeated = [beta] * ell - B = add( - add(A, multiply(M, int(alpha))), compute_MSM(crs_G_vec, vec_beta_repeated) - ) + B = (A + M * alpha) + compute_MSM(crs_G_vec, vec_beta_repeated) vec_b_blinders = [ vec_a_blinders[i] + alpha * vec_m_blinders[i] for i in range(0, n_blinders) @@ -81,14 +73,14 @@ def new( def verify( self, - crs_G_vec: List[PointProjective], - crs_H_vec: List[PointProjective], - crs_U: PointProjective, - crs_G_sum: PointProjective, - crs_H_sum: PointProjective, - A: PointProjective, - M: PointProjective, - vec_a: List[Fr], + crs_G_vec: List[G1Point], + crs_H_vec: List[G1Point], + crs_U: G1Point, + crs_G_sum: G1Point, + crs_H_sum: G1Point, + A: G1Point, + M: G1Point, + vec_a: List[Scalar], n_blinders: int, transcript: CurdleproofsTranscript, msm_accumulator: MSMAccumulator, @@ -104,12 +96,12 @@ def verify( # Step 2 polynomial_factors = [ - a + Fr(i) * alpha + beta for (a, i) in zip(vec_a, range(0, ell)) + a + 
Scalar(i) * alpha + beta for (a, i) in zip(vec_a, range(0, ell)) ] - gprod_result = reduce(op_mul, polynomial_factors, Fr.one()) + gprod_result = reduce(op_mul, polynomial_factors, Scalar(1)) vec_beta_repeated = [beta] * ell msm_accumulator.accumulate_check( - add(add(self.B, neg(A)), neg(multiply(M, int(alpha)))), + (self.B - A) - (M * alpha), crs_G_vec, vec_beta_repeated, ) diff --git a/curdleproofs/curdleproofs/same_scalar.py b/curdleproofs/curdleproofs/same_scalar.py index 37c6bb4..8bc799c 100644 --- a/curdleproofs/curdleproofs/same_scalar.py +++ b/curdleproofs/curdleproofs/same_scalar.py @@ -1,22 +1,19 @@ -import random from curdleproofs.commitment import GroupCommitment -from curdleproofs.util import field_from_json, field_to_json, points_projective_to_bytes +from curdleproofs.util import field_from_json, field_to_json, points_projective_to_bytes, random_scalar from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import Type, TypeVar from curdleproofs.util import ( - PointProjective, - Fr, BufReader, fr_to_bytes, ) -from py_ecc.optimized_bls12_381.optimized_curve import multiply +from py_arkworks_bls12381 import G1Point, Scalar T_SameScalarProof = TypeVar("T_SameScalarProof", bound="SameScalarProof") class SameScalarProof: def __init__( - self, cm_A: GroupCommitment, cm_B: GroupCommitment, z_k: Fr, z_t: Fr, z_u: Fr + self, cm_A: GroupCommitment, cm_B: GroupCommitment, z_k: Scalar, z_t: Scalar, z_u: Scalar ) -> None: self.cm_A = cm_A self.cm_B = cm_B @@ -27,24 +24,24 @@ def __init__( @classmethod def new( cls: Type[T_SameScalarProof], - crs_G_t: PointProjective, - crs_G_u: PointProjective, - crs_H: PointProjective, - R: PointProjective, - S: PointProjective, + crs_G_t: G1Point, + crs_G_u: G1Point, + crs_H: G1Point, + R: G1Point, + S: G1Point, cm_T: GroupCommitment, cm_U: GroupCommitment, - k: Fr, - r_t: Fr, - r_u: Fr, + k: Scalar, + r_t: Scalar, + r_u: Scalar, transcript: CurdleproofsTranscript, ) -> T_SameScalarProof: - r_a = 
Fr(random.randint(1, Fr.field_modulus)) - r_b = Fr(random.randint(1, Fr.field_modulus)) - r_k = Fr(random.randint(1, Fr.field_modulus)) + r_a = random_scalar() + r_b = random_scalar() + r_k = random_scalar() - cm_A = GroupCommitment.new(crs_G_t, crs_H, multiply(R, int(r_k)), r_a) - cm_B = GroupCommitment.new(crs_G_u, crs_H, multiply(S, int(r_k)), r_b) + cm_A = GroupCommitment.new(crs_G_t, crs_H, R * r_k, r_a) + cm_B = GroupCommitment.new(crs_G_u, crs_H, S * r_k, r_b) transcript.append_list( b"sameexp_points", @@ -73,11 +70,11 @@ def new( def verify( self, - crs_G_t: PointProjective, - crs_G_u: PointProjective, - crs_H: PointProjective, - R: PointProjective, - S: PointProjective, + crs_G_t: G1Point, + crs_G_u: G1Point, + crs_H: G1Point, + R: G1Point, + S: G1Point, cm_T: GroupCommitment, cm_U: GroupCommitment, transcript: CurdleproofsTranscript, @@ -102,10 +99,10 @@ def verify( alpha = transcript.get_and_append_challenge(b"same_scalar_alpha") expected_1 = GroupCommitment.new( - crs_G_t, crs_H, multiply(R, int(self.z_k)), self.z_t + crs_G_t, crs_H, R * self.z_k, self.z_t ) expected_2 = GroupCommitment.new( - crs_G_u, crs_H, multiply(S, int(self.z_k)), self.z_u + crs_G_u, crs_H, S * self.z_k, self.z_u ) computed_1 = self.cm_A + (cm_T * alpha) @@ -127,9 +124,9 @@ def from_json(cls: Type[T_SameScalarProof], json) -> T_SameScalarProof: return cls( cm_A=GroupCommitment.from_json(json["cm_A"]), cm_B=GroupCommitment.from_json(json["cm_B"]), - z_k=field_from_json(json["z_k"], Fr), - z_t=field_from_json(json["z_t"], Fr), - z_u=field_from_json(json["z_u"], Fr), + z_k=field_from_json(json["z_k"]), + z_t=field_from_json(json["z_t"]), + z_u=field_from_json(json["z_u"]), ) def to_bytes(self) -> bytes: diff --git a/curdleproofs/curdleproofs/test_curdleproofs.py b/curdleproofs/curdleproofs/test_curdleproofs.py index 6ddc1f5..fda202b 100644 --- a/curdleproofs/curdleproofs/test_curdleproofs.py +++ b/curdleproofs/curdleproofs/test_curdleproofs.py @@ -5,22 +5,22 @@ from curdleproofs.crs 
import CurdleproofsCrs from curdleproofs.grand_prod import GrandProductProof from curdleproofs.opening import TrackerOpeningProof -from curdleproofs.util import affine_to_projective, get_random_point, get_permutation +from curdleproofs.util import get_random_point, get_permutation from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from typing import List from curdleproofs.util import ( - Fr, generate_blinders, inner_product, -) -from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM -from py_ecc.optimized_bls12_381.optimized_curve import ( + field_to_bytes, + point_projective_to_bytes, + random_scalar, + scalar_pow, G1, - multiply, - add, Z1, + CURVE_ORDER, + BLSPubkey, ) -from py_ecc.bls.g2_primitives import G1_to_pubkey +from curdleproofs.msm_accumulator import MSMAccumulator, compute_MSM from curdleproofs.ipa import IPA from curdleproofs.same_perm import SamePermutationProof from curdleproofs.same_msm import SameMSMProof @@ -39,7 +39,200 @@ IsValidWhiskOpeningProof, IsValidWhiskShuffleProof, ) -from eth_typing import BLSPubkey +from py_arkworks_bls12381 import G1Point, Scalar + + +def test_py_arkworks_bls12381_api(): + print(dir(G1Point)) + assert dir(G1Point) == [ + '__add__', + '__class__', + '__delattr__', + '__dir__', + '__doc__', + '__eq__', + '__format__', + '__ge__', + '__getattribute__', + '__gt__', + '__hash__', + '__init__', + '__init_subclass__', + '__le__', + '__lt__', + '__module__', + '__mul__', + '__ne__', + '__neg__', + '__new__', + '__radd__', + '__reduce__', + '__reduce_ex__', + '__repr__', + '__rmul__', + '__rsub__', + '__setattr__', + '__sizeof__', + '__str__', + '__sub__', + '__subclasshook__', + 'from_compressed_bytes', + 'from_compressed_bytes_unchecked', + 'identity', + 'multiexp_unchecked', + 'to_compressed_bytes' + ] + + print(dir(Scalar)) + assert dir(Scalar) == [ + '__add__', + '__class__', + '__delattr__', + '__dir__', + '__doc__', + '__eq__', + '__format__', + '__ge__', + '__getattribute__', + 
'__gt__', + '__hash__', + '__init__', + '__init_subclass__', + '__le__', + '__lt__', + '__module__', + '__mul__', + '__ne__', + '__neg__', + '__new__', + '__radd__', + '__reduce__', + '__reduce_ex__', + '__repr__', + '__rmul__', + '__rsub__', + '__setattr__', + '__sizeof__', + '__str__', + '__sub__', + '__subclasshook__', + 'from_le_bytes', + 'inverse', + 'is_zero', + 'square', + 'to_le_bytes' + ] + + +# Copied from https://pypi.org/project/py-arkworks-bls12381/ +def test_py_arkworks_bls12381_g1points(): + # G1Point and G2Point have the same methods implemented on them + # For brevity, I will only show one method using G1Point and G2Point + # The rest of the code will just use G1Point + + # Point initialization -- This will be initialized to the g1 generator + g1_generator = G1Point() + + # Identity element + identity = G1Point.identity() + + # Equality -- We override eq and neq operators + assert g1_generator == g1_generator + assert g1_generator != identity + + # Printing an element -- We override __str__ so when we print + # an element it prints in hex + print("identity: ", identity) + print("g1 generator: ", g1_generator) + + # Point Addition/subtraction/Negation -- We override the add/sub/neg operators + gen = G1Point() + double_gen = gen + gen + assert double_gen - gen == gen + neg_gen = -gen + assert neg_gen + gen == identity + + # Scalar multiplication + # + scalar = Scalar(4) + four_gen = gen * scalar + assert four_gen == gen + gen + gen + gen + + # Serialisation + # + # serialising to/from a g1 point + # We don't expose the uncompressed form + # because it seems like its not needed + compressed_bytes = gen.to_compressed_bytes() + deserialised_point = G1Point.from_compressed_bytes(compressed_bytes) + # If the bytes being received are trusted, we can avoid + # doing subgroup checks + deserialised_point_unchecked = G1Point.from_compressed_bytes_unchecked(compressed_bytes) + assert deserialised_point == deserialised_point_unchecked + assert deserialised_point 
== gen + + # Serialization + assert str(gen) == "97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" + assert point_projective_to_bytes(gen) == bytes.fromhex("97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb") + + # Indexing + point_a = G1 * Scalar(4) + point_a_copy = G1 * Scalar(4) + point_map = {} + # G1Point should not be hashable + with pytest.raises(TypeError): + point_map[point_a] = True + # Able to index by serialized form (inefficient) + point_map[point_projective_to_bytes(point_a)] = True + assert point_map[point_projective_to_bytes(point_a_copy)] + + +def test_py_arkworks_bls12381_scalar(): + scalar = Scalar(4) + assert field_to_bytes(scalar) == bytes.fromhex("0400000000000000000000000000000000000000000000000000000000000000") + + assert CURVE_ORDER == 52435875175126190479447740508185965837690552500527637822603658699938581184513 + + # Why does Scalar does not support values over 2**128? 
+ Scalar(2**128 - 1) + with pytest.raises(OverflowError): + Scalar(2**256 - 1) + with pytest.raises(OverflowError): + Scalar(CURVE_ORDER - 1) + + # Deserialize from big integers + Scalar.from_le_bytes((CURVE_ORDER - 1).to_bytes(32, 'little')) + with pytest.raises(ValueError): + # Errors with `ValueError: Err From Rust: serialised data seems to be invalid` + Scalar.from_le_bytes((CURVE_ORDER).to_bytes(32, 'little')) + + +def test_scalar_pow(): + helper_test_scalar_pow(1, 1) + helper_test_scalar_pow(4, 1) + helper_test_scalar_pow(4, 2) + helper_test_scalar_pow(4, 3) + helper_test_scalar_pow(4, 4) + helper_test_scalar_pow(100, 2) + helper_test_scalar_pow(42, 6) + scalar_pow(random_scalar(), 128) + + +def helper_test_scalar_pow(base: int, exponent: int): + res_scalar = scalar_pow(Scalar(base), exponent) + res = int.from_bytes(res_scalar.to_le_bytes(), byteorder='little') + assert res == base ** exponent + + +def test_utils_point_projective_to_bytes(): + scalar = Scalar(99) + point = G1 * scalar + point_projective_to_bytes(point) == bytes.fromhex("aa10e1055b14a89cc3261699524998732fddc4f30c76c1057eb83732a01416643eb015a932e4080c86f42e485973d240") + + +def test_utils_get_random_point(): + point = get_random_point() + assert point + G1 - point == G1 def test_ipa(): @@ -50,14 +243,13 @@ def test_ipa(): crs_G_vec = [get_random_point() for _ in range(0, n)] vec_u = generate_blinders(n) - crs_G_prime_vec = [multiply(G_i, int(u_i)) for (G_i, u_i) in zip(crs_G_vec, vec_u)] + crs_G_prime_vec = [G_i * u_i for (G_i, u_i) in zip(crs_G_vec, vec_u)] crs_H = get_random_point() - vec_b = [Fr(random.randint(1, Fr.field_modulus)) for _ in range(0, n)] - vec_c = [Fr(random.randint(1, Fr.field_modulus)) for _ in range(0, n)] + vec_b = [random_scalar() for _ in range(0, n)] + vec_c = [random_scalar() for _ in range(0, n)] z = inner_product(vec_b, vec_c) - print("prod = ", vec_b, vec_c, z) B = compute_MSM(crs_G_vec, vec_b) C = compute_MSM(crs_G_prime_vec, vec_c) @@ -109,7 +301,7 @@ def 
test_ipa(): crs_H=crs_H, C=B, D=C, - inner_prod=z + Fr.one(), + inner_prod=z + Scalar(1), vec_u=vec_u, transcript=transcript_wrong, msm_accumulator=msm_accumulator_wrong, @@ -127,15 +319,15 @@ def test_gprod(): crs_G_vec = [get_random_point() for _ in range(ell)] crs_H_vec = [get_random_point() for _ in range(n_blinders)] crs_U = get_random_point() - crs_G_sum = reduce(add, crs_G_vec, Z1) - crs_H_sum = reduce(add, crs_H_vec, Z1) + crs_G_sum = reduce(lambda a, b: a + b, crs_G_vec, Z1) + crs_H_sum = reduce(lambda a, b: a + b, crs_H_vec, Z1) - vec_b = [Fr(random.randint(1, Fr.field_modulus - 1)) for _ in range(ell)] + vec_b = [random_scalar() for _ in range(ell)] vec_b_blinders = generate_blinders(n_blinders) - gprod_result = reduce(operator.mul, vec_b, Fr.one()) + gprod_result = reduce(operator.mul, vec_b, Scalar(1)) - B = add(compute_MSM(crs_G_vec, vec_b), compute_MSM(crs_H_vec, vec_b_blinders)) + B = compute_MSM(crs_G_vec, vec_b) + compute_MSM(crs_H_vec, vec_b_blinders) gprod_proof = GrandProductProof.new( crs_G_vec=crs_G_vec, @@ -179,7 +371,7 @@ def test_gprod(): crs_G_sum=crs_G_sum, crs_H_sum=crs_H_sum, B=B, - gprod_result=gprod_result + Fr.one(), + gprod_result=gprod_result + Scalar(1), n_blinders=n_blinders, transcript=transcript_verifier, msm_accumulator=msm_accumulator, @@ -197,7 +389,7 @@ def test_gprod(): crs_U=crs_U, crs_G_sum=crs_G_sum, crs_H_sum=crs_H_sum, - B=multiply(B, 3), + B=B * Scalar(3), gprod_result=gprod_result, n_blinders=n_blinders, transcript=transcript_verifier, @@ -218,8 +410,8 @@ def test_same_permutation_proof(): crs_H_vec = [get_random_point() for _ in range(0, n_blinders)] crs_U = get_random_point() - crs_G_sum = reduce(add, crs_G_vec, Z1) - crs_H_sum = reduce(add, crs_H_vec, Z1) + crs_G_sum = reduce(lambda a, b: a + b, crs_G_vec, Z1) + crs_H_sum = reduce(lambda a, b: a + b, crs_H_vec, Z1) vec_a_blinders = generate_blinders(n_blinders) vec_m_blinders = generate_blinders(n_blinders) @@ -227,13 +419,11 @@ def 
test_same_permutation_proof(): permutation = list(range(0, ell)) random.shuffle(permutation) - vec_a = [Fr(random.randint(1, Fr.field_modulus - 1)) for _ in range(0, ell)] + vec_a = [random_scalar() for _ in range(0, ell)] vec_a_permuted = get_permutation(vec_a, permutation) - A = add( - compute_MSM(crs_G_vec, vec_a_permuted), compute_MSM(crs_H_vec, vec_a_blinders) - ) - M = add(compute_MSM(crs_G_vec, permutation), compute_MSM(crs_H_vec, vec_m_blinders)) + A = compute_MSM(crs_G_vec, vec_a_permuted) + compute_MSM(crs_H_vec, vec_a_blinders) + M = compute_MSM(crs_G_vec, map(Scalar, permutation)) + compute_MSM(crs_H_vec, vec_m_blinders) same_perm_proof = SamePermutationProof.new( crs_G_vec=crs_G_vec, @@ -278,7 +468,7 @@ def test_same_msm(): vec_T = [get_random_point() for _ in range(0, n)] vec_U = [get_random_point() for _ in range(0, n)] - vec_x = [Fr(random.randint(1, Fr.field_modulus)) for _ in range(0, n)] + vec_x = [random_scalar() for _ in range(0, n)] A = compute_MSM(crs_G_vec, vec_x) Z_t = compute_MSM(vec_T, vec_x) @@ -324,12 +514,12 @@ def test_same_scalar_arg(): R = get_random_point() S = get_random_point() - k = Fr(random.randint(1, Fr.field_modulus)) - r_t = Fr(random.randint(1, Fr.field_modulus)) - r_u = Fr(random.randint(1, Fr.field_modulus)) + k = random_scalar() + r_t = random_scalar() + r_u = random_scalar() - cm_T = GroupCommitment.new(crs_G_t, crs_H, multiply(R, int(k)), r_t) - cm_U = GroupCommitment.new(crs_G_u, crs_H, multiply(S, int(k)), r_u) + cm_T = GroupCommitment.new(crs_G_t, crs_H, R * k, r_t) + cm_U = GroupCommitment.new(crs_G_u, crs_H, S * k, r_u) proof = SameScalarProof.new( crs_G_t=crs_G_t, @@ -367,12 +557,12 @@ def test_group_commit(): A = get_random_point() B = get_random_point() - r_a = Fr(random.randint(1, Fr.field_modulus)) - r_b = Fr(random.randint(1, Fr.field_modulus)) + r_a = random_scalar() + r_b = random_scalar() cm_a = GroupCommitment.new(crs_G, crs_H, A, r_a) cm_b = GroupCommitment.new(crs_G, crs_H, B, r_b) - cm_a_b = 
GroupCommitment.new(crs_G, crs_H, add(A, B), r_a + r_b) + cm_a_b = GroupCommitment.new(crs_G, crs_H, A + B, r_a + r_b) assert cm_a + cm_b == cm_a_b @@ -385,7 +575,7 @@ def test_shuffle_argument(): permutation = list(range(ell)) random.shuffle(permutation) - k = Fr(random.randint(1, Fr.field_modulus)) + k = random_scalar() vec_R = [get_random_point() for _ in range(ell)] vec_S = [get_random_point() for _ in range(ell)] @@ -421,7 +611,7 @@ def test_bad_shuffle_argument(): permutation = list(range(ell)) random.shuffle(permutation) - k = Fr(random.randint(1, Fr.field_modulus)) + k = random_scalar() vec_R = [get_random_point() for _ in range(ell)] vec_S = [get_random_point() for _ in range(ell)] @@ -461,12 +651,12 @@ def test_bad_shuffle_argument(): ) shuffle_proof.verify( - crs, vec_R, vec_S, vec_T, vec_U, multiply(M, int(k)) + crs, vec_R, vec_S, vec_T, vec_U, M * k ) - another_k = Fr(random.randint(1, Fr.field_modulus)) - another_vec_T = [multiply(affine_to_projective(T), int(another_k)) for T in vec_T] - another_vec_U = [multiply(affine_to_projective(U), int(another_k)) for U in vec_U] + another_k = random_scalar() + another_vec_T = [T * another_k for T in vec_T] + another_vec_U = [U * another_k for U in vec_U] with pytest.raises(AssertionError): shuffle_proof.verify( @@ -482,7 +672,7 @@ def test_serde(): permutation = list(range(ell)) random.shuffle(permutation) - k = Fr(random.randint(1, Fr.field_modulus)) + k = random_scalar() vec_R = [get_random_point() for _ in range(ell)] vec_S = [get_random_point() for _ in range(ell)] @@ -542,9 +732,9 @@ def test_tracker_opening_proof(): k = generate_blinders(1)[0] r = generate_blinders(1)[0] - k_G = multiply(G, int(k)) - r_G = multiply(G, int(r)) - k_r_G = multiply(r_G, int(k)) + k_G = G * k + r_G = G * r + k_r_G = r_G * k transcript_prover = CurdleproofsTranscript(b"whisk_opening_proof") opening_proof = TrackerOpeningProof.new( @@ -579,19 +769,19 @@ def test_whisk_interface_shuffle_proof(): IsValidWhiskShuffleProof(crs, 
pre_trackers, post_trackers, shuffle_proof) -def generate_random_k() -> Fr: +def generate_random_k() -> Scalar: return generate_blinders(1)[0] -def get_k_commitment(k: Fr) -> BLSPubkey: - return G1_to_pubkey(multiply(G1, int(k))) +def get_k_commitment(k: Scalar) -> BLSPubkey: + return BLSPubkey(point_projective_to_bytes(G1 * k)) -def generate_tracker(k: Fr) -> WhiskTracker: +def generate_tracker(k: Scalar) -> WhiskTracker: r = generate_blinders(1)[0] - r_G = multiply(G1, int(r)) - k_r_G = multiply(r_G, int(k)) - return WhiskTracker(G1_to_pubkey(r_G), G1_to_pubkey(k_r_G)) + r_G = G1 * r + k_r_G = r_G * k + return WhiskTracker(BLSPubkey(point_projective_to_bytes(r_G)), BLSPubkey(point_projective_to_bytes(k_r_G))) def generate_random_crs(ell: int) -> CurdleproofsCrs: diff --git a/curdleproofs/curdleproofs/util.py b/curdleproofs/curdleproofs/util.py index 368c238..f547447 100644 --- a/curdleproofs/curdleproofs/util.py +++ b/curdleproofs/curdleproofs/util.py @@ -1,74 +1,71 @@ from random import randint from math import log2 -from typing import List, Tuple, Type, TypeVar, Union -from py_ecc.typing import ( - Optimized_Field, - Optimized_Point2D, - Optimized_Point3D, - FQ as FQ_type, -) -from py_ecc.optimized_bls12_381.optimized_curve import ( - curve_order, - G1, - multiply, - normalize, - FQ, -) -from py_ecc.bls.hash import os2ip -from py_ecc.bls.g2_primitives import pubkey_to_G1 -from eth_typing import BLSPubkey -from py_ecc.bls.point_compression import compress_G1 +from typing import List, TypeVar, NewType +from py_arkworks_bls12381 import G1Point, Scalar -class Fr(FQ_type): - field_modulus: int = curve_order +CURVE_ORDER = 52435875175126190479447740508185965837690552500527637822603658699938581184513 +# Generator +G1 = G1Point() +# Point at infinity over FQ +Z1 = G1Point.identity() -PointAffine = Optimized_Point2D[Optimized_Field] -PointProjective = Optimized_Point3D[Optimized_Field] +BLSPubkey = NewType('BLSPubkey', bytes) # bytes48 -def point_affine_to_bytes(point: 
PointAffine) -> bytes: - return point[0].n.to_bytes(48, "big") + point[1].n.to_bytes(48, "big") +def g1_is_inf(point: G1Point): + return point == Z1 -def points_affine_to_bytes(points: List[PointAffine]) -> List[bytes]: - return [point_affine_to_bytes(point) for point in points] +def random_scalar() -> Scalar: + # Note: the constructor 'Scalar()' errors with integers of more than 128 bits + # Scalar.from_le_bytes() requires integers less than CURVE_ORDER + return Scalar.from_le_bytes(randint(1, CURVE_ORDER - 1).to_bytes(32, 'little')) -def point_projective_to_bytes(point: PointProjective) -> bytes: - return point_affine_to_bytes(normalize(point)) +def point_projective_to_bytes(point: G1Point) -> bytes: + return bytes(point.to_compressed_bytes()) -def points_projective_to_bytes(points: List[PointProjective]) -> List[bytes]: +def points_projective_to_bytes(points: List[G1Point]) -> List[bytes]: return [point_projective_to_bytes(point) for point in points] -def field_to_bytes(field: Fr) -> bytes: - return field.n.to_bytes(48, "big") +def point_projective_from_bytes(b: bytes) -> G1Point: + return G1Point.from_compressed_bytes_unchecked(b) -def fields_to_bytes(fields: List[Fr]) -> List[bytes]: - return [field_to_bytes(field) for field in fields] +def field_to_bytes(field: Scalar) -> bytes: + return bytes(field.to_le_bytes()) -def affine_to_projective(point: PointAffine) -> PointProjective: - return (point[0], point[1], FQ.one()) +def fields_to_bytes(fields: List[Scalar]) -> List[bytes]: + return [field_to_bytes(field) for field in fields] -def g1_from_bytes(b: bytes, offset_point: int) -> PointProjective: - return pubkey_to_G1(BLSPubkey(b[48 * offset_point:48 * (offset_point + 1)])) +def g1_from_bytes(b: bytes, offset_point: int) -> G1Point: + return point_projective_from_bytes(BLSPubkey(b[48 * offset_point:48 * (offset_point + 1)])) -def invert(f: Fr) -> Fr: - res = Fr.one() / f - assert res * f == Fr.one() # fail in case f == 0 +def invert(f: Scalar) -> Scalar: + res 
= f.inverse() + assert res * f == Scalar(1) # fail in case f == 0 return res -def get_random_point() -> PointProjective: - a = randint(1, curve_order - 1) - return multiply(G1, a) +def scalar_pow(f: Scalar, n: int) -> Scalar: + result = Scalar(1) + while n != 0: + if n % 2 == 1: + result *= f + f *= f + n //= 2 + return result + + +def get_random_point() -> G1Point: + return G1 * random_scalar() def get_verification_scalars_bitstring(n: int, lg_n: int) -> List[List[int]]: @@ -81,61 +78,54 @@ def get_verification_scalars_bitstring(n: int, lg_n: int) -> List[List[int]]: return bitstrings -def generate_blinders(n: int) -> List[Fr]: - return [Fr(randint(0, Fr.field_modulus)) for _ in range(0, n)] +def generate_blinders(n: int) -> List[Scalar]: + return [random_scalar() for _ in range(0, n)] -def inner_product(a: List[Fr], b: List[Fr]) -> Fr: +def inner_product(a: List[Scalar], b: List[Scalar]) -> Scalar: assert len(a) == len(b) - return sum([a[i] * b[i] for i in range(0, len(a))], Fr.zero()) + return sum([a[i] * b[i] for i in range(0, len(a))], Scalar(0)) T_GET_PERMUTATION = TypeVar("T_GET_PERMUTATION") def get_permutation( - vec_a: List[T_GET_PERMUTATION], permutation: Union[List[Fr], List[int]] + vec_a: List[T_GET_PERMUTATION], permutation: List[int] ) -> List[T_GET_PERMUTATION]: return [vec_a[int(i)] for i in permutation] -def field_to_json(f: FQ_type) -> str: - return str(int(f)) - +def field_to_json(f: Scalar) -> str: + return bytes(f.to_le_bytes()).hex() -T_JSON_FIELD = TypeVar("T_JSON_FIELD", Fr, FQ) +def field_from_json(s: str) -> Scalar: + return Scalar.from_le_bytes(bytes.fromhex(s)) -def field_from_json(s: str, field: Type[T_JSON_FIELD]) -> T_JSON_FIELD: - return field(int(s)) +def scalar_from_bytes(b: bytes) -> Scalar: + return Scalar.from_le_bytes(b) -def point_affine_to_json(p: PointAffine) -> Tuple[str, str]: - return (field_to_json(p[0]), field_to_json(p[1])) +def point_projective_to_json(p: G1Point) -> str: + return bytes(p.to_compressed_bytes()).hex() 
-def point_projective_to_json(p: PointProjective) -> Tuple[str, str]: - return point_affine_to_json(normalize(p)) +def point_projective_from_json(p_hex: str) -> G1Point: + return G1Point.from_compressed_bytes_unchecked(bytes.fromhex(p_hex)) -def point_affine_from_json(t: Tuple[str, str]) -> PointAffine: - return (field_from_json(t[0], FQ), field_from_json(t[1], FQ)) +def g1_to_bytes(p: G1Point) -> bytes: + return bytes(p.to_compressed_bytes()) -def point_projective_from_json(t: Tuple[str, str]) -> PointProjective: - return affine_to_projective(point_affine_from_json(t)) - -def g1_to_bytes(p: PointProjective) -> bytes: - return compress_G1(p).to_bytes(48, 'big') - - -def g1_list_to_bytes(ps: List[PointProjective]) -> bytes: +def g1_list_to_bytes(ps: List[G1Point]) -> bytes: return b''.join([g1_to_bytes(p) for p in ps]) -def fr_to_bytes(fr: Fr) -> bytes: - return fr.n.to_bytes(48, "big") +def fr_to_bytes(fr: Scalar) -> bytes: + return field_to_bytes(fr) def log2_int(x: int) -> int: @@ -150,14 +140,14 @@ def __init__(self, data): self.data = data self.ptr = 0 - def read_g1(self) -> PointProjective: + def read_g1(self) -> G1Point: end_ptr = self.ptr + 48 - p = pubkey_to_G1(BLSPubkey(self.data[self.ptr:end_ptr])) + p = point_projective_from_bytes(BLSPubkey(self.data[self.ptr:end_ptr])) self.ptr = end_ptr return p - def read_fr(self) -> Fr: - end_ptr = self.ptr + 48 - p = Fr(os2ip(self.data[self.ptr:end_ptr])) + def read_fr(self) -> Scalar: + end_ptr = self.ptr + 32 + p = scalar_from_bytes(self.data[self.ptr:end_ptr]) self.ptr = end_ptr return p diff --git a/curdleproofs/curdleproofs/whisk_interface.py b/curdleproofs/curdleproofs/whisk_interface.py index 8fe6ad4..4f2f55c 100644 --- a/curdleproofs/curdleproofs/whisk_interface.py +++ b/curdleproofs/curdleproofs/whisk_interface.py @@ -8,16 +8,17 @@ from curdleproofs.curdleproofs_transcript import CurdleproofsTranscript from curdleproofs.opening import TrackerOpeningProof from curdleproofs.util import ( - PointProjective, 
point_projective_to_json, point_projective_from_json, - Fr, + point_projective_to_bytes, + point_projective_from_bytes, BufReader, g1_to_bytes, + random_scalar, + G1, + BLSPubkey ) -from py_ecc.optimized_bls12_381.optimized_curve import G1, multiply -from py_ecc.bls.g2_primitives import G1_to_pubkey, pubkey_to_G1 -from eth_typing import BLSPubkey +from py_arkworks_bls12381 import G1Point, Scalar class WhiskTracker: @@ -33,10 +34,10 @@ def __init__(self, r_G: BLSPubkey, k_r_G: BLSPubkey): class WhiskShuffleProof: - M: PointProjective + M: G1Point proof: CurdleProofsProof - def __init__(self, M: PointProjective, proof: CurdleProofsProof): + def __init__(self, M: G1Point, proof: CurdleProofsProof): self.M = M self.proof = proof @@ -79,11 +80,11 @@ def IsValidWhiskShuffleProof( """ Verify `post_shuffle_trackers` is a permutation of `pre_shuffle_trackers`. """ - vec_R = [pubkey_to_G1(tracker.r_G) for tracker in pre_shuffle_trackers] - vec_S = [pubkey_to_G1(tracker.k_r_G) for tracker in pre_shuffle_trackers] + vec_R = [point_projective_from_bytes(tracker.r_G) for tracker in pre_shuffle_trackers] + vec_S = [point_projective_from_bytes(tracker.k_r_G) for tracker in pre_shuffle_trackers] - vec_T = [pubkey_to_G1(tracker.r_G) for tracker in post_shuffle_trackers] - vec_U = [pubkey_to_G1(tracker.k_r_G) for tracker in post_shuffle_trackers] + vec_T = [point_projective_from_bytes(tracker.r_G) for tracker in post_shuffle_trackers] + vec_U = [point_projective_from_bytes(tracker.k_r_G) for tracker in post_shuffle_trackers] ell = len(crs.vec_G) n_blinders = len(crs.vec_H) @@ -99,10 +100,10 @@ def GenerateWhiskShuffleProof( ) -> Tuple[Sequence[WhiskTracker], WhiskShuffleProofBytes]: permutation = list(range(len(crs.vec_G))) random.shuffle(permutation) - k = Fr(random.randint(1, Fr.field_modulus)) + k = random_scalar() - vec_R = [pubkey_to_G1(tracker.r_G) for tracker in pre_shuffle_trackers] - vec_S = [pubkey_to_G1(tracker.k_r_G) for tracker in pre_shuffle_trackers] + vec_R = 
[point_projective_from_bytes(tracker.r_G) for tracker in pre_shuffle_trackers] + vec_S = [point_projective_from_bytes(tracker.k_r_G) for tracker in pre_shuffle_trackers] vec_T, vec_U, M, vec_m_blinders = shuffle_permute_and_commit_input( crs, vec_R, vec_S, permutation, k @@ -121,7 +122,7 @@ def GenerateWhiskShuffleProof( ) whisk_shuffle_proof = WhiskShuffleProof(M, shuffle_proof) - post_trackers = [WhiskTracker(G1_to_pubkey(r_G), G1_to_pubkey(k_r_G)) for r_G, k_r_G in zip(vec_T, vec_U)] + post_trackers = [WhiskTracker(BLSPubkey(point_projective_to_bytes(r_G)), BLSPubkey(point_projective_to_bytes(k_r_G))) for r_G, k_r_G in zip(vec_T, vec_U)] return post_trackers, whisk_shuffle_proof.to_bytes() @@ -135,28 +136,28 @@ def IsValidWhiskOpeningProof( tracker_proof: SerializedWhiskTrackerProof, ) -> bool: """ - Verify knowledge of `k` such that `tracker.k_r_G == k * tracker.r_G` and `k_commitment == k * BLS_G1_GENERATOR`. + Verify knowledge of `k` such that `tracker.k_r_G == k * tracker.r_G` and `k_commitment == k * BLS_G1`. 
""" tracker_proof_instance = TrackerOpeningProof.from_bytes(BufReader(tracker_proof)) transcript_verifier = CurdleproofsTranscript(b"whisk_opening_proof") return tracker_proof_instance.verify( transcript_verifier, - pubkey_to_G1(tracker.k_r_G), - pubkey_to_G1(tracker.r_G), - pubkey_to_G1(k_commitment), + point_projective_from_bytes(tracker.k_r_G), + point_projective_from_bytes(tracker.r_G), + point_projective_from_bytes(k_commitment), ) def GenerateWhiskTrackerProof( tracker: WhiskTracker, - k: Fr, + k: Scalar, ) -> SerializedWhiskTrackerProof: transcript_prover = CurdleproofsTranscript(b"whisk_opening_proof") opening_proof = TrackerOpeningProof.new( - k_r_G=pubkey_to_G1(tracker.k_r_G), - r_G=pubkey_to_G1(tracker.r_G), - k_G=multiply(G1, int(k)), + k_r_G=point_projective_from_bytes(tracker.k_r_G), + r_G=point_projective_from_bytes(tracker.r_G), + k_G=G1 * k, k=k, transcript=transcript_prover, ) diff --git a/curdleproofs/poetry.lock b/curdleproofs/poetry.lock index 0891d0f..a41465c 100644 --- a/curdleproofs/poetry.lock +++ b/curdleproofs/poetry.lock @@ -35,17 +35,6 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "cached-property" -version = "1.5.2" -description = "A decorator for caching properties in classes." 
-optional = false -python-versions = "*" -files = [ - {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, - {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, -] - [[package]] name = "click" version = "8.1.4" @@ -71,178 +60,6 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "cytoolz" -version = "0.12.1" -description = "Cython implementation of Toolz: High performance functional utilities" -optional = false -python-versions = ">=3.6" -files = [ - {file = "cytoolz-0.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c59bb4ca88e1c69931468bf21f91c8f64d8bf1999eb163b7a2df336f60c304a"}, - {file = "cytoolz-0.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d700e011156ff112966c6d77faaae125fcaf538f4cec2b9ce8957de82858f0f"}, - {file = "cytoolz-0.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c3f57c48eb939d2986eba4aeaeedf930ebf94d58c91a42d4e0fc45ed5427dc"}, - {file = "cytoolz-0.12.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25ff13c468c06da9ef26651dc389e7e8bb7af548f8c1dfb96305f57f18d398a8"}, - {file = "cytoolz-0.12.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a734511144309ea6e105406633affb74e303a3df07d8a3954f9b01946e27ecb1"}, - {file = "cytoolz-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48bc2f30d1b2646d675bb8e7778ab59379bf9edc59fe06fb0e7f85ba1271bf44"}, - {file = "cytoolz-0.12.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30936ae8fa68b6a1ac8ad6c4bacb5a8a00d51bc6c89f9614a1557b0105d09f8a"}, - {file = "cytoolz-0.12.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:efd1b2da3ee577fcfa723a214f73186aef9674dd5b28242d90443c7a82722b0f"}, - {file = "cytoolz-0.12.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6805b007af3557ee6c20dab491b6e55a8177f5b6845d9e6c653374d540366ba7"}, - {file = "cytoolz-0.12.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a6e63fc67b23830947b51e0a488992e3c904fce825ead565f3904dcf621d05f7"}, - {file = "cytoolz-0.12.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9e324a94856d88ecf10f34c102d0ded67d7c3cf644153d77e34a29720ce6aa47"}, - {file = "cytoolz-0.12.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02975e2b1e61e47e9afa311f4c1783d155136fad37c54a1cebfe991c5a0798a1"}, - {file = "cytoolz-0.12.1-cp310-cp310-win32.whl", hash = "sha256:b6569f6038133909cd658dbdcc6fc955f791dc47a7f5b55d2066f742253dcbfe"}, - {file = "cytoolz-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:1be368623e46ad3c1ce807e7a436acb119c26001507b31f92ceb21b86e08c386"}, - {file = "cytoolz-0.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:849f461bffa1e7700ccfcb5186df29cd4cdcc9efdb7199cb8b5681dc37045d72"}, - {file = "cytoolz-0.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4284120c978fb7039901bf6e66832cb3e82ac1b2a107512e735bdb04fd5533ed"}, - {file = "cytoolz-0.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ec296f01c29c809698eaf677211b6255691295c2b35caab2131e1e7eaadfbac"}, - {file = "cytoolz-0.12.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37c53f456a1c84566a7d911eec57c4c6280b915ab0600e7671582793cc2769fe"}, - {file = "cytoolz-0.12.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b6761791973b1e839b8309d5853b40eeb413368e31beaf5f2b6ed44c6fc7cf0"}, - {file = "cytoolz-0.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff478682e8ee6dbaa37201bb71bf4a6eee744006ab000e8f5cea05066fc7c845"}, - {file = 
"cytoolz-0.12.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:867bebe6be30ee36a836f9b835790762a74f46be8cc339ea57b68dcecdbc1133"}, - {file = "cytoolz-0.12.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7e903df991f0957e2b271a37bb25d28e0d260c52825ae67507d15ca55a935961"}, - {file = "cytoolz-0.12.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e797c4afb1b7962d3205b1959e1051f7e6bfbba29da44042a9efc2391f1feb38"}, - {file = "cytoolz-0.12.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b8eceaa12b7f152b046b67cb053ec2b5b00f73593983de69bc5e63a8aca4a7a8"}, - {file = "cytoolz-0.12.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b575393dd431b8e211de35bd593d831dac870172b16e2b7934f3566b8fc89377"}, - {file = "cytoolz-0.12.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3032c0ba42dee5836d6b57a72a569b65df2c29e8ed266cb900d569003cf933a9"}, - {file = "cytoolz-0.12.1-cp311-cp311-win32.whl", hash = "sha256:c576bd63495150385b8d05eaae775387f378be2fd9805d3ffb4d17c87271fbad"}, - {file = "cytoolz-0.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:421b224dc4157a0d66625acb5798cf50858cfa06a5232d39a8bd6cf1fa88aca3"}, - {file = "cytoolz-0.12.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:be5a454a95797343d0fb1ed02caecae73a023b1393c112951c84f17ec9f4076c"}, - {file = "cytoolz-0.12.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:061387aa39b9c1576c25d0c59142513c09e77a2a07bd5d6211a43c7a758b6f45"}, - {file = "cytoolz-0.12.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f4dbc3f0ec8f6fc68865489af21dcf042ff007d2737c27bfd73296f15db544"}, - {file = "cytoolz-0.12.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a816bff6bf424753e1ac2441902ceaf37ae6718b745a53f6aa1a60c617fb4f5f"}, - {file = "cytoolz-0.12.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:633f19d1990b1cf9c67dce9c28bf8b5a18e42785d15548607a100e1236384d5d"}, - {file = "cytoolz-0.12.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fa7009c843667868aa8bdb3d68e5ef3d6356dd418b17ed5ca4e1340e82483a5"}, - {file = "cytoolz-0.12.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1c29dd04e282ddfd45b457e3551075beec9128aa9271245e58ce924bf6e055f8"}, - {file = "cytoolz-0.12.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd35c0be4c46274129dd1678bb911dd4e93d23968b26f4e39cd55bc7cb3b1bac"}, - {file = "cytoolz-0.12.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5158ae6d8dd112d003f677039a3613ca7d2592bfe35d7accf23684edb961fc26"}, - {file = "cytoolz-0.12.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:7eb9e6fa8a82c3d2f519f7d3942898a97792e3895569e9501b9431048289b82f"}, - {file = "cytoolz-0.12.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ac6784cc43aec51a86cf9058a2a343084f8cf46a9281bea5762bfa608127c53b"}, - {file = "cytoolz-0.12.1-cp36-cp36m-win32.whl", hash = "sha256:794cce219bbcb2f36ca220f27d5afd64eaa854e04901bd6f240be156a578b607"}, - {file = "cytoolz-0.12.1-cp36-cp36m-win_amd64.whl", hash = "sha256:695dd8231e4f1bfb9a2363775a6e4e56ad9d2058058f817203a49614f4bfe33b"}, - {file = "cytoolz-0.12.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1bd8017ef0da935a20106272c5f5ff6b1114add1ccb09cfed1ff7ec5cc01c6d"}, - {file = "cytoolz-0.12.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e1ebf6eb4438b8c45cbe7e7b22fc65df0c9efa97a70d3bf2f51e08b19756a5"}, - {file = "cytoolz-0.12.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:816c2038008ebf50d81171ddfae377f1af9e71d504ec609469dcb0906bfcf2ae"}, - {file = "cytoolz-0.12.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bebe58f7a160db7838eb70990c704db4bdc2d58bd364290fd69be0587be8bac"}, - {file = 
"cytoolz-0.12.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a72440305f634604827f96810e4469877b89f5c060d6852267650a49b0e3768c"}, - {file = "cytoolz-0.12.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b46ebc463bb45f278a2b94e630061c26e10077cb68d4c93583d8f4199699a5ef"}, - {file = "cytoolz-0.12.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e75e287787e6adafed9d8c3d3e7647c0b5eb460221f9f92d7dfe48b45ba77c0d"}, - {file = "cytoolz-0.12.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:03ab22c9aeb1535f8647d23b6520b0c3d41aaa18d04ef42b352dde1931f2e2b1"}, - {file = "cytoolz-0.12.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b2ac288f27a2689d9e39f4cf4df5437a8eb038eaae515169586c77f9f8fb343a"}, - {file = "cytoolz-0.12.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:97a24c0d0806fcf9a6e75fc18aeb95adc37eb0baf6451f10a2de23ffd815329d"}, - {file = "cytoolz-0.12.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:42c9e5cd2a48a257b1f2402334b48122501f249b8dcf77082f569f2680f185eb"}, - {file = "cytoolz-0.12.1-cp37-cp37m-win32.whl", hash = "sha256:35fae4eaa0eaf9072a5fe2d244a79e65baae4e5ddbe9cc629c5037af800213a2"}, - {file = "cytoolz-0.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5af43ca7026ead3dd08b261e4f7163cd2cf3ceaa74fa5a81f7b7ea5d445e41d6"}, - {file = "cytoolz-0.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fcc378fa97f02fbcef090b3611305425d72bd1c0afdd13ef4a82dc67d40638b6"}, - {file = "cytoolz-0.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc3645cf6b9246cb8e179db2803e4f0d148211d2a2cf22d5c9b5219111cd91a0"}, - {file = "cytoolz-0.12.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b245b824f4705aef0e4a03fafef3ad6cb59ef43cc564cdbf683ee28dfc11ad5"}, - {file = "cytoolz-0.12.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c1964dcb5f250fd13fac210944b20810d61ef4094a17fbbe502ab7a7eaeeace7"}, - {file = 
"cytoolz-0.12.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7194a22a4a24f3561cb6ad1cca9c9b2f2cf34cc8d4bce6d6a24c80960323fa8"}, - {file = "cytoolz-0.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c5434db53f3a94a37ad8aedb231901e001995d899af6ed1165f3d27fa04a6a"}, - {file = "cytoolz-0.12.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b30cd083ef8af4ba66d9fe5cc75c653ede3f2655f97a032db1a14cc8a006719c"}, - {file = "cytoolz-0.12.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bef934bd3e024d512c6c0ad1c66eb173f61d9ccb4dbca8d75f727a5604f7c2f6"}, - {file = "cytoolz-0.12.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37320669c364f7d370392af33cc1034b4563da66c22cd3261e3530f4d30dbe4b"}, - {file = "cytoolz-0.12.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3cb95d23defb2322cddf70efb4af6dac191d95edaa343e8c1f58f1afa4f92ecd"}, - {file = "cytoolz-0.12.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ac5895d5f78dbd8646fe37266655ba4995f9cfec38a86595282fee69e41787da"}, - {file = "cytoolz-0.12.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:499af2aff04f65b4c23de1df08e1d1484a93b23ddaaa0163e44b5070b68356eb"}, - {file = "cytoolz-0.12.1-cp38-cp38-win32.whl", hash = "sha256:aa61e3da751a2dfe95aeca603f3ef510071a136ba9905f61ae6cb5d0696271ad"}, - {file = "cytoolz-0.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:f5b43ce952a5a31441556c55f5f5f5a8e62c28581a0ff2a2c31c04ef992d73bd"}, - {file = "cytoolz-0.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b8f88251b84b3877254cdd59c86a1dc6b2b39a03c6c9c067d344ef879562e0"}, - {file = "cytoolz-0.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d72415b0110f7958dd3a5ee98a70166f47bd42ede85e3535669c794d06f57406"}, - {file = "cytoolz-0.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8101ab6de5aa0b26a2b5032bc488d430010c91863e701812d65836b03a12f61"}, - {file = 
"cytoolz-0.12.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eed428b5e68c28abf2c71195e799850e040d67a27c05f7785319c611665b86a"}, - {file = "cytoolz-0.12.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59641eb1f41cb688b3cb2f98c9003c493a5024325f76b5c02333d08dd972127c"}, - {file = "cytoolz-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a48940ff0449ffcf690310bf9228bb57885f7571406ed2fe05c98e299987195"}, - {file = "cytoolz-0.12.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bae431a5985cdb2014be09d37206c288e0d063940cf9539e9769bd2ec26b220"}, - {file = "cytoolz-0.12.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cb8b10405960a8e6801a4702af98ea640130ec6ecfc1208195762de3f5503ba9"}, - {file = "cytoolz-0.12.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c9a16a5b4f54d5c0a131f56b0ca65998a9a74958b5b36840c280edba4f8b907"}, - {file = "cytoolz-0.12.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49911cb533c96d275e31e7eaeb0742ac3f7afe386a1d8c40937814d75039a0f7"}, - {file = "cytoolz-0.12.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dbae37d48ef5a0ab90cfaf2b9312d96f034b1c828208a9cbe25377a1b19ba129"}, - {file = "cytoolz-0.12.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c34e69be4429633fc614febe3127fa03aa418a1abb9252f29d9ba5b3394573a5"}, - {file = "cytoolz-0.12.1-cp39-cp39-win32.whl", hash = "sha256:0d474dacbafbdbb44c7de986bbf71ff56ae62df0d52ab3b6fa966784dc88737a"}, - {file = "cytoolz-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:3d6d0b0075731832343eb88229cea4bf39e96f3fc7acbc449aadbdfec2842703"}, - {file = "cytoolz-0.12.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8506d1863f30d26f577c4ed59d2cfd03d2f39569f9cbaa02a764a9de73d312d5"}, - {file = "cytoolz-0.12.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1a1eae39656a1685e8b3f433eecfd72015ce5c1d7519e9c8f9402153c68331bb"}, - {file = "cytoolz-0.12.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a0055943074c6c85b77fcc3f42f7c54010a3478daa2ed9d6243d0411c84a4d3"}, - {file = "cytoolz-0.12.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8a7a325b8fe885a6dd91093616c703134f2dacbd869bc519970df3849c2a15b"}, - {file = "cytoolz-0.12.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7b60caf0fa5f1b49f1062f7dc0f66c7b23e2736bad50fa8296bfb845995e3051"}, - {file = "cytoolz-0.12.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:980e7eb7205e01816a92f3290cfc80507957e64656b9271a0dfebb85fe3718c0"}, - {file = "cytoolz-0.12.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d38a40fe153f23cda0e823413fe9d9ebee89dd461827285316eff929fb121e"}, - {file = "cytoolz-0.12.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d540e9c34a61b53b6a374ea108794a48388178f7889d772e364cdbd6df37774c"}, - {file = "cytoolz-0.12.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:117871f036926e42d3abcee587eafa9dc7383f1064ac53a806d33e76604de311"}, - {file = "cytoolz-0.12.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:31131b54a0c72efc0eb432dc66df546c6a54f2a7d396c9a34cf65ac1c26b1df8"}, - {file = "cytoolz-0.12.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4534cbfad73cdb1a6dad495530d4186d57d73089c01e9cb0558caab50e46cb3b"}, - {file = "cytoolz-0.12.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50db41e875e36aec11881b8b12bc69c6f4836b7dd9e88a9e5bbf26c2cb3ba6cd"}, - {file = "cytoolz-0.12.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6716855f9c669c9e25a185d88e0f169839bf8553d16496796325acd114607c11"}, - {file = "cytoolz-0.12.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f32452e833f0605b871626e6c61b71b0cba24233aad0e04accc3240497d4995"}, - {file = "cytoolz-0.12.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ba74c239fc6cb6e962eabc420967c7565f3f363b776c89b3df5234caecf1f463"}, - {file = "cytoolz-0.12.1.tar.gz", hash = "sha256:fc33909397481c90de3cec831bfb88d97e220dc91939d996920202f184b4648e"}, -] - -[package.dependencies] -toolz = ">=0.8.0" - -[package.extras] -cython = ["cython"] - -[[package]] -name = "eth-hash" -version = "0.5.2" -description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3" -optional = false -python-versions = ">=3.7, <4" -files = [ - {file = "eth-hash-0.5.2.tar.gz", hash = "sha256:1b5f10eca7765cc385e1430eefc5ced6e2e463bb18d1365510e2e539c1a6fe4e"}, - {file = "eth_hash-0.5.2-py3-none-any.whl", hash = "sha256:251f62f6579a1e247561679d78df37548bd5f59908da0b159982bf8293ad32f0"}, -] - -[package.extras] -dev = ["black (>=23)", "build (>=0.9.0)", "bumpversion (>=0.5.3)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "ipython", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=6.0.0)", "pytest (>=7.0.0)", "pytest-watch (>=4.1.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -doc = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -lint = ["black (>=23)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=6.0.0)"] -pycryptodome = ["pycryptodome (>=3.6.6,<4)"] -pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"] -test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] - -[[package]] -name = "eth-typing" -version = "3.4.0" -description = "eth-typing: Common type annotations for 
ethereum python packages" -optional = false -python-versions = ">=3.7.2, <4" -files = [ - {file = "eth-typing-3.4.0.tar.gz", hash = "sha256:7f49610469811ee97ac43eaf6baa294778ce74042d41e61ecf22e5ebe385590f"}, - {file = "eth_typing-3.4.0-py3-none-any.whl", hash = "sha256:347d50713dd58ab50063b228d8271624ab2de3071bfa32d467b05f0ea31ab4c5"}, -] - -[package.extras] -dev = ["black (>=23)", "build (>=0.9.0)", "bumpversion (>=0.5.3)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "ipython", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=6.0.0)", "pytest (>=7.0.0)", "pytest-watch (>=4.1.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -doc = ["sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -lint = ["black (>=23)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=6.0.0)"] -test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] - -[[package]] -name = "eth-utils" -version = "2.1.1" -description = "eth-utils: Common utility functions for python code that interacts with Ethereum" -optional = false -python-versions = ">=3.7,<4" -files = [ - {file = "eth-utils-2.1.1.tar.gz", hash = "sha256:7cccfb0b0749431d0d001e327e9a7289bf07308316a73850ae3895020e5682f4"}, - {file = "eth_utils-2.1.1-py3-none-any.whl", hash = "sha256:4938ab742f91cdf19bae024261af090664f63ccf83bdb1213e7146c14209e899"}, -] - -[package.dependencies] -cytoolz = {version = ">=0.10.1", markers = "implementation_name == \"cpython\""} -eth-hash = ">=0.3.1" -eth-typing = ">=3.0.0" -toolz = {version = ">0.8.2", markers = "implementation_name == \"pypy\""} - -[package.extras] -dev = ["black (>=23)", "build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "flake8 (==3.8.3)", "hypothesis (>=4.43.0)", "ipython", "isort (>=5.11.0)", "mypy (==0.971)", "pydocstyle (>=5.0.0)", "pytest (>=7.0.0)", "pytest-watch (>=4.1.0)", "pytest-xdist 
(>=2.4.0)", "sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "types-setuptools", "wheel"] -doc = ["sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -lint = ["black (>=23)", "flake8 (==3.8.3)", "isort (>=5.11.0)", "mypy (==0.971)", "pydocstyle (>=5.0.0)", "types-setuptools"] -test = ["hypothesis (>=4.43.0)", "mypy (==0.971)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "types-setuptools"] - [[package]] name = "exceptiongroup" version = "1.1.2" @@ -418,27 +235,70 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "py-ecc" -version = "6.0.0" -description = "Elliptic curve crypto in python including secp256k1 and alt_bn128" +name = "py-arkworks-bls12381" +version = "0.3.4" +description = "" optional = false -python-versions = ">=3.6, <4" +python-versions = ">=3.8" files = [ - {file = "py_ecc-6.0.0-py3-none-any.whl", hash = "sha256:54e8aa4c30374fa62d582c599a99f352c153f2971352171318bd6910a643be0b"}, - {file = "py_ecc-6.0.0.tar.gz", hash = "sha256:3fc8a79e38975e05dc443d25783fd69212a1ca854cc0efef071301a8f7d6ce1d"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7b77d0e84204a27c0cd2755084cdba147aabdd914e31f3e1d9bf39de8e04d1eb"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4b861f26e3a32e623d115cc12ce9da637c4ad85717fef02bfe949de32fc31aab"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fddec2702093c4505fa2bb6fb754efe54692502e556bc03d2be6a369dfe20bea"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5b4bc1e0326be10ea1c522c8eecd638723116c9838703bf245fc1b55b5777f"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f82cd27fa579f3911154b731bc6860f38fa49e3d4fe509e971fbb56b2c17b2eb"}, 
+ {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ba5245469fea19deeab059c4fb49d7332ee16659b1a0c85692c786e032d3957"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d82ef2604fed956953c1947210248ed452f2be03d4490e27e7c9438b31495a8"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7414ac0fd29abe39ad2b8b78fb80bc52ca4527cbca24f28db9064c073b003062"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-none-win32.whl", hash = "sha256:2a424c1f07f23c385b242699370cc3cabec62c0c4d6909487b932789319d9433"}, + {file = "py_arkworks_bls12381-0.3.4-cp310-none-win_amd64.whl", hash = "sha256:e318e5df82bad081c37babd929126a5c8f83e04009afcb4bf8e8a20c1d8f1abd"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:ce3d8bc5b495881d783ac46025c47fcb6e4d38deb3e5f32bb572f2a370099183"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e1d837cd246efe8ded815e8a8de461c87772dc1a0359eb4fce8b815d473e94b4"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55a21207599bba40524a4791ca6538513cd212c7cb7466f13f95b3f401c6ff94"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e036b37fa07d824f2b4d289bae5ee4b820b3702fd6cb01716587a5f43ab3be"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0d6ba0d216fd7d6a62f8743c8f079f69db473ab6058112d78e60bb46aa857ffe"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66c03ef4a8841d111c76e86d9c86ce73474c1d08d025c569567352cd5745aef2"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a81bce857ea7db3fd7a5e76a607cd794b36a78aea6cb18c710f9b162bac46b83"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a03209f97d6732c828a2383ab022c763d94530fad6b61468e1c5e149c5fa5710"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-none-win32.whl", hash = "sha256:f4c80957d3dd4bc3a045e78b139e06f7eb82004297b7c56faa505d16fb7ce650"}, + {file = "py_arkworks_bls12381-0.3.4-cp311-none-win_amd64.whl", hash = "sha256:6267bb2d5ae9ce0953fe50d8316538e1e3f2af642a19621ee23f53dfda52e900"}, + {file = "py_arkworks_bls12381-0.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cb392eeecffd28e43585c104da005999be485e5a763ebff271679560e0d36a"}, + {file = "py_arkworks_bls12381-0.3.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d87f93ccdc6ac07c3c9cfdc0700980b39ca633216769ea1f7659f320a8e5514"}, + {file = "py_arkworks_bls12381-0.3.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2842dae251f9d52b2f51bdfc7c01165905ebd55bafda9dcebde501fad913fa8b"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6473ee984de5396b1743f578c6f50413659307e4ab2023fa82c1207c3f963dc6"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f6ca5c31af472e7a3481adc91ae8b2cf0277852a1c5471f16cfcede47ef949b5"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4e6d3042edbdd4f7e5633a2dd1cb39be6394b18f2e37b7a88c0257fa3479c0e0"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:980141f625bc2a6f369a20007bcef80051f2cf910d154f2cf71f7a423c47b75b"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25206c9ba7327f42129b189a1f61053c0929feb0e19385502cab655b7f1f2f58"}, + {file = 
"py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8cab59fae127c640d3a56c13084874f15e73cc585b2ab666349ec1d82595c88"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95097f14cd884199737adb0a3754b5c78a4a041f8b38a152228b013f4099668f"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71d95e2d30216f7c7ca967582be8cac4b5b5b6fcafb6ebf8d3532238e5bb2936"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-none-win32.whl", hash = "sha256:5ff17fe50663cfe73c13cbad9b7f57d1209ab7745ed8db22ad4ed2d198a29562"}, + {file = "py_arkworks_bls12381-0.3.4-cp38-none-win_amd64.whl", hash = "sha256:0799b1bfb71f028f74d20bd9cb96c03d4ab0b6b8f6b94ad55910770ff49c77f8"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:df984e07fbe15cae04f586716b4f47fb2115ddb40a27146547ab73a323624047"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d65d7eee1ac61a116646d283df83619c068ef21db8490ed52f8343021936617"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d1fc9ff5161cc9e9ef5ceae9aef3a0794c4d419b0c9b840d3ea5247b3a5ddcb"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad3b0199638d830d15b67412b72848cef25a2cfa5dbae260af09cb179908c975"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0bce6771d1612e6ef1aacd93a693ff1273140f7fa4af98a2da73e83265734a58"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fec9947e698fa6c64cb178813f6e23208c9e3e02e863fa4da8ae5d9c2849a3f"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8f6cd5856eb056fb9ca3c374ecb953d5de4fb8bc81787ab4fa927ed60a717a1d"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9b9f6b560d917396def5c6bf4bc99e014f516d0446189fc3bf1e8d0b2b8099c"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-none-win32.whl", hash = "sha256:62f97432a4458619d944bcc53ad178bc9606b983f539096a1fed09f1afbc3386"}, + {file = "py_arkworks_bls12381-0.3.4-cp39-none-win_amd64.whl", hash = "sha256:7c1e2475b2fc52ede8ce8dc2f4b9e6325876ef22a788aacad64affd95553e794"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a88dbfb6c43e2e37477df6673403580e78ac0011079528e1ea7d1950d75b153"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6af83fbce8159d5637e21d0343baf47f811f15b6a4c5531563dd976518a12d0"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:640decc883597d24f43c0433dec6b51c58870e3ad33e3fd95e9ab8d51bd582ed"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ac501ecc86c0a269e7c5623a0de1b4193ced8ff3025e288feb86e8811c48205"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbf7b6fab400345f00fe97fd8b7dbeea1d8eb4340ddfb102dc859978c4e92891"}, + {file = "py_arkworks_bls12381-0.3.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5c1f9fb2bd77e9b67b4d07dd973d9cc5ee24deae98c17e4c1ea004ffb6019b3"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c2340216594f42716c370aad752e61f8cecca5398c03729e731a2e531c199bc"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3dd8d0781e89316aa42119b2ba4b8a02b8fe020cf4334b017fca6c39e1fd7f59"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90e457ffdf86cecb1f32327a24580ed8a308ea29497ceb05dd037f89469adff8"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4f90898a53d3ee4adebd9da77f4a0a55b8fa95fac71cfb0c9c0e71fd9b8d58b"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5720f9b3201530c91f0074f5655651f410d79031e14f576790bbd303498aaddc"}, + {file = "py_arkworks_bls12381-0.3.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b91c49d067051502449867dc7828166b6eb44b8afef12f375875a291f6fbd0a"}, + {file = "py_arkworks_bls12381-0.3.4.tar.gz", hash = "sha256:93a86d24b0b07722c9449cef523e977c2018ec7673accfac25334694f40f3848"}, ] -[package.dependencies] -cached-property = ">=1.5.1,<2" -eth-typing = ">=3.0.0,<4" -eth-utils = ">=2.0.0,<3" -mypy-extensions = ">=0.4.1" - -[package.extras] -dev = ["bumpversion (>=0.5.3,<1)", "flake8 (==3.5.0)", "mypy (==0.641)", "mypy-extensions (>=0.4.1)", "pytest (==6.2.5)", "pytest-xdist (==1.26.0)", "twine"] -lint = ["flake8 (==3.5.0)", "mypy (==0.641)", "mypy-extensions (>=0.4.1)"] -test = ["pytest (==6.2.5)", "pytest-xdist (==1.26.0)"] - [[package]] name = "pycodestyle" version = "2.10.0" @@ -494,17 +354,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "toolz" -version = "0.12.0" -description = "List processing tools and functional utilities" -optional = false -python-versions = ">=3.5" -files = [ - {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, - {file = "toolz-0.12.0.tar.gz", hash = 
"sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, -] - [[package]] name = "typing-extensions" version = "4.7.1" @@ -519,4 +368,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f7f214ea61f328115bd8cd70f579b9daa5bd88a8f84b8be8169ea98d36f1f355" +content-hash = "a48025e2ebf4dc31a5fe141ba574b1d6f6036692b5f0a8d861a5d986e3751d3e" diff --git a/curdleproofs/py_arkworks_bls12381-stubs/__init__.pyi b/curdleproofs/py_arkworks_bls12381-stubs/__init__.pyi new file mode 100644 index 0000000..4e45d67 --- /dev/null +++ b/curdleproofs/py_arkworks_bls12381-stubs/__init__.pyi @@ -0,0 +1,54 @@ +from typing import Any + +from typing import Any, List, Tuple, Union + +class G1Point: + def __init__(self) -> None: ... + + def __add__(self, other: 'G1Point') -> 'G1Point': ... + + def __mul__(self, other: Scalar) -> 'G1Point': ... + + def __neg__(self) -> 'G1Point': ... + + def __sub__(self, other: 'G1Point') -> 'G1Point': ... + + def __eq__(self, other: Any) -> bool: ... + + @staticmethod + def from_compressed_bytes(data: bytes) -> 'G1Point': ... + + @staticmethod + def from_compressed_bytes_unchecked(data: bytes) -> 'G1Point': ... + + @staticmethod + def identity() -> 'G1Point': ... + + @staticmethod + def multiexp_unchecked(bases: List['G1Point'], scalars: List[Any]) -> 'G1Point': ... + + def to_compressed_bytes(self) -> bytes: ... + +class Scalar: + def __init__(self, value: int) -> None: ... + + def __add__(self, other: 'Scalar') -> 'Scalar': ... + + def __mul__(self, other: 'Scalar') -> 'Scalar': ... + + def __neg__(self) -> 'Scalar': ... + + def __sub__(self, other: 'Scalar') -> 'Scalar': ... + + def __eq__(self, other: Any) -> bool: ... + + @staticmethod + def from_le_bytes(data: bytes) -> 'Scalar': ... + + def inverse(self) -> 'Scalar': ... + + def is_zero(self) -> bool: ... + + def square(self) -> 'Scalar': ... + + def to_le_bytes(self) -> bytes: ... 
diff --git a/curdleproofs/pyproject.toml b/curdleproofs/pyproject.toml index 779f413..e41756a 100644 --- a/curdleproofs/pyproject.toml +++ b/curdleproofs/pyproject.toml @@ -7,7 +7,7 @@ license = "MIT" [tool.poetry.dependencies] python = "^3.9" -py-ecc = "^6.0.0" +py_arkworks_bls12381 = "0.3.4" merlin = {git = "https://github.com/nalinbhardwaj/curdleproofs.pie", rev = "master", subdirectory = "merlin"} [tool.poetry.dev-dependencies]