From a80822310431ed0e3f8147710be8fdcc1a302e55 Mon Sep 17 00:00:00 2001
From: iquerejeta
Date: Wed, 15 Jan 2025 13:44:54 +0100
Subject: [PATCH] Working recursive test

---
 Cargo.toml                     |   3 +-
 src/plonk/circuit.rs           |  14 +-
 src/plonk/keygen.rs            |  20 ++
 src/plonk/permutation.rs       |   4 +-
 src/plonk/verifier.rs          |   5 +-
 src/poly/commitment.rs         |   4 +-
 src/poly/kzg/mod.rs            | 414 +++++++++++++++++++++++++--------
 src/poly/kzg/msm.rs            |  23 ++
 src/poly/mod.rs                |   4 +-
 src/poly/query.rs              |   8 +-
 src/transcript/implementors.rs |   4 +-
 src/transcript/mod.rs          |  13 +-
 src/utils/arithmetic.rs        |  12 +
 13 files changed, 405 insertions(+), 123 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index fccd6f7b7..dd998bdb5 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -43,7 +43,7 @@ backtrace = { version = "0.3", optional = true }
 ff = "0.13"
 group = "0.13"
 halo2curves = { version = "0.7.0", default-features = false }
-blstrs = { git = "https://github.com/davidnevadoc/blstrs", rev = "8ca6da7" }
+blstrs = { git = "https://github.com/davidnevadoc/blstrs", rev = "3dfe5bf" }
 rand_core = { version = "0.6", default-features = false }
 tracing = "0.1"
 blake2b_simd = "1" # MSRV 1.66.0
@@ -59,6 +59,7 @@ tabbycat = { version = "0.1", features = ["attributes"], optional = true }
 
 # Legacy circuit compatibility
 halo2_legacy_pdqsort = { version = "0.1.0", optional = true }
+num-bigint = "0.4.6"
 
 [dev-dependencies]
 assert_matches = "1.5"
diff --git a/src/plonk/circuit.rs b/src/plonk/circuit.rs
index 9953f34e5..f696db807 100644
--- a/src/plonk/circuit.rs
+++ b/src/plonk/circuit.rs
@@ -481,7 +481,7 @@ impl Selector {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct FixedQuery {
     /// Query index
-    pub(crate) index: Option<usize>,
+    pub index: Option<usize>,
     /// Column index
     pub(crate) column_index: usize,
     /// Rotation of this query
@@ -504,7 +504,7 @@ impl FixedQuery {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct AdviceQuery {
     /// Query index
-    pub(crate) index: Option<usize>,
+    pub index: Option<usize>,
     /// Column index
     pub(crate) column_index: usize,
     /// Rotation of this query
@@ -534,7 +534,7 @@ impl AdviceQuery {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct InstanceQuery {
     /// Query index
-    pub(crate) index: Option<usize>,
+    pub index: Option<usize>,
     /// Column index
     pub(crate) column_index: usize,
     /// Rotation of this query
@@ -1338,11 +1338,6 @@ impl<F: Field> Product<Self> for Expression<F> {
     }
 }
 
-// /// Represents an index into a vector where each entry corresponds to a distinct
-// /// point that polynomials are queried at.
-// #[derive(Copy, Clone, Debug)]
-// pub(crate) struct PointIndex(pub usize);
-
 /// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset
 /// within a custom gate.
 #[derive(Clone, Debug)]
@@ -2140,7 +2135,8 @@ impl<F: Field> ConstraintSystem<F> {
         });
     }
 
-    pub(crate) fn phases(&self) -> impl Iterator<Item = u8> {
+    /// Return an iterator over the phases of the constraint system.
+    pub fn phases(&self) -> impl Iterator<Item = u8> {
        let max_phase = self
            .advice_column_phase
            .iter()
diff --git a/src/plonk/keygen.rs b/src/plonk/keygen.rs
index 982babe1d..90d357a13 100644
--- a/src/plonk/keygen.rs
+++ b/src/plonk/keygen.rs
@@ -246,6 +246,10 @@ fn k_from_circuit<F: Ord + FromUniformBytes<64>, C: Circuit<F>>(circuit: &C) ->
 
 /// Generate a `VerifyingKey` from an instance of `Circuit`.
 /// By default, selector compression is turned **off**.
+///
+/// This function automatically generates the VK using the smallest
+/// value of k required for the ConcreteCircuit.
+/// To specify a particular value for k, use keygen_vk_with_k instead.
 pub fn keygen_vk<F, CS, ConcreteCircuit>(
     params: &CS::Parameters,
     circuit: &ConcreteCircuit,
 ) -> Result<VerifyingKey<F, CS>, Error>
 where
@@ -256,6 +260,22 @@ where
     ConcreteCircuit: Circuit<F>,
 {
     let k = k_from_circuit(circuit);
+
+    keygen_vk_with_k(params, circuit, k)
+}
+
+/// Generate a `VerifyingKey` from an instance of `Circuit`.
+/// By default, selector compression is turned **off**.
+pub fn keygen_vk_with_k<F, CS, ConcreteCircuit>(
+    params: &CS::Parameters,
+    circuit: &ConcreteCircuit,
+    k: u32,
+) -> Result<VerifyingKey<F, CS>, Error>
+where
+    F: WithSmallOrderMulGroup<3> + FromUniformBytes<64> + Ord,
+    CS: PolynomialCommitmentScheme<F>,
+    ConcreteCircuit: Circuit<F>,
+{
     if params.max_k() < k {
         return Err(Error::NotEnoughRowsAvailable {
             current_k: params.max_k(),
diff --git a/src/plonk/permutation.rs b/src/plonk/permutation.rs
index f0614ba59..0dd6d9710 100644
--- a/src/plonk/permutation.rs
+++ b/src/plonk/permutation.rs
@@ -23,7 +23,7 @@ use std::io;
 #[derive(Debug, Clone)]
 pub struct Argument {
     /// A sequence of columns involved in the argument.
-    pub(super) columns: Vec<Column<Any>>,
+    pub columns: Vec<Column<Any>>,
 }
 
 impl Argument {
@@ -88,7 +88,7 @@ pub struct VerifyingKey<F: PrimeField, CS: PolynomialCommitmentScheme<F>> {
 }
 
 impl<F: PrimeField, CS: PolynomialCommitmentScheme<F>> VerifyingKey<F, CS> {
-    /// Returns commitments of sigma polynomials
+    /// Returns the commitments of the verifying key.
     pub fn commitments(&self) -> &Vec<CS::Commitment> {
         &self.commitments
     }
diff --git a/src/plonk/verifier.rs b/src/plonk/verifier.rs
index fed4e7912..8bde50727 100644
--- a/src/plonk/verifier.rs
+++ b/src/plonk/verifier.rs
@@ -19,7 +19,8 @@ where
         + Hashable<T::Hash>
         + Sampleable<T::Hash>
         + SerdeObject
-        + FromUniformBytes<64>,
+        + FromUniformBytes<64>
+        + Ord,
     CS::Commitment: Hashable<T::Hash> + SerdeObject,
 {
     // Check that instances match the expected number of instance columns
@@ -119,6 +120,7 @@ where
     // Sample x challenge, which is used to ensure the circuit is
     // satisfied with high probability.
     let x: F = transcript.squeeze_challenge();
+
     let instance_evals = {
         let xn = x.pow([vk.n()]);
         let (min_rotation, max_rotation) =
@@ -165,7 +167,6 @@ where
         .collect::<Result<Vec<_>, _>>()?;
 
     let fixed_evals = read_n(transcript, vk.cs.fixed_queries.len())?;
-
     let vanishing = vanishing.evaluate_after_x(vk, transcript)?;
 
     let permutations_common = vk.permutation.evaluate(transcript)?;
diff --git a/src/poly/commitment.rs b/src/poly/commitment.rs
index af755d9c9..23f433052 100644
--- a/src/poly/commitment.rs
+++ b/src/poly/commitment.rs
@@ -43,7 +43,7 @@
     ) -> Result<(), Error>
     where
         I: IntoIterator<Item = ProverQuery<'com, F>> + Clone,
-        F: Sampleable<T::Hash>,
+        F: Sampleable<T::Hash> + Ord + Hashable<<T as Transcript>::Hash>,
         Self::Commitment: Hashable<T::Hash>;
 
     /// Verify an opening proof at a given query
@@ -53,7 +53,7 @@
     ) -> Result<Self::Guard, Error>
     where
         I: IntoIterator<Item = VerifierQuery<F, Self>> + Clone,
-        F: Sampleable<T::Hash>,
+        F: Sampleable<T::Hash> + Ord + Hashable<T::Hash>,
         Self::Commitment: Hashable<T::Hash>;
 }
diff --git a/src/poly/kzg/mod.rs b/src/poly/kzg/mod.rs
index cca1aa09b..33442f6a3 100644
--- a/src/poly/kzg/mod.rs
+++ b/src/poly/kzg/mod.rs
@@ -1,4 +1,5 @@
 use halo2curves::pairing::Engine;
+use std::collections::{BTreeMap, BTreeSet};
 use std::marker::PhantomData;
 
 /// Multiscalar multiplication engines
@@ -13,12 +14,13 @@ use crate::poly::kzg::params::{ParamsKZG, ParamsVerifierKZG};
 use crate::poly::query::Query;
 use crate::poly::query::VerifierQuery;
 use crate::poly::{Coeff, Error, LagrangeCoeff, Polynomial, ProverQuery};
-use crate::utils::arithmetic::{kate_division, powers, MSM};
+use crate::utils::arithmetic::{
+    eval_polynomial, kate_division, lagrange_interpolate, powers, truncate, truncated_powers, MSM,
+};
 
-use crate::poly::commitment::PolynomialCommitmentScheme;
+use crate::poly::commitment::{
+    Params, PolynomialCommitmentScheme,
+};
 use crate::transcript::{Hashable, Sampleable, Transcript};
-use ff::Field;
-use group::prime::PrimeCurveAffine;
+use ff::{Field, PrimeField};
 use group::Group;
 use halo2curves::msm::msm_best;
 use halo2curves::pairing::MultiMillerLoop;
@@ -32,6 +34,20 @@ pub struct KZGCommitmentScheme<E: Engine> {
     _marker: PhantomData<E>,
 }
 
+impl<E: Engine> KZGCommitmentScheme<E> {
+    fn inner_product(
+        polys: &[Polynomial<E::Fr, Coeff>],
+        scalars: impl Iterator<Item = E::Fr>,
+    ) -> Polynomial<E::Fr, Coeff> {
+        polys
+            .iter()
+            .zip(scalars)
+            .map(|(p, s)| p.clone() * s)
+            .reduce(|acc, p| acc + &p)
+            .unwrap()
+    }
+}
+
 impl<E: MultiMillerLoop + Debug> PolynomialCommitmentScheme<E::Fr> for KZGCommitmentScheme<E>
 where
     E::Fr: SerdeObject,
@@ -81,143 +97,346 @@ where
     ) -> Result<(), Error>
     where
         I: IntoIterator<Item = ProverQuery<'com, E::Fr>> + Clone,
-        E::Fr: Sampleable<T::Hash>,
+        E::Fr: Sampleable<T::Hash> + Ord + Hashable<<T as Transcript>::Hash>,
         E::G1Affine: Hashable<T::Hash>,
     {
-        let v: E::Fr = transcript.squeeze_challenge();
-        let commitment_data = construct_intermediate_sets(prover_query);
+        // Refer to the halo2 book for docs:
+        // https://zcash.github.io/halo2/design/proving-system/multipoint-opening.html
+        let x1: E::Fr = transcript.squeeze_challenge();
+        let x2: E::Fr = transcript.squeeze_challenge();
 
-        for commitment_at_a_point in commitment_data.iter() {
-            let z = commitment_at_a_point.point;
-            let (poly_batch, eval_batch) = commitment_at_a_point
-                .queries
-                .iter()
-                .zip(powers(v))
-                .map(|(query, power_of_v)| {
-                    assert_eq!(query.get_point(), z);
+        let (poly_map, point_sets) = construct_intermediate_sets_zcash(prover_query);
 
-                    let poly = query.get_commitment().poly;
-                    let eval = query.get_eval();
+        let mut q_polys = vec![vec![]; point_sets.len()];
 
-                    (poly.clone() * power_of_v, eval * power_of_v)
+        for com_data in poly_map.iter() {
+            q_polys[com_data.set_index].push(com_data.commitment.poly.clone());
+        }
+
+        let q_polys = q_polys
+            .iter()
+            .map(|polys| Self::inner_product(polys, truncated_powers(x1)))
+            .collect::<Vec<_>>();
+        let f_poly = {
+            let f_polys = point_sets
+                .iter()
+                .zip(q_polys.clone())
+                .map(|(points, q_poly)| {
+                    let mut poly = points.iter().fold(q_poly.clone().values, |poly, point| {
+                        kate_division(&poly, *point)
+                    });
+                    poly.resize(1 << params.max_k() as usize, E::Fr::ZERO);
+                    Polynomial {
+                        values: poly,
+                        _marker: PhantomData,
+                    }
                 })
-                .reduce(|(poly_acc, eval_acc), (poly, eval)| (poly_acc + &poly, eval_acc + eval))
-                .unwrap();
+                .collect::<Vec<_>>();
+            Self::inner_product(&f_polys, powers(x2))
+        };
+        let f_com = Self::commit(params, &f_poly);
+        transcript.write(&f_com).map_err(|_| Error::OpeningError)?;
+        let x3: E::Fr = transcript.squeeze_challenge();
+        let x3 = truncate(x3);
+        for q_poly in q_polys.iter() {
+            transcript
+                .write(&eval_polynomial(&q_poly.values, x3))
+                .map_err(|_| Error::OpeningError)?;
+        }
 
-            let poly_batch = &poly_batch - eval_batch;
-            let witness_poly = Polynomial {
-                values: kate_division(&poly_batch.values, z),
+        let x4: E::Fr = transcript.squeeze_challenge();
+
+        let final_poly = {
+            let mut polys = q_polys;
+            polys.push(f_poly);
+            Self::inner_product(&polys, truncated_powers(x4))
+        };
+        let v = eval_polynomial(&final_poly, x3);
+        let pi = {
+            let pi_poly = Polynomial {
+                values: kate_division(&(&final_poly - v).values, x3),
                 _marker: PhantomData,
             };
-            let w = Self::commit(params, &witness_poly);
+            Self::commit(params, &pi_poly)
+        };
 
-            transcript.write(&w).map_err(|_| Error::OpeningError)?;
-        }
+        transcript.write(&pi).map_err(|_| Error::OpeningError)?;
 
         Ok(())
     }
 
     fn prepare<T: Transcript, I>(verifier_query: I, transcript: &mut T) -> Result<DualMSM<E>, Error>
     where
-        E::Fr: Sampleable<T::Hash>,
+        E::Fr: Sampleable<T::Hash> + Ord + Hashable<T::Hash>,
         E::G1Affine: Hashable<T::Hash>,
         I: IntoIterator<Item = VerifierQuery<E::Fr, Self>> + Clone,
     {
-        let v: E::Fr = transcript.squeeze_challenge();
+        // Refer to the halo2 book for docs:
+        // https://zcash.github.io/halo2/design/proving-system/multipoint-opening.html
+        let x1: E::Fr = transcript.squeeze_challenge();
+        let x2: E::Fr = transcript.squeeze_challenge();
+
+        let (commitment_map, point_sets) = construct_intermediate_sets_zcash(verifier_query);
+
+        let mut q_coms: Vec<_> = vec![vec![]; point_sets.len()];
+        let mut q_eval_sets = vec![vec![]; point_sets.len()];
+        for com_data in commitment_map.into_iter() {
+            let mut msm = MSMKZG::new();
+            msm.append_term(E::Fr::ONE, com_data.commitment.into());
+            q_coms[com_data.set_index].push(msm);
+            q_eval_sets[com_data.set_index].push(com_data.evals);
+        }
 
-        let commitment_data = construct_intermediate_sets(verifier_query);
+        let q_coms = q_coms
+            .iter()
+            .map(|msms| msm_inner_product(msms, truncated_powers(x1)))
+            .collect::<Vec<_>>();
+        let q_eval_sets = q_eval_sets
+            .iter()
+            .map(|evals| evals_inner_product(evals, truncated_powers(x1)))
+            .collect::<Vec<_>>();
+
+        let f_com: E::G1Affine = transcript.read().map_err(|_| Error::SamplingError)?;
+        // Sample a challenge x_3 for checking that f(X) was committed to
+        // correctly.
+        let x3: E::Fr = transcript.squeeze_challenge();
+        let x3 = truncate(x3);
+
+        let mut q_evals_on_x3 = Vec::<E::Fr>::with_capacity(q_eval_sets.len());
+        for _ in 0..q_eval_sets.len() {
+            q_evals_on_x3.push(transcript.read().map_err(|_| Error::SamplingError)?);
+        }
 
-        let w = (0..commitment_data.len())
-            .map(|_| transcript.read().map_err(|_| Error::SamplingError))
-            .collect::<Result<Vec<_>, Error>>()?;
+        // We can compute the expected msm_eval at x_3 using the q_evals provided
+        // by the prover and from x_2
+        let f_eval = point_sets
+            .iter()
+            .zip(q_eval_sets.iter())
+            .zip(q_evals_on_x3.iter())
+            .rev()
+            .fold(E::Fr::ZERO, |acc_eval, ((points, evals), proof_eval)| {
+                let r_poly = lagrange_interpolate(points, evals);
+                let r_eval = eval_polynomial(&r_poly, x3);
+                let eval = points.iter().fold(*proof_eval - &r_eval, |eval, point| {
+                    eval * &(x3 - point).invert().unwrap()
+                });
+                acc_eval * &(x2) + &eval
+            });
+
+        let x4: E::Fr = transcript.squeeze_challenge();
+
+        let final_com = {
+            let mut polys = q_coms;
+            let mut f_com_as_msm = MSMKZG::new();
+            f_com_as_msm.append_term(E::Fr::ONE, f_com.into());
+            polys.push(f_com_as_msm);
+            msm_inner_product(&polys, truncated_powers(x4))
+        };
 
-        let u: E::Fr = transcript.squeeze_challenge();
+        let v = {
+            let mut evals = q_evals_on_x3;
+            evals.push(f_eval);
+            scalars_inner_product(&evals, truncated_powers(x4))
+        };
 
-        let mut commitment_multi = MSMKZG::<E>::new();
-        let mut eval_multi = E::Fr::ZERO;
+        let pi: E::G1Affine = transcript.read().map_err(|_| Error::SamplingError)?;
 
-        let mut witness = MSMKZG::<E>::new();
-        let mut witness_with_aux = MSMKZG::<E>::new();
+        let mut pi_msm = MSMKZG::<E>::new();
+        pi_msm.append_term(E::Fr::ONE, pi.into());
 
-        for ((commitment_at_a_point, wi), power_of_u) in
-            commitment_data.iter().zip(w.into_iter()).zip(powers(u))
-        {
-            assert!(!commitment_at_a_point.queries.is_empty());
-            let z = commitment_at_a_point.point;
+        // Scale zπ
+        let mut scaled_pi = MSMKZG::<E>::new();
+        scaled_pi.append_term(x3, pi.into());
 
-            let (mut commitment_batch, eval_batch) = commitment_at_a_point
-                .queries
-                .iter()
-                .zip(powers(v))
-                .map(|(query, power_of_v)| {
-                    assert_eq!(query.get_point(), z);
+        let mut msm_accumulator = DualMSM::new();
 
-                    let mut commitment = MSMKZG::<E>::new();
-                    commitment.append_term(power_of_v, query.get_commitment().to_curve());
+        // (π, C − vG + zπ)
+        msm_accumulator.left.add_msm(&pi_msm); // π
 
-                    let eval = power_of_v * query.get_eval();
+        msm_accumulator.right.add_msm(&final_com); // C
+        let g0 = E::G1::generator();
+        msm_accumulator.right.append_term(v, -g0); // -vG
+        msm_accumulator.right.add_msm(&scaled_pi); // zπ
 
-                    (commitment, eval)
-                })
-                .reduce(|(mut commitment_acc, eval_acc), (commitment, eval)| {
-                    commitment_acc.add_msm(&commitment);
-                    (commitment_acc, eval_acc + eval)
-                })
-                .unwrap();
+        Ok(msm_accumulator)
+    }
+}
 
-            commitment_batch.scale(power_of_u);
-            commitment_multi.add_msm(&commitment_batch);
-            eval_multi += power_of_u * eval_batch;
+fn msm_inner_product<E>(
+    msms: &[MSMKZG<E>],
+    scalars: impl Iterator<Item = E::Fr>,
+) -> MSMKZG<E>
+where
+    E: MultiMillerLoop + Debug,
+    E::G1Affine: CurveAffine<ScalarExt = E::Fr, CurveExt = E::G1>,
+    E::Fr: Ord,
+{
+    let mut res = MSMKZG::<E>::new();
+    let mut msms = msms.to_vec();
+    for (msm, s) in msms.iter_mut().zip(scalars) {
+        msm.scale(s);
+        res.add_msm(msm);
+    }
+    res
+}
 
-            witness_with_aux.append_term(power_of_u * z, wi.to_curve());
-            witness.append_term(power_of_u, wi.to_curve());
+fn scalars_inner_product<F: PrimeField>(v1: &[F], scalars: impl Iterator<Item = F>) -> F {
+    v1.iter()
+        .zip(scalars)
+        .map(|(s1, s2)| *s1 * s2)
+        .reduce(|acc, s| acc + s)
+        .unwrap()
+}
+/// Inner product with truncated powers of the given x.
+fn evals_inner_product<F: PrimeField>(
+    evals_set: &[Vec<F>],
+    scalars: impl Iterator<Item = F>,
+) -> Vec<F> {
+    let mut res = vec![F::ZERO; evals_set[0].len()];
+    for (poly_evals, s) in evals_set.iter().zip(scalars) {
+        for i in 0..res.len() {
+            res[i] += poly_evals[i] * s;
         }
+    }
+    res
+}
 
-        let mut msm = DualMSM::new();
-
-        msm.left.add_msm(&witness);
-
-        msm.right.add_msm(&witness_with_aux);
-        msm.right.add_msm(&commitment_multi);
-        let g0 = E::G1::generator();
-        msm.right.append_term(eval_multi, -g0);
+#[derive(Clone, Debug)]
+struct CommitmentDataZCash<F, T> {
+    commitment: T,
+    set_index: usize,
+    point_indices: Vec<usize>,
+    evals: Vec<F>,
+}
 
-        Ok(msm)
+impl<F, T> CommitmentDataZCash<F, T> {
+    fn new(commitment: T) -> Self {
+        CommitmentDataZCash {
+            commitment,
+            set_index: 0,
+            point_indices: vec![],
+            evals: vec![],
+        }
     }
 }
 
-#[derive(Debug)]
-struct CommitmentData<F, Q: Query<F>> {
-    queries: Vec<Q>,
-    point: F,
-    _marker: PhantomData<F>,
-}
+type IntermediateSets<F, Q> = (
+    Vec<CommitmentDataZCash<<Q as Query<F>>::Eval, <Q as Query<F>>::Commitment>>,
+    Vec<Vec<F>>,
+);
 
-fn construct_intermediate_sets<F: PrimeField, I, Q: Query<F>>(queries: I) -> Vec<CommitmentData<F, Q>>
+fn construct_intermediate_sets_zcash<F: PrimeField + Ord, I, Q: Query<F>>(
+    queries: I,
+) -> IntermediateSets<F, Q>
 where
     I: IntoIterator<Item = Q> + Clone,
 {
-    let mut point_query_map: Vec<(F, Vec<Q>)> = Vec::new();
-    for query in queries {
-        if let Some(pos) = point_query_map
+    // Construct sets of unique commitments and corresponding information about
+    // their queries.
+    let mut commitment_map: Vec<CommitmentDataZCash<Q::Eval, Q::Commitment>> = vec![];
+
+    // Also construct mapping from a unique point to a point_index. This defines
+    // an ordering on the points.
+    let mut point_index_map = BTreeMap::new();
+
+    // Iterate over all of the queries, computing the ordering of the points
+    // while also creating new commitment data.
+    for query in queries.clone() {
+        let num_points = point_index_map.len();
+        let point_idx = point_index_map
+            .entry(query.get_point())
+            .or_insert(num_points);
+
+        if let Some(pos) = commitment_map
             .iter()
-            .position(|(point, _)| *point == query.get_point())
+            .position(|comm| comm.commitment == query.get_commitment())
         {
-            let (_, queries) = &mut point_query_map[pos];
-            queries.push(query);
+            commitment_map[pos].point_indices.push(*point_idx);
         } else {
-            point_query_map.push((query.get_point(), vec![query]));
+            let mut tmp = CommitmentDataZCash::new(query.get_commitment());
+            tmp.point_indices.push(*point_idx);
+            commitment_map.push(tmp);
+        }
+    }
+
+    // Also construct inverse mapping from point_index to the point
+    let mut inverse_point_index_map = BTreeMap::new();
+    for (&point, &point_index) in point_index_map.iter() {
+        inverse_point_index_map.insert(point_index, point);
+    }
+
+    // Construct map of unique ordered point_idx_sets to their set_idx
+    let mut point_idx_sets = BTreeMap::new();
+    // Also construct mapping from commitment to point_idx_set
+    let mut commitment_set_map = Vec::new();
+
+    for commitment_data in commitment_map.iter() {
+        let mut point_index_set = BTreeSet::new();
+        // Note that point_index_set is ordered, unlike point_indices
+        for &point_index in commitment_data.point_indices.iter() {
+            point_index_set.insert(point_index);
+        }
+
+        // Push point_index_set to CommitmentData for the relevant commitment
+        commitment_set_map.push((commitment_data.commitment, point_index_set.clone()));
+
+        let num_sets = point_idx_sets.len();
+        point_idx_sets.entry(point_index_set).or_insert(num_sets);
+    }
+
+    // Initialise empty evals vec for each unique commitment
+    for commitment_data in commitment_map.iter_mut() {
+        let len = commitment_data.point_indices.len();
+        commitment_data.evals = vec![Q::Eval::default(); len];
+    }
+
+    // Populate set_index, evals and points for each commitment using point_idx_sets
+    for query in queries {
+        // The index of the point at which the commitment is queried
+        let point_index = point_index_map.get(&query.get_point()).unwrap();
+
+        // The point_index_set at which the commitment was queried
+        let mut point_index_set = BTreeSet::new();
+        for (commitment, point_idx_set) in commitment_set_map.iter() {
+            if query.get_commitment() == *commitment {
+                point_index_set = point_idx_set.clone();
+            }
+        }
+        assert!(!point_index_set.is_empty());
+
+        // The set_index of the point_index_set
+        let set_index = point_idx_sets.get(&point_index_set).unwrap();
+        for commitment_data in commitment_map.iter_mut() {
+            if query.get_commitment() == commitment_data.commitment {
+                commitment_data.set_index = *set_index;
+            }
+        }
+        let point_index_set: Vec<usize> = point_index_set.iter().cloned().collect();
+
+        // The offset of the point_index in the point_index_set
+        let point_index_in_set = point_index_set
+            .iter()
+            .position(|i| i == point_index)
+            .unwrap();
+
+        for commitment_data in commitment_map.iter_mut() {
+            if query.get_commitment() == commitment_data.commitment {
+                // Insert the eval using the ordering of the point_index_set
+                commitment_data.evals[point_index_in_set] = query.get_eval();
+            }
+        }
+    }
+
+    // Get actual points in each point set
+    let mut point_sets: Vec<Vec<F>> = vec![Vec::new(); point_idx_sets.len()];
+    for (point_idx_set, &set_idx) in point_idx_sets.iter() {
+        for &point_idx in point_idx_set.iter() {
+            let point = inverse_point_index_map.get(&point_idx).unwrap();
+            point_sets[set_idx].push(*point);
         }
     }
 
-    point_query_map
-        .into_iter()
-        .map(|(point, queries)| CommitmentData {
-            queries,
-            point,
-            _marker: PhantomData,
-        })
-        .collect()
+    (commitment_map, point_sets)
 }
 
 #[cfg(test)]
 mod tests {
@@ -249,7 +468,7 @@ mod tests {
         let proof = create_proof::<_, CircuitTranscript<Blake2bState>>(&params);
 
         let verifier_params = params.verifier_params();
-        verify::<_, CircuitTranscript<Blake2bState>>(&verifier_params, &proof[..], false);
+        verify::<Bls12, CircuitTranscript<Blake2bState>>(&verifier_params, &proof[..], false);
         verify::<Bls12, CircuitTranscript<Blake2bState>>(&verifier_params, &proof[..], true);
     }
@@ -259,7 +478,7 @@ mod tests {
     fn verify<E: MultiMillerLoop + Debug, T: Transcript>(
         params: &ParamsVerifierKZG<E>,
         proof: &[u8],
         should_fail: bool,
     ) where
-        E::Fr: SerdeObject + Hashable<T::Hash> + Sampleable<T::Hash>,
+        E::Fr: SerdeObject + Hashable<T::Hash> + Sampleable<T::Hash> + Ord,
         E::G1Affine: CurveAffine<ScalarExt = <E as Engine>::Fr, CurveExt = <E as Engine>::G1>
             + SerdeObject
             + Hashable<T::Hash>,
@@ -305,7 +524,8 @@ mod tests {
     fn create_proof<E: MultiMillerLoop + Debug, T: Transcript>(kzg_params: &ParamsKZG<E>) -> Vec<u8>
     where
-        E::Fr: WithSmallOrderMulGroup<3> + SerdeObject + Hashable<T::Hash> + Sampleable<T::Hash>,
+        E::Fr:
+            WithSmallOrderMulGroup<3> + SerdeObject + Hashable<T::Hash> + Sampleable<T::Hash> + Ord,
         E::G1Affine: SerdeObject
             + Hashable<T::Hash>
             + Default
diff --git a/src/poly/kzg/msm.rs b/src/poly/kzg/msm.rs
index f8d12b839..fad76373e 100644
--- a/src/poly/kzg/msm.rs
+++ b/src/poly/kzg/msm.rs
@@ -94,6 +94,12 @@ pub struct DualMSM<E: Engine> {
     pub(crate) right: MSMKZG<E>,
 }
 
+/// A [DualMSM] split into left and right vectors of `(Scalar, Point)` tuples
+pub type SplitDualMSM<'a, E> = (
+    Vec<(&'a <E as Engine>::Fr, &'a <E as Engine>::G1)>,
+    Vec<(&'a <E as Engine>::Fr, &'a <E as Engine>::G1)>,
+);
+
 impl<E: Engine> Default for DualMSM<E>
 where
     E::G1Affine: CurveAffine<ScalarExt = E::Fr, CurveExt = E::G1>,
@@ -128,6 +134,23 @@ where
         }
     }
 
+    /// Split the [DualMSM] into `left` and `right`.
+    pub fn split(&self) -> SplitDualMSM<E> {
+        let left = self
+            .left
+            .scalars
+            .iter()
+            .zip(self.left.bases.iter())
+            .collect();
+        let right = self
+            .right
+            .scalars
+            .iter()
+            .zip(self.right.bases.iter())
+            .collect();
+        (left, right)
+    }
+
     /// Scale all scalars in the MSM by some scaling factor
     pub fn scale(&mut self, e: E::Fr) {
         self.left.scale(e);
diff --git a/src/poly/mod.rs b/src/poly/mod.rs
index 7b60626a2..fe96d5424 100644
--- a/src/poly/mod.rs
+++ b/src/poly/mod.rs
@@ -40,7 +40,7 @@ pub enum Error {
 pub trait Basis: Copy + Debug + Send + Sync {}
 
 /// The polynomial is defined as coefficients
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
 pub struct Coeff;
 impl Basis for Coeff {}
@@ -57,7 +57,7 @@ impl Basis for ExtendedLagrangeCoeff {}
 
 /// Represents a univariate polynomial defined over a field and a particular
 /// basis.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Eq, PartialEq)]
 pub struct Polynomial<F, B> {
     pub(crate) values: Vec<F>,
     pub(crate) _marker: PhantomData<B>,
 }
diff --git a/src/poly/query.rs b/src/poly/query.rs
index 7e7fe8dd3..bb7fa2174 100644
--- a/src/poly/query.rs
+++ b/src/poly/query.rs
@@ -7,8 +7,8 @@ use crate::{
     utils::arithmetic::eval_polynomial,
 };
 
-pub trait Query<F>: Sized + Clone + Send + Sync {
-    type Commitment: PartialEq + Copy + Send + Sync;
+pub trait Query<F>: Debug + Sized + Clone + Send + Sync {
+    type Commitment: Debug + PartialEq + Copy + Send + Sync;
     type Eval: Clone + Default + Debug;
 
     fn get_point(&self) -> F;
@@ -36,14 +36,14 @@ where
 }
 
 #[doc(hidden)]
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Debug)]
 pub struct PolynomialPointer<'com, F: PrimeField> {
     pub(crate) poly: &'com Polynomial<F, Coeff>,
 }
 
 impl<'com, F: PrimeField> PartialEq for PolynomialPointer<'com, F> {
     fn eq(&self, other: &Self) -> bool {
-        std::ptr::eq(self.poly, other.poly)
+        self.poly == other.poly
     }
 }
diff --git a/src/transcript/implementors.rs b/src/transcript/implementors.rs
index 9e3c9812e..9f403935b 100644
--- a/src/transcript/implementors.rs
+++ b/src/transcript/implementors.rs
@@ -17,9 +17,9 @@ impl TranscriptHash for Blake2bState {
             .to_state()
     }
 
-    fn absorb(&mut self, input: &Self::Input) -> &mut Self {
+    fn absorb(&mut self, input: &Self::Input) {
         self.update(&[BLAKE2B_PREFIX_COMMON]);
-        self.update(input)
+        self.update(input);
     }
 
     fn squeeze(&mut self) -> Self::Output {
diff --git a/src/transcript/mod.rs b/src/transcript/mod.rs
index 713d8a9f0..c9c75ac35 100644
--- a/src/transcript/mod.rs
+++ b/src/transcript/mod.rs
@@ -21,7 +21,7 @@ pub trait TranscriptHash {
     /// Initialise the hasher
     fn init() -> Self;
     /// Absorb an element
-    fn absorb(&mut self, input: &Self::Input) -> &mut Self;
+    fn absorb(&mut self, input: &Self::Input);
     /// Squeeze an output
     fn squeeze(&mut self) -> Self::Output;
 }
@@ -67,13 +67,22 @@ pub trait Transcript {
     fn finalize(self) -> Vec<u8>;
 }
 
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 /// Transcript used in proofs, parametrised by its hash function.
 pub struct CircuitTranscript<H: TranscriptHash> {
     state: H,
     buffer: Cursor<Vec<u8>>,
 }
 
+impl<H: TranscriptHash> CircuitTranscript<H> {
+    /// Returns a mutable reference to the underlying buffer, for non-default
+    /// reads of the transcript (such as reading an empty proof).
+    /// TODO: Should we remove this and instead write a function that returns the proof size?
+    pub fn buffer(&mut self) -> &mut Cursor<Vec<u8>> {
+        &mut self.buffer
+    }
+}
+
 impl<H: TranscriptHash> Transcript for CircuitTranscript<H> {
     type Hash = H;
diff --git a/src/utils/arithmetic.rs b/src/utils/arithmetic.rs
index 697e3a02c..a0bbff623 100644
--- a/src/utils/arithmetic.rs
+++ b/src/utils/arithmetic.rs
@@ -230,6 +230,18 @@ pub fn lagrange_interpolate<F: Field>(points: &[F], evals: &[F]) -> Vec<F> {
     }
 }
 
+use num_bigint::BigUint;
+pub(crate) fn truncate<F: PrimeField>(scalar: F) -> F {
+    let nb_bytes = F::NUM_BITS.div_ceil(8).div_ceil(2) as usize;
+    let bytes = scalar.to_repr().as_ref()[..nb_bytes].to_vec();
+    let bi = BigUint::from_bytes_le(&bytes);
+    F::from_str_vartime(&BigUint::to_string(&bi)).unwrap()
+}
+
+pub(crate) fn truncated_powers<F: PrimeField>(base: F) -> impl Iterator<Item = F> {
+    powers(base).map(truncate)
+}
+
 pub(crate) fn powers<F: Field>(base: F) -> impl Iterator<Item = F> {
     std::iter::successors(Some(F::ONE), move |power| Some(base * power))
 }
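
Background for review: the rewritten `multi_open`/`prepare` pair implements the multipoint opening argument from the halo2 book page linked in the code, with `x1`–`x4` playing the book's challenge roles and the batching powers truncated. As a sketch in that notation, where S_i is a set of query points, q_i(X) batches (with truncated powers of x_1) the polynomials queried exactly at S_i, and r_i is the interpolation of q_i's claimed evaluations over S_i:

```latex
f(X) \;=\; \sum_i x_2^{\,i}\,\frac{q_i(X) - r_i(X)}{\prod_{z \in S_i}(X - z)},
\qquad
f(x_3) \;=\; \sum_i x_2^{\,i}\,\frac{q_i(x_3) - r_i(x_3)}{\prod_{z \in S_i}(x_3 - z)} .
```

The prover commits to f and sends each q_i(x_3); the verifier recomputes the r_i with `lagrange_interpolate` and reconstructs f(x_3) — the `f_eval` fold in `prepare`. A final challenge x_4 then batches [q_1, …, q_n, f] into `final_poly`, which is opened at the truncated point x_3 with a single KZG quotient π.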
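The `DualMSM` returned by `prepare` defers that single opening to one pairing check; the `// (π, C − vG + zπ)` comment is the standard rearrangement of the KZG identity. With C the commitment to `final_poly`, v = final_poly(x_3), G the G1 generator, and τ the toxic-waste point of the SRS:

```latex
e\bigl(\pi,\ [\tau - x_3]_2\bigr) = e\bigl(C - v\,G,\ [1]_2\bigr)
\;\Longleftrightarrow\;
e\bigl(\pi,\ [\tau]_2\bigr) = e\bigl(C - v\,G + x_3\,\pi,\ [1]_2\bigr),
```

so `left` holds π and `right` holds C − vG + x₃π — exactly the two G1 sides that the Miller loop (or a recursive verifier, via the new `DualMSM::split`) consumes.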
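The new `truncate`/`truncated_powers` helpers cut challenges to half the field's bit width, presumably so the recursive verifier circuit can treat them as half-width scalars. Below is a minimal standalone sketch of the behaviour — assuming `blstrs::Scalar` (the backend pinned in Cargo.toml above) and the little-endian `to_repr` that `truncate` itself relies on:

```rust
use blstrs::Scalar;
use ff::{Field, PrimeField};
use num_bigint::BigUint;

// Mirror of the patch's truncate(): keep the low half of the little-endian
// byte representation and lift it back into the field.
fn truncate<F: PrimeField>(scalar: F) -> F {
    let nb_bytes = F::NUM_BITS.div_ceil(8).div_ceil(2) as usize;
    let bytes = scalar.to_repr().as_ref()[..nb_bytes].to_vec();
    let bi = BigUint::from_bytes_le(&bytes);
    F::from_str_vartime(&BigUint::to_string(&bi)).unwrap()
}

fn main() {
    // -1 is the largest scalar, with all high bits set.
    let full = -Scalar::ONE;
    let half = truncate(full);
    // For the 255-bit BLS12-381 scalar field, 16 bytes survive, so the
    // truncated challenge fits in 128 bits (assuming a LE to_repr).
    let bits = BigUint::from_bytes_le(half.to_repr().as_ref()).bits();
    assert!(bits <= 128);
    println!("truncated to {bits} bits: {half:?}");
}
```

Note that `truncated_powers` truncates term by term rather than taking powers of a truncated base, so every scalar in the batched MSMs stays half-width, not just the base challenge.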