From eb5d1aadbe4956dc8fd08ee9c72fccde80014811 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 13 Dec 2023 17:37:04 +0100 Subject: [PATCH 01/79] Reimplement keygen_vk for fe-be split as keygen_vk_v2 --- halo2_proofs/src/plonk.rs | 59 +++++++++++ halo2_proofs/src/plonk/circuit.rs | 157 +++++++++++++++++++++++++++++- halo2_proofs/src/plonk/keygen.rs | 80 ++++++++++++++- 3 files changed, 293 insertions(+), 3 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 5506f94a68..60badc44a6 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -43,6 +43,65 @@ pub use verifier::*; use evaluation::Evaluator; use std::io; +/// This is a verifying key which allows for the verification of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct VerifyingKeyV2 { + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystemV2Backend, + /// Cached maximum degree of `cs` (which doesn't change after construction). + cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, +} + +impl VerifyingKeyV2 { + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystemV2Backend, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. + let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. + transcript_repr: C::Scalar::ZERO, + }; + + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); + + // let s = format!("{:?}", vk.pinned()); + // TODO(Edu): Is it Ok to not use the pinned Vk here? We removed a lot of stuff from Vk + // and Cs, so maybe we already have the same as in PinnedVerificationKey? 
+ // TODO(Edu): We removed queries information from the ConstraintSystem, so this output will + // definitely be a breaking change. + let s = format!("{:?}", vk); + + hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); + + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + + vk + } +} + /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. #[derive(Clone, Debug)] diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 98445a5881..a59d212abd 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -3,7 +3,7 @@ use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; use crate::{ circuit::{Layouter, Region, Value}, - poly::Rotation, + poly::{LagrangeCoeff, Polynomial, Rotation}, }; use core::cmp::max; use core::ops::{Add, Mul}; @@ -1545,6 +1545,161 @@ impl Gate { } } +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct PreprocessingV2 { + // TODO(Edu): Can we replace this by a simpler structure? + pub(crate) permutation: permutation::keygen::Assembly, + // TODO(Edu): Replace this by Vec> + pub(crate) fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. +#[derive(Debug, Clone)] +pub struct CompiledCircuitV2 { + pub(crate) preprocessing: PreprocessingV2, + pub(crate) cs: ConstraintSystemV2Backend, +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. 
+#[derive(Debug, Clone)] +pub struct ConstraintSystemV2Backend { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + // pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + /// This is a cached vector that maps virtual selectors to the concrete + /// fixed column that they were compressed into. This is just used by dev + /// tooling right now. + // pub(crate) selector_map: Vec>, + pub(crate) gates: Vec>, + // pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. + num_advice_queries: Vec, + // pub(crate) instance_queries: Vec<(Column, Rotation)>, + // pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. 
+ pub(crate) general_column_annotations: HashMap, + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. + // pub(crate) constants: Vec>, + + // pub(crate) minimum_degree: Option, +} + +impl ConstraintSystemV2Backend { + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = self.permutation.required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. + degree = std::cmp::max( + degree, + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0), + ); + + // std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + degree + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) + + 1 // for at least one row + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. 
+ pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } +} + /// This is a description of the circuit environment, such as the gate, column and /// permutation arrangements. 
#[derive(Debug, Clone)] diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 984eecb9e8..3d46b177ed 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -7,11 +7,12 @@ use group::Curve; use super::{ circuit::{ - Advice, Any, Assignment, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, Instance, - Selector, + Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, ConstraintSystem, Fixed, + FloorPlanner, Instance, Selector, }, evaluation::Evaluator, permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, + VerifyingKeyV2, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -202,6 +203,81 @@ impl Assignment for Assembly { } } +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. +pub fn keygen_vk_v2<'params, C, P>( + params: &P, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + C::Scalar: FromUniformBytes<64>, +{ + let cs = &circuit.cs; + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + // let (domain, cs, config) = create_domain::( + // params.k(), + // #[cfg(feature = "circuit-params")] + // circuit.params(), + // ); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + // let mut assembly: Assembly = Assembly { + // k: params.k(), + // fixed: vec![domain.empty_lagrange_assigned(); cs.num_fixed_columns], + // permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), + // // selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], + // usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), + // _marker: std::marker::PhantomData, + // }; + + // Synthesize the circuit to obtain URS + // ConcreteCircuit::FloorPlanner::synthesize( + // &mut assembly, + // circuit, + // config, + // cs.constants.clone(), + // )?; + + // let mut fixed = 
batch_invert_assigned(assembly.fixed); + // let (cs, selector_polys) = if compress_selectors { + // cs.compress_selectors(assembly.selectors.clone()) + // } else { + // // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. + // let selectors = std::mem::take(&mut assembly.selectors); + // cs.directly_convert_selectors_to_fixed(selectors) + // }; + // fixed.extend( + // selector_polys + // .into_iter() + // .map(|poly| domain.lagrange_from_vec(poly)), + // ); + + let permutation_vk = + circuit + .preprocessing + .permutation + .clone() + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = circuit + .preprocessing + .fixed + .iter() + .map(|poly| params.commit_lagrange(poly, Blind::default()).to_affine()) + .collect(); + + Ok(VerifyingKeyV2::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs.clone(), + )) +} + /// Generate a `VerifyingKey` from an instance of `Circuit`. /// By default, selector compression is turned **off**. pub fn keygen_vk<'params, C, P, ConcreteCircuit>( From d318a9d03dd2450e7b02fbcb5eb0386715a6104b Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 13 Dec 2023 17:48:26 +0100 Subject: [PATCH 02/79] Reimplement keygen_pk for fe-be split as keygen_pk_v2 --- halo2_proofs/src/plonk.rs | 16 ++++ halo2_proofs/src/plonk/evaluation.rs | 91 +++++++++++++++++++- halo2_proofs/src/plonk/keygen.rs | 124 +++++++++++++++++++-------- 3 files changed, 192 insertions(+), 39 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 60badc44a6..05f932b98b 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -373,6 +373,22 @@ pub struct PinnedVerificationKey<'a, C: CurveAffine> { fixed_commitments: &'a Vec, permutation: &'a permutation::VerifyingKey, } + +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. 
+#[derive(Clone, Debug)] +pub struct ProvingKeyV2 { + vk: VerifyingKeyV2, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: permutation::ProvingKey, + ev: Evaluator, +} + /// This is a proving key which allows for the creation of proofs for a /// particular circuit. #[derive(Clone, Debug)] diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index 431c487c7e..43958e7c9e 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -7,7 +7,7 @@ use crate::{ }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; -use super::{shuffle, ConstraintSystem, Expression}; +use super::{shuffle, ConstraintSystem, ConstraintSystemV2Backend, Expression}; /// Return the index in the polynomial of size `isize` after rotation `rot`. fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { @@ -205,6 +205,95 @@ pub struct CalculationInfo { } impl Evaluator { + /// Creates a new evaluation structure + pub fn new_v2(cs: &ConstraintSystemV2Backend) -> Self { + let mut ev = Evaluator::default(); + + // Custom gates + let mut parts = Vec::new(); + for gate in cs.gates.iter() { + parts.extend( + gate.polynomials() + .iter() + .map(|poly| ev.custom_gates.add_expression(poly)), + ); + } + ev.custom_gates.add_calculation(Calculation::Horner( + ValueSource::PreviousValue(), + parts, + ValueSource::Y(), + )); + + // Lookups + for lookup in cs.lookups.iter() { + let mut graph = GraphEvaluator::default(); + + let mut evaluate_lc = |expressions: &Vec>| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + // Input coset + let compressed_input_coset = evaluate_lc(&lookup.input_expressions); + // table coset + let compressed_table_coset = 
evaluate_lc(&lookup.table_expressions); + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + let right_gamma = graph.add_calculation(Calculation::Add( + compressed_table_coset, + ValueSource::Gamma(), + )); + let lc = graph.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Beta(), + )); + graph.add_calculation(Calculation::Mul(lc, right_gamma)); + + ev.lookups.push(graph); + } + + // Shuffles + for shuffle in cs.shuffles.iter() { + let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + let mut graph_input = GraphEvaluator::default(); + let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); + let _ = graph_input.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Gamma(), + )); + + let mut graph_shuffle = GraphEvaluator::default(); + let compressed_shuffle_coset = + evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); + let _ = graph_shuffle.add_calculation(Calculation::Add( + compressed_shuffle_coset, + ValueSource::Gamma(), + )); + + ev.shuffles.push(graph_input); + ev.shuffles.push(graph_shuffle); + } + + ev + } + /// Creates a new evaluation structure pub fn new(cs: &ConstraintSystem) -> Self { let mut ev = Evaluator::default(); diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 3d46b177ed..405e1c11f8 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -11,8 +11,8 @@ use super::{ FloorPlanner, Instance, Selector, }, evaluation::Evaluator, - permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, - VerifyingKeyV2, + permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, ProvingKeyV2, + VerifyingKey, VerifyingKeyV2, }; 
use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -215,47 +215,11 @@ where { let cs = &circuit.cs; let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - // let (domain, cs, config) = create_domain::( - // params.k(), - // #[cfg(feature = "circuit-params")] - // circuit.params(), - // ); if (params.n() as usize) < cs.minimum_rows() { return Err(Error::not_enough_rows_available(params.k())); } - // let mut assembly: Assembly = Assembly { - // k: params.k(), - // fixed: vec![domain.empty_lagrange_assigned(); cs.num_fixed_columns], - // permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), - // // selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], - // usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), - // _marker: std::marker::PhantomData, - // }; - - // Synthesize the circuit to obtain URS - // ConcreteCircuit::FloorPlanner::synthesize( - // &mut assembly, - // circuit, - // config, - // cs.constants.clone(), - // )?; - - // let mut fixed = batch_invert_assigned(assembly.fixed); - // let (cs, selector_polys) = if compress_selectors { - // cs.compress_selectors(assembly.selectors.clone()) - // } else { - // // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. - // let selectors = std::mem::take(&mut assembly.selectors); - // cs.directly_convert_selectors_to_fixed(selectors) - // }; - // fixed.extend( - // selector_polys - // .into_iter() - // .map(|poly| domain.lagrange_from_vec(poly)), - // ); - let permutation_vk = circuit .preprocessing @@ -367,6 +331,90 @@ where )) } +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. 
+pub fn keygen_pk_v2<'params, C, P>( + params: &P, + vk: VerifyingKeyV2, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .iter() + .map(|poly| vk.domain.lagrange_to_coeff(poly.clone())) + .collect(); + + let fixed_cosets = fixed_polys + .iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let permutation_pk = + circuit + .preprocessing + .permutation + .clone() + .build_pk(params, &vk.domain, &cs.permutation); + + // Compute l_0(X) + // TODO: this can be done more efficiently + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new_v2(&vk.cs); + + Ok(ProvingKeyV2 { + vk, + l0, + l_last, + l_active_row, + fixed_values: circuit.preprocessing.fixed.clone(), + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} + /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. pub fn keygen_pk<'params, C, P, ConcreteCircuit>( params: &P, From a60192577f63ac4654abaa56c7216072bb474b60 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 13 Dec 2023 19:06:12 +0100 Subject: [PATCH 03/79] WIP: ProverV2 implementation --- halo2_proofs/src/plonk.rs | 25 +++++++ halo2_proofs/src/plonk/prover.rs | 117 ++++++++++++++++++++++++++++++- 2 files changed, 139 insertions(+), 3 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 05f932b98b..af9aac3f87 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -100,6 +100,16 @@ impl VerifyingKeyV2 { vk } + + /// Hashes a verification key into a transcript. 
+ pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; + + Ok(()) + } } /// This is a verifying key which allows for the verification of proofs for a @@ -389,6 +399,21 @@ pub struct ProvingKeyV2 { ev: Evaluator, } +// impl ProvingKeyV2 +// where +// C::Scalar: FromUniformBytes<64>, +// { +// /// Hashes a verification key into a transcript. +// pub fn hash_into, T: Transcript>( +// &self, +// transcript: &mut T, +// ) -> io::Result<()> { +// transcript.common_scalar(self.transcript_repr)?; +// +// Ok(()) +// } +// } + /// This is a proving key which allows for the creation of proofs for a /// particular circuit. #[derive(Clone, Debug)] diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index cd0d7306a9..91b44356dd 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -8,11 +8,11 @@ use std::{collections::HashMap, iter}; use super::{ circuit::{ sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, - Instance, Selector, + Advice, Any, Assignment, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, + Fixed, FloorPlanner, Instance, Selector, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, + ChallengeX, ChallengeY, Error, ProvingKey, ProvingKeyV2, }; use crate::{ @@ -30,6 +30,117 @@ use crate::{ }; use group::prime::PrimeCurveAffine; +struct InstanceSingle { + pub instance_values: Vec>, + pub instance_polys: Vec>, +} + +pub struct ProverV2< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +> { + params: &'params Scheme::ParamsProver, + instance: Vec>, + _marker: std::marker::PhantomData<(Scheme, P, E, R, T)>, +} + +impl< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + 
E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2<'params, Scheme, P, E, R, T> +{ + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &ProvingKeyV2, + circuit: &CompiledCircuitV2, + instance: &[&[Scheme::Scalar]], + mut rng: R, + mut transcript: T, + ) -> Result + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + if instance.len() != pk.vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + + // Hash verification key into transcript + pk.vk.hash_into(&mut transcript)?; + + let meta = &circuit.cs; + + let domain = &pk.vk.domain; + + let instance: Vec> = iter::once(instance) + .map(|instance| -> Result, Error> { + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); + } + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + transcript.common_scalar(*value)?; + } + *poly = *value; + } + Ok(poly) + }) + .collect::, _>>()?; + + if P::QUERY_INSTANCE { + let instance_commitments_projective: Vec<_> = instance_values + .iter() + .map(|poly| params.commit_lagrange(poly, Blind::default())) + .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); + + for commitment in &instance_commitments { + transcript.common_point(*commitment)?; + } + } + + let instance_polys: Vec<_> = instance_values + .iter() + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) + }) + .collect(); + + Ok(InstanceSingle { + instance_values, + instance_polys, + }) + }) + .collect::, 
_>>()?; + + Ok(ProverV2 { + params, + instance, + _marker: std::marker::PhantomData {}, + }) + } +} + /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` From 4510059ae72b57b999e6acfe3a985e45b61812e4 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Thu, 14 Dec 2023 15:44:23 +0100 Subject: [PATCH 04/79] WIP --- halo2_proofs/src/plonk/circuit.rs | 17 ++- halo2_proofs/src/plonk/prover.rs | 182 +++++++++++++++++++++++++++++- 2 files changed, 190 insertions(+), 9 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index a59d212abd..1526873a08 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1550,7 +1550,8 @@ impl Gate { pub struct PreprocessingV2 { // TODO(Edu): Can we replace this by a simpler structure? pub(crate) permutation: permutation::keygen::Assembly, - // TODO(Edu): Replace this by Vec> + // TODO(Edu): Replace this by Vec>. Requires some methods of Polynomial to take Vec + // instead pub(crate) fixed: Vec>, } @@ -1576,9 +1577,9 @@ pub struct ConstraintSystemV2Backend { pub(crate) unblinded_advice_columns: Vec, /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, + pub(crate) advice_column_phase: Vec, /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, + pub(crate) challenge_phase: Vec, /// This is a cached vector that maps virtual selectors to the concrete /// fixed column that they were compressed into. This is just used by dev @@ -1698,6 +1699,16 @@ impl ConstraintSystemV2Backend { // off-by-one errors. 
factors + 1 } + + pub(crate) fn phases(&self) -> Vec { + let max_phase = self + .advice_column_phase + .iter() + .max() + .map(|phase| phase.0) + .unwrap_or_default(); + (0..=max_phase).collect() + } } /// This is a description of the circuit environment, such as the gate, column and diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 91b44356dd..a53a37cc74 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -9,7 +9,7 @@ use super::{ circuit::{ sealed::{self}, Advice, Any, Assignment, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, - Fixed, FloorPlanner, Instance, Selector, + ConstraintSystemV2Backend, Fixed, FloorPlanner, Instance, Selector, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, ProvingKeyV2, @@ -21,7 +21,7 @@ use crate::{ plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, - Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, + Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, }, }; use crate::{ @@ -30,12 +30,23 @@ use crate::{ }; use group::prime::PrimeCurveAffine; +#[derive(Debug)] struct InstanceSingle { pub instance_values: Vec>, pub instance_polys: Vec>, } +#[derive(Debug, Clone)] +struct AdviceSingle { + pub advice_polys: Vec>, + pub advice_blinds: Vec>, +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. 
+#[derive(Debug)] pub struct ProverV2< + 'a, 'params, Scheme: CommitmentScheme, P: Prover<'params, Scheme>, @@ -44,25 +55,38 @@ pub struct ProverV2< T: TranscriptWrite, > { params: &'params Scheme::ParamsProver, + pk: &'a ProvingKeyV2, + cs: &'a ConstraintSystemV2Backend, + advice_queries: Vec<(Column, Rotation)>, + instance_queries: Vec<(Column, Rotation)>, + fixed_queries: Vec<(Column, Rotation)>, + phases: Vec, instance: Vec>, + rng: R, + transcript: T, + advice: AdviceSingle, + challenges: HashMap, + next_phase_index: usize, _marker: std::marker::PhantomData<(Scheme, P, E, R, T)>, } impl< + 'a, 'params, Scheme: CommitmentScheme, P: Prover<'params, Scheme>, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, - > ProverV2<'params, Scheme, P, E, R, T> + > ProverV2<'a, 'params, Scheme, P, E, R, T> { + /// Create a new prover object pub fn new( params: &'params Scheme::ParamsProver, - pk: &ProvingKeyV2, - circuit: &CompiledCircuitV2, + pk: &'a ProvingKeyV2, + circuit: &'a CompiledCircuitV2, instance: &[&[Scheme::Scalar]], - mut rng: R, + rng: R, mut transcript: T, ) -> Result where @@ -72,10 +96,14 @@ impl< return Err(Error::InvalidInstances); } + // TODO(Edu): Calculate advice_queries, fixed_queries, instance_queries from the gates and + // lookup expressions. 
+ // Hash verification key into transcript pk.vk.hash_into(&mut transcript)?; let meta = &circuit.cs; + let phases = circuit.cs.phases(); let domain = &pk.vk.domain; @@ -133,12 +161,154 @@ impl< }) .collect::, _>>()?; + let advice = AdviceSingle:: { + advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], + advice_blinds: vec![Blind::default(); meta.num_advice_columns], + }; + let challenges = HashMap::::with_capacity(meta.num_challenges); + Ok(ProverV2 { params, + cs: &circuit.cs, + pk, + advice_queries: todo!(), + instance_queries: todo!(), + fixed_queries: todo!(), + phases, instance, + rng, + transcript, + advice, + challenges, + next_phase_index: 0, _marker: std::marker::PhantomData {}, }) } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + pub fn commit_phase( + &mut self, + phase: u8, + // TODO: Turn this into Vec>>. Requires batch_invert_assigned to work with + // Vec + witness: Vec, LagrangeCoeff>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let current_phase = match self.phases.get(self.next_phase_index) { + Some(phase) => phase, + None => { + panic!("TODO: Return Error instead. All phases already commited"); + } + }; + if phase != *current_phase { + panic!("TODO: Return Error instead. Committing invalid phase"); + } + + let params = self.params; + let meta = self.cs; + let domain = self.pk.vk.domain; + + let mut transcript = self.transcript; + let mut rng = self.rng; + + let mut advice = self.advice; + let mut challenges = self.challenges; + + let column_indices = meta + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + // TODO: Check that witness.len() is the expected number of advice columns. 
+ + // Check that all current_phase advice columns are Some + for (column_index, advice_column) in witness.iter().enumerate() { + if column_indices.contains(&column_index) { + // TODO: Check that column_index in witness is Some + // TODO: Check that the column length is `params.n()` + } else { + // TODO: Check that column_index in witness is None + }; + } + let mut advice_values = + batch_invert_assigned::(witness.into_iter().flatten().collect()); + let unblinded_advice = HashSet::from_iter(meta.unblinded_advice_columns.clone()); + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + + // Add blinding factors to advice columns + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] { + *cell = Scheme::Scalar::random(&mut rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] 
{ + assert_eq!(*cell, Scheme::Scalar::ZERO); + } + } + } + + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) + .collect(); + let mut advice_commitments = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments, + ); + let advice_commitments = advice_commitments; + drop(advice_commitments_projective); + + for commitment in &advice_commitments { + transcript.write_point(*commitment)?; + } + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + + for (index, phase) in meta.challenge_phase.iter().enumerate() { + if current_phase == phase { + let existing = + challenges.insert(index, *transcript.squeeze_challenge_scalar::<()>()); + assert!(existing.is_none()); + } + } + + self.next_phase_index += 1; + Ok(challenges.clone()) + } + + pub fn create_proof(self) -> Result { + todo!() + } } /// This creates a proof for the provided `circuit` when given the public From 1b35fab453a62f28f3e399c1c68ee4e69aa88874 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Thu, 14 Dec 2023 17:24:41 +0100 Subject: [PATCH 05/79] Complete create_proof refactor for fe-be split --- halo2_proofs/src/plonk/circuit.rs | 2 +- halo2_proofs/src/plonk/evaluation.rs | 313 +++++++++++++- halo2_proofs/src/plonk/lookup/prover.rs | 418 ++++++++++++++++++- halo2_proofs/src/plonk/permutation/prover.rs | 240 +++++++++++ halo2_proofs/src/plonk/prover.rs | 378 ++++++++++++++--- halo2_proofs/src/plonk/shuffle/prover.rs | 213 ++++++++++ 6 files changed, 1506 insertions(+), 58 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 1526873a08..ae6ec15703 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1704,8 +1704,8 @@ impl ConstraintSystemV2Backend { let max_phase = self .advice_column_phase .iter() + .cloned() .max() - .map(|phase| phase.0) .unwrap_or_default(); (0..=max_phase).collect() } diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index 43958e7c9e..efd4f6081f 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -1,5 +1,5 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; +use crate::plonk::{lookup, permutation, Any, ProvingKey, ProvingKeyV2}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -383,6 +383,317 @@ impl Evaluator { ev } + /// Evaluate h poly + // NOTE: Copy of evaluate_h with ProvingKeyV2 + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn evaluate_h_v2( + &self, + pk: &ProvingKeyV2, + advice_polys: &[&[Polynomial]], + instance_polys: &[&[Polynomial]], + challenges: &[C::ScalarExt], + y: C::ScalarExt, + beta: C::ScalarExt, + gamma: C::ScalarExt, + theta: C::ScalarExt, + lookups: &[Vec>], + shuffles: &[Vec>], + permutations: &[permutation::prover::Committed], + ) -> Polynomial { + let domain = &pk.vk.domain; + let size = domain.extended_len(); + let rot_scale = 1 << (domain.extended_k() - 
domain.k()); + let fixed = &pk.fixed_cosets[..]; + let extended_omega = domain.get_extended_omega(); + let isize = size as i32; + let one = C::ScalarExt::ONE; + let l0 = &pk.l0; + let l_last = &pk.l_last; + let l_active_row = &pk.l_active_row; + let p = &pk.vk.cs.permutation; + + // Calculate the advice and instance cosets + let advice: Vec>> = advice_polys + .iter() + .map(|advice_polys| { + advice_polys + .iter() + .map(|poly| domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + let instance: Vec>> = instance_polys + .iter() + .map(|instance_polys| { + instance_polys + .iter() + .map(|poly| domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + + let mut values = domain.empty_extended(); + + // Core expression evaluations + let num_threads = multicore::current_num_threads(); + for ((((advice, instance), lookups), shuffles), permutation) in advice + .iter() + .zip(instance.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .zip(permutations.iter()) + { + // Custom gates + multicore::scope(|scope| { + let chunk_size = (size + num_threads - 1) / num_threads; + for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { + let start = thread_idx * chunk_size; + scope.spawn(move |_| { + let mut eval_data = self.custom_gates.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + *value = self.custom_gates.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + value, + idx, + rot_scale, + isize, + ); + } + }); + } + }); + + // Permutations + let sets = &permutation.sets; + if !sets.is_empty() { + let blinding_factors = pk.vk.cs.blinding_factors(); + let last_rotation = Rotation(-((blinding_factors + 1) as i32)); + let chunk_len = pk.vk.cs.degree() - 2; + let delta_start = beta * &C::Scalar::ZETA; + + let first_set = sets.first().unwrap(); + let last_set = sets.last().unwrap(); + + // Permutation constraints + parallelize(&mut 
values, |values, start| { + let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); + + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + *value = *value * y + + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); + // Enforce only for the last set. + // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + *value = *value * y + + ((last_set.permutation_product_coset[idx] + * last_set.permutation_product_coset[idx] + - last_set.permutation_product_coset[idx]) + * l_last[idx]); + // Except for the first set, enforce. + // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + for (set_idx, set) in sets.iter().enumerate() { + if set_idx != 0 { + *value = *value * y + + ((set.permutation_product_coset[idx] + - permutation.sets[set_idx - 1].permutation_product_coset + [r_last]) + * l0[idx]); + } + } + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) + // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) + // ) + let mut current_delta = delta_start * beta_term; + for ((set, columns), cosets) in sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(pk.permutation.cosets.chunks(chunk_len)) + { + let mut left = set.permutation_product_coset[r_next]; + for (values, permutation) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => &fixed[column.index()], + Any::Instance => &instance[column.index()], + }) + .zip(cosets.iter()) + { + left *= values[idx] + beta * permutation[idx] + gamma; + } + + let mut right = set.permutation_product_coset[idx]; + for values in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => 
&fixed[column.index()], + Any::Instance => &instance[column.index()], + }) { + right *= values[idx] + current_delta + gamma; + current_delta *= &C::Scalar::DELTA; + } + + *value = *value * y + ((left - right) * l_active_row[idx]); + } + beta_term *= &extended_omega; + } + }); + } + + // Lookups + for (n, lookup) in lookups.iter().enumerate() { + // Polynomials required for this lookup. + // Calculated here so these only have to be kept in memory for the short time + // they are actually needed. + let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); + let permuted_input_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_input_poly.clone()); + let permuted_table_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_table_poly.clone()); + + // Lookup constraints + parallelize(&mut values, |values, start| { + let lookup_evaluator = &self.lookups[n]; + let mut eval_data = lookup_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let table_value = lookup_evaluator.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); + + let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * (z(X)^2 - z(X)) = 0 + *value = *value * y + + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) + // (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + // ) = 0 + *value = *value * y + + ((product_coset[r_next] + * (permuted_input_coset[idx] + beta) + * (permuted_table_coset[idx] + gamma) + - product_coset[idx] * table_value) + * l_active_row[idx]); + // Check that the first values in the permuted input expression and permuted + // fixed expression are the same. + // l_0(X) * (a'(X) - s'(X)) = 0 + *value = *value * y + (a_minus_s * l0[idx]); + // Check that each value in the permuted lookup input expression is either + // equal to the value above it, or the value at the same index in the + // permuted table expression. + // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + *value = *value * y + + (a_minus_s + * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) + * l_active_row[idx]); + } + }); + } + + // Shuffle constraints + for (n, shuffle) in shuffles.iter().enumerate() { + let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); + + // Shuffle constraints + parallelize(&mut values, |values, start| { + let input_evaluator = &self.shuffles[2 * n]; + let shuffle_evaluator = &self.shuffles[2 * n + 1]; + let mut eval_data_input = shuffle_evaluator.instance(); + let mut eval_data_shuffle = shuffle_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let input_value = input_evaluator.evaluate( + &mut eval_data_input, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let shuffle_value = shuffle_evaluator.evaluate( + &mut eval_data_shuffle, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * (z(X)^2 - z(X)) = 0 + *value = *value * y + + 
((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 + *value = *value * y + + l_active_row[idx] + * (product_coset[r_next] * shuffle_value + - product_coset[idx] * input_value) + } + }); + } + } + values + } + /// Evaluate h poly #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn evaluate_h( diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs/src/plonk/lookup/prover.rs index 028b298853..203b554939 100644 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ b/halo2_proofs/src/plonk/lookup/prover.rs @@ -1,6 +1,6 @@ use super::super::{ circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, + ProvingKey, ProvingKeyV2, }; use super::Argument; use crate::plonk::evaluation::evaluate; @@ -51,6 +51,112 @@ pub(in crate::plonk) struct Evaluated { } impl> Argument { + /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, + /// obtaining A' and S', and + /// - constructs Permuted struct using permuted_input_value = A', and + /// permuted_table_expression = S'. + /// The Permuted struct is used to update the Lookup, and is then returned. 
+ // NOTE: Copy of commit_permuted that uses ProvingKeyV2 + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_permuted_v2< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKeyV2, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let compressed_input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&self.table_expressions); + + // Permute compressed (InputExpression, TableExpression) pair + let (permuted_input_expression, permuted_table_expression) = permute_expression_pair_v2( + pk, + params, + domain, + &mut rng, + &compressed_input_expression, + &compressed_table_expression, + )?; + + // Closure to construct commitment to vector of values + let mut commit_values = |values: &Polynomial| { + let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); + let blind = Blind(C::Scalar::random(&mut rng)); + let commitment = params.commit_lagrange(values, blind).to_affine(); + (poly, blind, commitment) + }; + + 
// Commit to permuted input expression + let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = + commit_values(&permuted_input_expression); + + // Commit to permuted table expression + let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = + commit_values(&permuted_table_expression); + + // Hash permuted input commitment + transcript.write_point(permuted_input_commitment)?; + + // Hash permuted table commitment + transcript.write_point(permuted_table_commitment)?; + + Ok(Permuted { + compressed_input_expression, + permuted_input_expression, + permuted_input_poly, + permuted_input_blind, + compressed_table_expression, + permuted_table_expression, + permuted_table_poly, + permuted_table_blind, + }) + } /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions /// [S_0, S_1, ..., S_{m-1}], this method /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} @@ -159,6 +265,151 @@ impl> Argument { } impl Permuted { + /// Given a Lookup with input expressions, table expressions, and the permuted + /// input expression and permuted table expression, this method constructs the + /// grand product polynomial over the lookup. The grand product polynomial + /// is used to populate the Product struct. The Product struct is + /// added to the Lookup and finally returned by the method. + // NOTE: Copy of commit_permuted with ProvingKeyV2 + pub(in crate::plonk) fn commit_product_v2< + 'params, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + pk: &ProvingKeyV2, + params: &P, + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + // Goal is to compute the products of fractions + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... 
+ \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where a_j(X) is the jth input expression in this lookup, + // where a'(X) is the compression of the permuted input expressions, + // s_j(X) is the jth table expression in this lookup, + // s'(X) is the compression of the permuted table expressions, + // and i is the ith row of the expression. + let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; + // Denominator uses the permuted input expression and permuted table expression + parallelize(&mut lookup_product, |lookup_product, start| { + for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product + .iter_mut() + .zip(self.permuted_input_expression[start..].iter()) + .zip(self.permuted_table_expression[start..].iter()) + { + *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); + } + }); + + // Batch invert to obtain the denominators for the lookup product + // polynomials + lookup_product.iter_mut().batch_invert(); + + // Finish the computation of the entire fraction by computing the numerators + // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + parallelize(&mut lookup_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + + *product *= &(self.compressed_input_expression[i] + &*beta); + *product *= &(self.compressed_table_expression[i] + &*gamma); + } + }); + + // The product vector is a vector of products of fractions of the form + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... 
+ \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where there are m input expressions and m table expressions, + // a_j(\omega^i) is the jth input expression in this lookup, + // a'j(\omega^i) is the permuted input expression, + // s_j(\omega^i) is the jth table expression in this lookup, + // s'(\omega^i) is the permuted table expression, + // and i is the ith row of the expression. + + // Compute the evaluations of the lookup product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(lookup_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. + .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + // This test works only with intermediate representations in this method. + // It can be used for debugging purposes. + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + + // l_0(X) * (1 - z(X)) = 0 + assert_eq!(z[0], C::Scalar::ONE); + + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + for i in 0..u { + let mut left = z[i + 1]; + let permuted_input_value = &self.permuted_input_expression[i]; + + let permuted_table_value = &self.permuted_table_expression[i]; + + left *= &(*beta + permuted_input_value); + left *= &(*gamma + permuted_table_value); + + let mut right = z[i]; + let mut input_term = self.compressed_input_expression[i]; + let mut table_term = self.compressed_table_expression[i]; + + input_term += &(*beta); + table_term += &(*gamma); + right *= &(input_term * &table_term); + + assert_eq!(left, right); + } + + // l_last(X) * (z(X)^2 - z(X)) = 0 + // Assertion will fail only when soundness is broken, in which + // case this z[u] value will be zero. (bad!) + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + permuted_input_poly: self.permuted_input_poly, + permuted_input_blind: self.permuted_input_blind, + permuted_table_poly: self.permuted_table_poly, + permuted_table_blind: self.permuted_table_blind, + product_poly: z, + product_blind, + }) + } /// Given a Lookup with input expressions, table expressions, and the permuted /// input expression and permuted table expression, this method constructs the /// grand product polynomial over the lookup. 
The grand product polynomial @@ -306,6 +557,36 @@ impl Permuted { } impl Committed { + pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( + self, + pk: &ProvingKeyV2, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_inv = domain.rotate_omega(*x, Rotation::prev()); + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); + let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); + let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + .chain(Some(permuted_input_eval)) + .chain(Some(permuted_input_inv_eval)) + .chain(Some(permuted_table_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -338,6 +619,48 @@ impl Committed { } impl Evaluated { + // NOTE: Copy of open with ProvingKeyV2 + pub(in crate::plonk) fn open_v2<'a>( + &'a self, + pk: &'a ProvingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + // Open lookup input commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup table commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: 
&self.constructed.permuted_table_poly, + blind: self.constructed.permuted_table_blind, + })) + // Open lookup input commitments at x_inv + .chain(Some(ProverQuery { + point: x_inv, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } + pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, @@ -382,6 +705,99 @@ impl Evaluated { type ExpressionPair = (Polynomial, Polynomial); +/// Given a vector of input values A and a vector of table values S, +/// this method permutes A and S to produce A' and S', such that: +/// - like values in A' are vertically adjacent to each other; and +/// - the first row in a sequence of like values in A' is the row +/// that has the corresponding value in S'. +/// This method returns (A', S') if no errors are encountered. +// NOTE: Copy of permute_expression_pair that uses ProvingKeyV2 +fn permute_expression_pair_v2<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( + pk: &ProvingKeyV2, + params: &P, + domain: &EvaluationDomain, + mut rng: R, + input_expression: &Polynomial, + table_expression: &Polynomial, +) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + let usable_rows = params.n() as usize - (blinding_factors + 1); + + let mut permuted_input_expression: Vec = input_expression.to_vec(); + permuted_input_expression.truncate(usable_rows); + + // Sort input lookup expression values + permuted_input_expression.sort(); + + // A BTreeMap of each unique element in the table expression and its count + let mut leftover_table_map: BTreeMap = table_expression + .iter() + .take(usable_rows) + .fold(BTreeMap::new(), |mut acc, coeff| { + *acc.entry(*coeff).or_insert(0) += 1; + acc + }); + let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; + + let mut 
repeated_input_rows = permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter_mut()) + .enumerate() + .filter_map(|(row, (input_value, table_value))| { + // If this is the first occurrence of `input_value` in the input expression + if row == 0 || *input_value != permuted_input_expression[row - 1] { + *table_value = *input_value; + // Remove one instance of input_value from leftover_table_map + if let Some(count) = leftover_table_map.get_mut(input_value) { + assert!(*count > 0); + *count -= 1; + None + } else { + // Return error if input_value not found + Some(Err(Error::ConstraintSystemFailure)) + } + // If input value is repeated + } else { + Some(Ok(row)) + } + }) + .collect::, _>>()?; + + // Populate permuted table at unfilled rows with leftover table elements + for (coeff, count) in leftover_table_map.iter() { + for _ in 0..*count { + permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; + } + } + assert!(repeated_input_rows.is_empty()); + + permuted_input_expression + .extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); + permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); + assert_eq!(permuted_input_expression.len(), params.n() as usize); + assert_eq!(permuted_table_coeffs.len(), params.n() as usize); + + #[cfg(feature = "sanity-checks")] + { + let mut last = None; + for (a, b) in permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter()) + .take(usable_rows) + { + if *a != *b { + assert_eq!(*a, last.unwrap()); + } + last = Some(*a); + } + } + + Ok(( + domain.lagrange_from_vec(permuted_input_expression), + domain.lagrange_from_vec(permuted_table_coeffs), + )) +} + /// Given a vector of input values A and a vector of table values S, /// this method permutes A and S to produce A' and S', such that: /// - like values in A' are vertically adjacent to each other; and diff --git a/halo2_proofs/src/plonk/permutation/prover.rs 
b/halo2_proofs/src/plonk/permutation/prover.rs index d6b108554d..5bc3924708 100644 --- a/halo2_proofs/src/plonk/permutation/prover.rs +++ b/halo2_proofs/src/plonk/permutation/prover.rs @@ -42,6 +42,155 @@ pub(crate) struct Evaluated { } impl Argument { + // NOTE: Copy of commit with ProvingKeyV2 + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_v2< + 'params, + C: CurveAffine, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + params: &P, + pk: &plonk::ProvingKeyV2, + pkey: &ProvingKey, + advice: &[Polynomial], + fixed: &[Polynomial], + instance: &[Polynomial], + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + + // How many columns can be included in a single permutation polynomial? + // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This + // will never underflow because of the requirement of at least a degree + // 3 circuit for the permutation argument. + assert!(pk.vk.cs_degree >= 3); + let chunk_len = pk.vk.cs_degree - 2; + let blinding_factors = pk.vk.cs.blinding_factors(); + + // Each column gets its own delta power. + let mut deltaomega = C::Scalar::ONE; + + // Track the "last" value from the previous column set + let mut last_z = C::Scalar::ONE; + + let mut sets = vec![]; + + for (columns, permutations) in self + .columns + .chunks(chunk_len) + .zip(pkey.permutations.chunks(chunk_len)) + { + // Goal is to compute the products of fractions + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where p_j(X) is the jth column in this permutation, + // and i is the ith row of the column. 
+ + let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; + + // Iterate over each column of the permutation + for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + for ((modified_values, value), permuted_value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + .zip(permuted_column_values[start..].iter()) + { + *modified_values *= &(*beta * permuted_value + &*gamma + value); + } + }); + } + + // Invert to obtain the denominator for the permutation product polynomial + modified_values.batch_invert(); + + // Iterate over each column again, this time finishing the computation + // of the entire fraction by computing the numerators + for &column in columns.iter() { + let omega = domain.get_omega(); + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); + for (modified_values, value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + { + // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta + *modified_values *= &(deltaomega * &*beta + &*gamma + value); + deltaomega *= ω + } + }); + deltaomega *= &::DELTA; + } + + // The modified_values vector is a vector of products of fractions + // of the form + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where i is the index into modified_values, for the jth column in + // the permutation + + // Compute the evaluations of the permutation product polynomial + // over our domain, starting with z[0] = 1 + let mut z = vec![last_z]; + for row in 
1..(params.n() as usize) { + let mut tmp = z[row - 1]; + + tmp *= &modified_values[row - 1]; + z.push(tmp); + } + let mut z = domain.lagrange_from_vec(z); + // Set blinding factors + for z in &mut z[params.n() as usize - blinding_factors..] { + *z = C::Scalar::random(&mut rng); + } + // Set new last_z + last_z = z[params.n() as usize - (blinding_factors + 1)]; + + let blind = Blind(C::Scalar::random(&mut rng)); + + let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); + let permutation_product_blind = blind; + let z = domain.lagrange_to_coeff(z); + let permutation_product_poly = z.clone(); + + let permutation_product_coset = domain.coeff_to_extended(z.clone()); + + let permutation_product_commitment = + permutation_product_commitment_projective.to_affine(); + + // Hash the permutation product commitment + transcript.write_point(permutation_product_commitment)?; + + sets.push(CommittedSet { + permutation_product_poly, + permutation_product_coset, + permutation_product_blind, + }); + } + + Ok(Committed { sets }) + } #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit< 'params, @@ -234,6 +383,51 @@ impl super::ProvingKey { } impl Constructed { + // NOTE: Copy of evaluate with ProvingKeyV2 + pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( + self, + pk: &plonk::ProvingKeyV2, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let blinding_factors = pk.vk.cs.blinding_factors(); + + { + let mut sets = self.sets.iter(); + + while let Some(set) = sets.next() { + let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); + + let permutation_product_next_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation::next()), + ); + + // Hash permutation product evals + for eval in iter::empty() + .chain(Some(&permutation_product_eval)) + .chain(Some(&permutation_product_next_eval)) + { + transcript.write_scalar(*eval)?; + } + 
+ // If we have any remaining sets to process, evaluate this set at omega^u + // so we can constrain the last value of its running product to equal the + // first value of the next set's running product, chaining them together. + if sets.len() > 0 { + let permutation_product_last_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), + ); + + transcript.write_scalar(permutation_product_last_eval)?; + } + } + } + + Ok(Evaluated { constructed: self }) + } pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &plonk::ProvingKey, @@ -281,6 +475,52 @@ impl Constructed { } impl Evaluated { + // NOTE: Copy of open with ProvingKeyV2 + pub(in crate::plonk) fn open_v2<'a>( + &'a self, + pk: &'a plonk::ProvingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = pk.vk.cs.blinding_factors(); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = pk + .vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.constructed.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega x + .chain(Some(ProverQuery { + point: *x, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + .chain(Some(ProverQuery { + point: x_next, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + })) + // Open it at \omega^{last} x for all but the last set. This rotation is only + // sensical for the first row, but we only use this rotation in a constraint + // that is gated on l_0. 
+ .chain( + self.constructed + .sets + .iter() + .rev() + .skip(1) + .flat_map(move |set| { + Some(ProverQuery { + point: x_last, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + }) + }), + ) + } pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a plonk::ProvingKey, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index a53a37cc74..3fa1e46bd5 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -42,6 +42,7 @@ struct AdviceSingle { pub advice_blinds: Vec>, } +// TODO: Rewrite as multi-instance prover, and make a wraper for signle-instance case. /// The prover object used to create proofs interactively by passing the witnesses to commit at /// each phase. #[derive(Debug)] @@ -61,7 +62,7 @@ pub struct ProverV2< instance_queries: Vec<(Column, Rotation)>, fixed_queries: Vec<(Column, Rotation)>, phases: Vec, - instance: Vec>, + instance: InstanceSingle, rng: R, transcript: T, advice: AdviceSingle, @@ -89,9 +90,12 @@ impl< rng: R, mut transcript: T, ) -> Result + // TODO: Can I move this `where` to the struct definition? where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { + // TODO: We have cs duplicated in circuit.cs and pk.vk.cs. Can we dedup them? 
+ if instance.len() != pk.vk.cs.num_instance_columns { return Err(Error::InvalidInstances); } @@ -107,59 +111,57 @@ impl< let domain = &pk.vk.domain; - let instance: Vec> = iter::once(instance) - .map(|instance| -> Result, Error> { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - transcript.common_scalar(*value)?; - } - *poly = *value; + let instance: InstanceSingle = { + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); + } + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + transcript.common_scalar(*value)?; } - Ok(poly) - }) - .collect::, _>>()?; - - if P::QUERY_INSTANCE { - let instance_commitments_projective: Vec<_> = instance_values - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) - .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - - for commitment in &instance_commitments { - transcript.common_point(*commitment)?; + *poly = *value; } - } + Ok(poly) + }) + .collect::, _>>()?; - let instance_polys: Vec<_> = instance_values + if P::QUERY_INSTANCE { + let instance_commitments_projective: Vec<_> = instance_values .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) - }) + 
.map(|poly| params.commit_lagrange(poly, Blind::default())) .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); - Ok(InstanceSingle { - instance_values, - instance_polys, + for commitment in &instance_commitments { + transcript.common_point(*commitment)?; + } + } + + let instance_polys: Vec<_> = instance_values + .iter() + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) }) - }) - .collect::, _>>()?; + .collect(); + + InstanceSingle { + instance_values, + instance_polys, + } + }; let advice = AdviceSingle:: { advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], @@ -208,13 +210,12 @@ impl< let params = self.params; let meta = self.cs; - let domain = self.pk.vk.domain; - let mut transcript = self.transcript; - let mut rng = self.rng; + let transcript = &mut self.transcript; + let mut rng = &mut self.rng; - let mut advice = self.advice; - let mut challenges = self.challenges; + let advice = &mut self.advice; + let challenges = &mut self.challenges; let column_indices = meta .advice_column_phase @@ -242,7 +243,8 @@ impl< } let mut advice_values = batch_invert_assigned::(witness.into_iter().flatten().collect()); - let unblinded_advice = HashSet::from_iter(meta.unblinded_advice_columns.clone()); + let unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); // Add blinding factors to advice columns @@ -306,8 +308,274 @@ impl< Ok(challenges.clone()) } - pub fn create_proof(self) -> Result { - todo!() + /// Finalizes the proof creation. 
+ pub fn create_proof(mut self) -> Result + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let params = self.params; + let meta = self.cs; + let pk = self.pk; + let domain = &self.pk.vk.domain; + + let mut transcript = self.transcript; + let mut rng = self.rng; + + let instance = std::mem::replace( + &mut self.instance, + InstanceSingle { + instance_values: Vec::new(), + instance_polys: Vec::new(), + }, + ); + let advice = std::mem::replace( + &mut self.advice, + AdviceSingle { + advice_polys: Vec::new(), + advice_blinds: Vec::new(), + }, + ); + let mut challenges = self.challenges; + + assert_eq!(challenges.len(), meta.num_challenges); + let challenges = (0..meta.num_challenges) + .map(|index| challenges.remove(&index).unwrap()) + .collect::>(); + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + + // Construct and commit to permuted values for each lookup + let lookups: Vec> = pk + .vk + .cs + .lookups + .iter() + .map(|lookup| { + lookup.commit_permuted_v2( + pk, + params, + &domain, + theta, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + &mut transcript, + ) + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); + + // Commit to permutation. 
+ let permutation = [pk.vk.cs.permutation.commit_v2( + params, + pk, + &pk.permutation, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + beta, + gamma, + &mut rng, + &mut transcript, + )?]; + + // Construct and commit to products for each lookup + let lookups: [Vec>; 1] = [lookups + .into_iter() + .map(|lookup| { + lookup.commit_product_v2(pk, params, beta, gamma, &mut rng, &mut transcript) + }) + .collect::, _>>()?]; + + // Compress expressions for each shuffle + let shuffles: [Vec>; 1] = [pk + .vk + .cs + .shuffles + .iter() + .map(|shuffle| { + shuffle.commit_product_v2( + pk, + params, + domain, + theta, + gamma, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + &mut transcript, + ) + }) + .collect::, _>>()?]; + + // Commit to the vanishing argument's random polynomial for blinding h(x_3) + let vanishing = vanishing::Argument::commit(params, domain, &mut rng, &mut transcript)?; + + // Obtain challenge for keeping all separate gates linearly independent + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + + // Calculate the advice polys + let advice: AdviceSingle = AdviceSingle { + advice_polys: advice + .advice_polys + .into_iter() + .map(|poly| domain.lagrange_to_coeff(poly)) + .collect::>(), + advice_blinds: advice.advice_blinds, + }; + + // Evaluate the h(X) polynomial + let h_poly = pk.ev.evaluate_h_v2( + pk, + &[advice.advice_polys.as_slice()], + &[instance.instance_polys.as_slice()], + &challenges, + *y, + *beta, + *gamma, + *theta, + &lookups, + &shuffles, + &permutation, + ); + + // Construct the vanishing argument's h(X) commitments + let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, &mut transcript)?; + + let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + let xn = x.pow([params.n()]); + + if P::QUERY_INSTANCE { + // Compute and hash instance evals for the circuit instance + // Evaluate polynomials at omega^i x + let 
instance_evals: Vec<_> = self + .instance_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &instance.instance_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each instance column evaluation + for eval in instance_evals.iter() { + transcript.write_scalar(*eval)?; + } + } + + // Compute and hash advice evals for the circuit instance + // Evaluate polynomials at omega^i x + let advice_evals: Vec<_> = self + .advice_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &advice.advice_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each advice column evaluation + for eval in advice_evals.iter() { + transcript.write_scalar(*eval)?; + } + + // Compute and hash fixed evals + let fixed_evals: Vec<_> = self + .fixed_queries + .iter() + .map(|&(column, at)| { + eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) + }) + .collect(); + + // Hash each fixed column evaluation + for eval in fixed_evals.iter() { + transcript.write_scalar(*eval)?; + } + + let vanishing = vanishing.evaluate(x, xn, domain, &mut transcript)?; + + // Evaluate common permutation data + pk.permutation.evaluate(x, &mut transcript)?; + + let [permutation] = permutation; + let [lookups] = lookups; + let [shuffles] = shuffles; + + // Evaluate the permutations, if any, at omega^i x. + let permutation = permutation + .construct() + .evaluate_v2(pk, x, &mut transcript)?; + + // Evaluate the lookups, if any, at omega^i x. + let lookups: Vec> = lookups + .into_iter() + .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .collect::, _>>()?; + + // Evaluate the shuffles, if any, at omega^i x. 
+ let shuffles: Vec> = shuffles + .into_iter() + .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .collect::, _>>()?; + + let instance_ref = &instance; + let advice_ref = &advice; + let instances = + iter::empty() + .chain( + P::QUERY_INSTANCE + .then_some(self.instance_queries.iter().map(move |&(column, at)| { + ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &instance_ref.instance_polys[column.index()], + blind: Blind::default(), + } + })) + .into_iter() + .flatten(), + ) + .chain( + self.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice_ref.advice_polys[column.index()], + blind: advice_ref.advice_blinds[column.index()], + }), + ) + .chain(permutation.open_v2(pk, x)) + .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) + .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) + .chain(self.fixed_queries.iter().map(|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &pk.fixed_polys[column.index()], + blind: Blind::default(), + })) + .chain(pk.permutation.open(x)) + // We query the h(X) polynomial at x + .chain(vanishing.open(x)); + + let prover = P::new(params); + prover + .create_proof(rng, &mut transcript, instances) + .map_err(|_| Error::ConstraintSystemFailure)?; + + Ok(transcript) } } diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs/src/plonk/shuffle/prover.rs index fd30436a47..30b9768203 100644 --- a/halo2_proofs/src/plonk/shuffle/prover.rs +++ b/halo2_proofs/src/plonk/shuffle/prover.rs @@ -1,5 +1,6 @@ use super::super::{ circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, + ProvingKeyV2, }; use super::Argument; use crate::plonk::evaluation::evaluate; @@ -36,6 +37,60 @@ pub(in crate::plonk) struct Evaluated { } impl> Argument { + /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs 
A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + // NOTE: Copy of compress with ProvingKeyV2 + #[allow(clippy::too_many_arguments)] + fn compress_v2<'a, 'params: 'a, C, P: Params<'params, C>>( + &self, + pk: &ProvingKeyV2, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + ) -> Compressed + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the shuffle and compress them + let input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the shuffle and compress them + let shuffle_expression = compress_expressions(&self.shuffle_expressions); + + Compressed { + input_expression, + shuffle_expression, + } + } + /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions /// [S_0, S_1, ..., S_{m-1}], this method /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} @@ -89,6 +144,117 @@ impl> Argument { } } + /// Given a Shuffle with input expressions and table expressions this method + /// constructs the grand product polynomial over the shuffle. + /// The grand product polynomial is used to populate the Product struct. 
+ /// The Product struct is added to the Shuffle and finally returned by the method. + // NOTE: Copy of commit_product with ProvingKeyV2 + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_product_v2< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKeyV2, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + let compressed = self.compress_v2( + pk, + params, + domain, + theta, + advice_values, + fixed_values, + instance_values, + challenges, + ); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut shuffle_product, |shuffle_product, start| { + for (shuffle_product, shuffle_value) in shuffle_product + .iter_mut() + .zip(compressed.shuffle_expression[start..].iter()) + { + *shuffle_product = *gamma + shuffle_value; + } + }); + + shuffle_product.iter_mut().batch_invert(); + + parallelize(&mut shuffle_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + *product *= &(*gamma + compressed.input_expression[i]); + } + }); + + // Compute the evaluations of the shuffle product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(shuffle_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. 
+ .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + assert_eq!(z[0], C::Scalar::ONE); + for i in 0..u { + let mut left = z[i + 1]; + let input_value = &compressed.input_expression[i]; + let shuffle_value = &compressed.shuffle_expression[i]; + left *= &(*gamma + shuffle_value); + let mut right = z[i]; + right *= &(*gamma + input_value); + assert_eq!(left, right); + } + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + product_poly: z, + product_blind, + }) + } + /// Given a Shuffle with input expressions and table expressions this method /// constructs the grand product polynomial over the shuffle. /// The grand product polynomial is used to populate the Product struct. 
@@ -201,6 +367,30 @@ impl> Argument { } impl Committed { + // NOTE: Copy of evaluate with ProvingKeyV2 + pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( + self, + pk: &ProvingKeyV2, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -226,6 +416,29 @@ impl Committed { } impl Evaluated { + // NOTE: Copy of open with ProvingKeyV2 + pub(in crate::plonk) fn open_v2<'a>( + &'a self, + pk: &'a ProvingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + // Open shuffle product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } + pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, From 50e7354c50823f80ed99d2e9e7096ba7b9b459e6 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 15 Dec 2023 13:20:43 +0100 Subject: [PATCH 06/79] Make new prover methods work with multi-instance --- halo2_proofs/src/plonk/error.rs | 5 +- halo2_proofs/src/plonk/prover.rs | 576 ++++++++++++++++++------------- 2 files changed, 332 insertions(+), 249 deletions(-) diff --git a/halo2_proofs/src/plonk/error.rs b/halo2_proofs/src/plonk/error.rs index d4a7e11c14..e149dd59f3 100644 --- a/halo2_proofs/src/plonk/error.rs +++ b/halo2_proofs/src/plonk/error.rs @@ -39,6 +39,8 @@ pub enum Error { ColumnNotInPermutation(Column), /// An error relating to a lookup table. TableError(TableError), + /// Generic error not covered by previous cases + Other(String), } impl From for Error { @@ -81,7 +83,8 @@ impl fmt::Display for Error { "Column {:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", column ), - Error::TableError(error) => write!(f, "{}", error) + Error::TableError(error) => write!(f, "{}", error), + Error::Other(error) => write!(f, "Other: {}", error), } } } diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 3fa1e46bd5..fbd62070b6 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -62,10 +62,10 @@ pub struct ProverV2< instance_queries: Vec<(Column, Rotation)>, fixed_queries: Vec<(Column, Rotation)>, phases: Vec, - instance: InstanceSingle, + instance: Vec>, rng: R, transcript: T, - advice: AdviceSingle, + advice: Vec>, challenges: HashMap, next_phase_index: usize, _marker: std::marker::PhantomData<(Scheme, P, E, R, T)>, @@ -86,7 +86,7 @@ impl< params: &'params Scheme::ParamsProver, pk: &'a ProvingKeyV2, circuit: &'a CompiledCircuitV2, - instance: &[&[Scheme::Scalar]], + instances: &[&[&[Scheme::Scalar]]], rng: R, mut transcript: T, ) -> Result @@ -96,8 +96,10 @@ impl< { // TODO: We have cs duplicated in circuit.cs and pk.vk.cs. Can we dedup them? 
- if instance.len() != pk.vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); + for instance in instances.iter() { + if instance.len() != pk.vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } } // TODO(Edu): Calculate advice_queries, fixed_queries, instance_queries from the gates and @@ -111,62 +113,71 @@ impl< let domain = &pk.vk.domain; - let instance: InstanceSingle = { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - transcript.common_scalar(*value)?; + // TODO: Name this better + let mut instance_fn = + |instance: &[&[Scheme::Scalar]]| -> Result, Error> { + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); } - *poly = *value; + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + transcript.common_scalar(*value)?; + } + *poly = *value; + } + Ok(poly) + }) + .collect::, _>>()?; + + if P::QUERY_INSTANCE { + let instance_commitments_projective: Vec<_> = instance_values + .iter() + .map(|poly| params.commit_lagrange(poly, Blind::default())) + .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); + + for commitment in &instance_commitments { + transcript.common_point(*commitment)?; } - Ok(poly) - }) - .collect::, _>>()?; + } - if P::QUERY_INSTANCE { 
- let instance_commitments_projective: Vec<_> = instance_values + let instance_polys: Vec<_> = instance_values .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) + }) .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - for commitment in &instance_commitments { - transcript.common_point(*commitment)?; - } - } - - let instance_polys: Vec<_> = instance_values - .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) + Ok(InstanceSingle { + instance_values, + instance_polys, }) - .collect(); - - InstanceSingle { - instance_values, - instance_polys, - } - }; + }; + let instance: Vec> = instances + .iter() + .map(|instance| instance_fn(instance)) + .collect::, _>>()?; - let advice = AdviceSingle:: { - advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], - advice_blinds: vec![Blind::default(); meta.num_advice_columns], - }; + let advice = vec![ + AdviceSingle:: { + advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], + advice_blinds: vec![Blind::default(); meta.num_advice_columns], + }; + instances.len() + ]; let challenges = HashMap::::with_capacity(meta.num_challenges); Ok(ProverV2 { @@ -193,7 +204,7 @@ impl< phase: u8, // TODO: Turn this into Vec>>. Requires batch_invert_assigned to work with // Vec - witness: Vec, LagrangeCoeff>>>, + witness: Vec, LagrangeCoeff>>>>, ) -> Result, Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, @@ -231,6 +242,14 @@ impl< .collect::>(); // TODO: Check that witness.len() is the expected number of advice columns. 
+ if witness.len() != self.instance.len() { + return Err(Error::Other(format!("witness.len() != instance.len()"))); + } + for witness in witness.iter() { + if witness.len() != meta.num_instance_columns { + return Err(Error::InvalidInstances); + } + } // Check that all current_phase advice columns are Some for (column_index, advice_column) in witness.iter().enumerate() { @@ -241,59 +260,72 @@ impl< // TODO: Check that column_index in witness is None }; } - let mut advice_values = - batch_invert_assigned::(witness.into_iter().flatten().collect()); - let unblinded_advice: HashSet = - HashSet::from_iter(meta.unblinded_advice_columns.clone()); - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] { - assert_eq!(*cell, Scheme::Scalar::ZERO); + let mut commit_phase_fn = |advice: &mut AdviceSingle, + challenges: &mut HashMap, + witness: Vec< + Option, LagrangeCoeff>>, + >| + -> Result<(), Error> { + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let mut advice_values = + batch_invert_assigned::(witness.into_iter().flatten().collect()); + let unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); + + // Add blinding factors to advice columns + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] { + *cell = Scheme::Scalar::random(&mut rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] 
{ + assert_eq!(*cell, Scheme::Scalar::ZERO); + } } } - } - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) + .collect(); + let mut advice_commitments = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments, + ); + let advice_commitments = advice_commitments; + drop(advice_commitments_projective); + + for commitment in &advice_commitments { + transcript.write_point(*commitment)?; + } + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + Ok(()) + }; - for commitment in &advice_commitments { - transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] 
= advice_values; - advice.advice_blinds[*column_index] = blind; + for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { + commit_phase_fn(advice, challenges, witness)?; } for (index, phase) in meta.challenge_phase.iter().enumerate() { @@ -321,20 +353,8 @@ impl< let mut transcript = self.transcript; let mut rng = self.rng; - let instance = std::mem::replace( - &mut self.instance, - InstanceSingle { - instance_values: Vec::new(), - instance_polys: Vec::new(), - }, - ); - let advice = std::mem::replace( - &mut self.advice, - AdviceSingle { - advice_polys: Vec::new(), - advice_blinds: Vec::new(), - }, - ); + let instance = std::mem::replace(&mut self.instance, Vec::new()); + let advice = std::mem::replace(&mut self.advice, Vec::new()); let mut challenges = self.challenges; assert_eq!(challenges.len(), meta.num_challenges); @@ -345,25 +365,36 @@ impl< // Sample theta challenge for keeping lookup columns linearly independent let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - // Construct and commit to permuted values for each lookup - let lookups: Vec> = pk - .vk - .cs - .lookups + let mut lookups_fn = + |instance: &InstanceSingle, + advice: &AdviceSingle| + -> Result>, Error> { + pk.vk + .cs + .lookups + .iter() + .map(|lookup| { + lookup.commit_permuted_v2( + pk, + params, + &domain, + theta, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + &mut transcript, + ) + }) + .collect::, _>>() + }; + let lookups: Vec>> = instance .iter() - .map(|lookup| { - lookup.commit_permuted_v2( - pk, - params, - &domain, - theta, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - &mut transcript, - ) + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, Error> { + // Construct and commit to permuted values for each lookup + lookups_fn(instance, advice) }) .collect::, _>>()?; @@ -374,49 +405,65 @@ impl< let gamma: ChallengeGamma<_> = 
transcript.squeeze_challenge_scalar(); // Commit to permutation. - let permutation = [pk.vk.cs.permutation.commit_v2( - params, - pk, - &pk.permutation, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - beta, - gamma, - &mut rng, - &mut transcript, - )?]; - - // Construct and commit to products for each lookup - let lookups: [Vec>; 1] = [lookups - .into_iter() - .map(|lookup| { - lookup.commit_product_v2(pk, params, beta, gamma, &mut rng, &mut transcript) - }) - .collect::, _>>()?]; - - // Compress expressions for each shuffle - let shuffles: [Vec>; 1] = [pk - .vk - .cs - .shuffles + let permutations: Vec> = instance .iter() - .map(|shuffle| { - shuffle.commit_product_v2( - pk, + .zip(advice.iter()) + .map(|(instance, advice)| { + pk.vk.cs.permutation.commit_v2( params, - domain, - theta, - gamma, + pk, + &pk.permutation, &advice.advice_polys, &pk.fixed_values, &instance.instance_values, - &challenges, + beta, + gamma, &mut rng, &mut transcript, ) }) - .collect::, _>>()?]; + .collect::, _>>()?; + + let lookups: Vec>> = lookups + .into_iter() + .map(|lookups| -> Result, _> { + // Construct and commit to products for each lookup + lookups + .into_iter() + .map(|lookup| { + lookup.commit_product_v2(pk, params, beta, gamma, &mut rng, &mut transcript) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles: Vec>> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, _> { + // Compress expressions for each shuffle + pk.vk + .cs + .shuffles + .iter() + .map(|shuffle| { + shuffle.commit_product_v2( + pk, + params, + domain, + theta, + gamma, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + &mut transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; // Commit to the vanishing argument's random polynomial for blinding h(x_3) let vanishing = vanishing::Argument::commit(params, domain, &mut rng, &mut transcript)?; @@ -425,20 +472,35 @@ impl< let 
y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); // Calculate the advice polys - let advice: AdviceSingle = AdviceSingle { - advice_polys: advice - .advice_polys - .into_iter() - .map(|poly| domain.lagrange_to_coeff(poly)) - .collect::>(), - advice_blinds: advice.advice_blinds, - }; + let advice: Vec> = advice + .into_iter() + .map( + |AdviceSingle { + advice_polys, + advice_blinds, + }| { + AdviceSingle { + advice_polys: advice_polys + .into_iter() + .map(|poly| domain.lagrange_to_coeff(poly)) + .collect::>(), + advice_blinds, + } + }, + ) + .collect(); // Evaluate the h(X) polynomial let h_poly = pk.ev.evaluate_h_v2( pk, - &[advice.advice_polys.as_slice()], - &[instance.instance_polys.as_slice()], + &advice + .iter() + .map(|a| a.advice_polys.as_slice()) + .collect::>(), + &instance + .iter() + .map(|i| i.instance_polys.as_slice()) + .collect::>(), &challenges, *y, *beta, @@ -446,7 +508,7 @@ impl< *theta, &lookups, &shuffles, - &permutation, + &permutations, ); // Construct the vanishing argument's h(X) commitments @@ -457,42 +519,46 @@ impl< if P::QUERY_INSTANCE { // Compute and hash instance evals for the circuit instance + for instance in instance.iter() { + // Evaluate polynomials at omega^i x + let instance_evals: Vec<_> = self + .instance_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &instance.instance_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each instance column evaluation + for eval in instance_evals.iter() { + transcript.write_scalar(*eval)?; + } + } + } + + // Compute and hash advice evals for the circuit instance + for advice in advice.iter() { // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = self - .instance_queries + let advice_evals: Vec<_> = self + .advice_queries .iter() .map(|&(column, at)| { eval_polynomial( - &instance.instance_polys[column.index()], + &advice.advice_polys[column.index()], domain.rotate_omega(*x, at), ) }) .collect(); - // Hash each 
instance column evaluation - for eval in instance_evals.iter() { + // Hash each advice column evaluation + for eval in advice_evals.iter() { transcript.write_scalar(*eval)?; } } - // Compute and hash advice evals for the circuit instance - // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = self - .advice_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &advice.advice_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each advice column evaluation - for eval in advice_evals.iter() { - transcript.write_scalar(*eval)?; - } - // Compute and hash fixed evals let fixed_evals: Vec<_> = self .fixed_queries @@ -512,63 +578,77 @@ impl< // Evaluate common permutation data pk.permutation.evaluate(x, &mut transcript)?; - let [permutation] = permutation; - let [lookups] = lookups; - let [shuffles] = shuffles; - // Evaluate the permutations, if any, at omega^i x. - let permutation = permutation - .construct() - .evaluate_v2(pk, x, &mut transcript)?; + let permutations: Vec> = permutations + .into_iter() + .map(|permutation| -> Result<_, _> { + permutation.construct().evaluate_v2(pk, x, &mut transcript) + }) + .collect::, _>>()?; // Evaluate the lookups, if any, at omega^i x. - let lookups: Vec> = lookups + let lookups: Vec>> = lookups .into_iter() - .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .collect::, _>>() + }) .collect::, _>>()?; // Evaluate the shuffles, if any, at omega^i x. 
- let shuffles: Vec> = shuffles + let shuffles: Vec>> = shuffles .into_iter() - .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .collect::, _>>() + }) .collect::, _>>()?; - let instance_ref = &instance; - let advice_ref = &advice; - let instances = - iter::empty() - .chain( - P::QUERY_INSTANCE - .then_some(self.instance_queries.iter().map(move |&(column, at)| { - ProverQuery { + let instances = instance + .iter() + .zip(advice.iter()) + .zip(permutations.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { + iter::empty() + .chain( + P::QUERY_INSTANCE + .then_some(self.instance_queries.iter().map(move |&(column, at)| { + ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &instance.instance_polys[column.index()], + blind: Blind::default(), + } + })) + .into_iter() + .flatten(), + ) + .chain( + self.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { point: domain.rotate_omega(*x, at), - poly: &instance_ref.instance_polys[column.index()], - blind: Blind::default(), - } - })) - .into_iter() - .flatten(), - ) - .chain( - self.advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice_ref.advice_polys[column.index()], - blind: advice_ref.advice_blinds[column.index()], - }), - ) - .chain(permutation.open_v2(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) - .chain(self.fixed_queries.iter().map(|&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &pk.fixed_polys[column.index()], - blind: Blind::default(), - })) - .chain(pk.permutation.open(x)) - // We query the h(X) polynomial at x - .chain(vanishing.open(x)); + poly: &advice.advice_polys[column.index()], + blind: advice.advice_blinds[column.index()], 
+ }), + ) + .chain(permutation.open_v2(pk, x)) + .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) + .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) + }) + .chain(self.fixed_queries.iter().map(|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &pk.fixed_polys[column.index()], + blind: Blind::default(), + })) + .chain(pk.permutation.open(x)) + // We query the h(X) polynomial at x + .chain(vanishing.open(x)); let prover = P::new(params); prover From 98759c33cd823eb2ab921aa689c4feaf805bf297 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Fri, 15 Dec 2023 14:10:34 +0100 Subject: [PATCH 07/79] Get queires for proving --- halo2_proofs/src/plonk/circuit.rs | 28 ++++++- halo2_proofs/src/plonk/prover.rs | 133 +++++++++++++++++++++++------- halo2_proofs/src/poly.rs | 2 +- 3 files changed, 132 insertions(+), 31 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index ae6ec15703..b78acd8294 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -35,7 +35,6 @@ pub struct Column { } impl Column { - #[cfg(test)] pub(crate) fn new(index: usize, column_type: C) -> Self { Column { index, column_type } } @@ -1508,6 +1507,31 @@ impl>, Iter: IntoIterator> IntoIterato } } +/// GateV2Backend +#[derive(Clone, Debug)] +pub struct GateV2Backend { + name: String, + constraint_names: Vec, + polys: Vec>, +} + +impl GateV2Backend { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. 
+ pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } +} + /// Gate #[derive(Clone, Debug)] pub struct Gate { @@ -1585,7 +1609,7 @@ pub struct ConstraintSystemV2Backend { /// fixed column that they were compressed into. This is just used by dev /// tooling right now. // pub(crate) selector_map: Vec>, - pub(crate) gates: Vec>, + pub(crate) gates: Vec>, // pub(crate) advice_queries: Vec<(Column, Rotation)>, // Contains an integer for each advice column // identifying how many distinct queries it has diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index fbd62070b6..187b9f4972 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -8,8 +8,8 @@ use std::{collections::HashMap, iter}; use super::{ circuit::{ sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, - ConstraintSystemV2Backend, Fixed, FloorPlanner, Instance, Selector, + Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, + ConstraintSystemV2Backend, Expression, Fixed, FloorPlanner, Instance, Selector, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, ProvingKeyV2, @@ -55,20 +55,107 @@ pub struct ProverV2< R: RngCore, T: TranscriptWrite, > { + // Circuit and setup fields params: &'params Scheme::ParamsProver, pk: &'a ProvingKeyV2, - cs: &'a ConstraintSystemV2Backend, advice_queries: Vec<(Column, Rotation)>, instance_queries: Vec<(Column, Rotation)>, fixed_queries: Vec<(Column, Rotation)>, phases: Vec, + // State instance: Vec>, - rng: R, - transcript: T, advice: Vec>, challenges: HashMap, next_phase_index: usize, - _marker: std::marker::PhantomData<(Scheme, P, E, R, T)>, + rng: R, + transcript: T, + _marker: 
std::marker::PhantomData<(P, E)>, +} + +struct Queries { + advice: Vec<(Column, Rotation)>, + instance: Vec<(Column, Rotation)>, + fixed: Vec<(Column, Rotation)>, +} + +struct QueriesSet { + advice: BTreeSet<(Column, Rotation)>, + instance: BTreeSet<(Column, Rotation)>, + fixed: BTreeSet<(Column, Rotation)>, +} + +fn collect_queries(expr: &Expression, queries: &mut QueriesSet) { + match expr { + Expression::Constant(_) => (), + Expression::Selector(_selector) => { + panic!("no Selector should arrive to the Backend"); + } + Expression::Fixed(query) => { + queries + .fixed + .insert((Column::new(query.column_index, Fixed), query.rotation)); + } + Expression::Advice(query) => { + queries.advice.insert(( + Column::new(query.column_index, Advice { phase: query.phase }), + query.rotation, + )); + } + Expression::Instance(query) => { + queries + .instance + .insert((Column::new(query.column_index, Instance), query.rotation)); + } + Expression::Challenge(_) => (), + Expression::Negated(a) => collect_queries(a, queries), + Expression::Sum(a, b) => { + collect_queries(a, queries); + collect_queries(b, queries); + } + Expression::Product(a, b) => { + collect_queries(a, queries); + collect_queries(b, queries); + } + Expression::Scaled(a, _) => collect_queries(a, queries), + }; +} + +fn get_all_queries(cs: &ConstraintSystemV2Backend) -> Queries { + let mut queries = QueriesSet { + advice: BTreeSet::new(), + instance: BTreeSet::new(), + fixed: BTreeSet::new(), + }; + + for gate in &cs.gates { + for expr in gate.polynomials() { + collect_queries(expr, &mut queries); + } + } + for lookup in &cs.lookups { + for expr in lookup + .input_expressions + .iter() + .chain(lookup.table_expressions.iter()) + { + collect_queries(expr, &mut queries); + } + } + for shuffle in &cs.shuffles { + for expr in shuffle + .input_expressions + .iter() + .chain(shuffle.shuffle_expressions.iter()) + { + collect_queries(expr, &mut queries); + } + } + + Queries { + advice: 
queries.advice.into_iter().collect(), + instance: queries.instance.into_iter().collect(), + fixed: queries.fixed.into_iter().collect(), + } } impl< @@ -85,7 +172,6 @@ impl< pub fn new( params: &'params Scheme::ParamsProver, pk: &'a ProvingKeyV2, - circuit: &'a CompiledCircuitV2, instances: &[&[&[Scheme::Scalar]]], rng: R, mut transcript: T, @@ -94,22 +180,19 @@ impl< where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { - // TODO: We have cs duplicated in circuit.cs and pk.vk.cs. Can we dedup them? - for instance in instances.iter() { if instance.len() != pk.vk.cs.num_instance_columns { return Err(Error::InvalidInstances); } } - // TODO(Edu): Calculate advice_queries, fixed_queries, instance_queries from the gates and - // lookup expressions. + let queries = get_all_queries(&pk.vk.cs); // Hash verification key into transcript pk.vk.hash_into(&mut transcript)?; - let meta = &circuit.cs; - let phases = circuit.cs.phases(); + let meta = &pk.vk.cs; + let phases = meta.phases(); let domain = &pk.vk.domain; @@ -182,11 +265,10 @@ impl< Ok(ProverV2 { params, - cs: &circuit.cs, pk, - advice_queries: todo!(), - instance_queries: todo!(), - fixed_queries: todo!(), + advice_queries: queries.advice, + instance_queries: queries.instance, + fixed_queries: queries.fixed, phases, instance, rng, @@ -220,7 +302,7 @@ impl< } let params = self.params; - let meta = self.cs; + let meta = &self.pk.vk.cs; let transcript = &mut self.transcript; let mut rng = &mut self.rng; @@ -262,7 +344,6 @@ impl< } let mut commit_phase_fn = |advice: &mut AdviceSingle, - challenges: &mut HashMap, witness: Vec< Option, LagrangeCoeff>>, >| @@ -325,7 +406,7 @@ impl< }; for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { - commit_phase_fn(advice, challenges, witness)?; + commit_phase_fn(advice, witness)?; } for (index, phase) in meta.challenge_phase.iter().enumerate() { @@ -346,7 +427,7 @@ impl< Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { let params 
= self.params; - let meta = self.cs; + let meta = &self.pk.vk.cs; let pk = self.pk; let domain = &self.pk.vk.domain; @@ -369,9 +450,7 @@ impl< |instance: &InstanceSingle, advice: &AdviceSingle| -> Result>, Error> { - pk.vk - .cs - .lookups + meta.lookups .iter() .map(|lookup| { lookup.commit_permuted_v2( @@ -409,7 +488,7 @@ impl< .iter() .zip(advice.iter()) .map(|(instance, advice)| { - pk.vk.cs.permutation.commit_v2( + meta.permutation.commit_v2( params, pk, &pk.permutation, @@ -442,9 +521,7 @@ impl< .zip(advice.iter()) .map(|(instance, advice)| -> Result, _> { // Compress expressions for each shuffle - pk.vk - .cs - .shuffles + meta.shuffles .iter() .map(|shuffle| { shuffle.commit_product_v2( diff --git a/halo2_proofs/src/poly.rs b/halo2_proofs/src/poly.rs index 9cb6b149bc..c52e982f19 100644 --- a/halo2_proofs/src/poly.rs +++ b/halo2_proofs/src/poly.rs @@ -303,7 +303,7 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { /// Describes the relative rotation of a vector. Negative numbers represent /// reverse (leftmost) rotations and positive numbers represent forward (rightmost) /// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Rotation(pub i32); impl Rotation { From 9ee0ced481367744c60c8c92988ceb1b7325a1e8 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 15 Dec 2023 17:11:24 +0100 Subject: [PATCH 08/79] Port verify_proof to fe-be split --- halo2_proofs/src/plonk.rs | 107 ++++- halo2_proofs/src/plonk/circuit.rs | 134 ++++-- halo2_proofs/src/plonk/evaluation.rs | 4 +- halo2_proofs/src/plonk/keygen.rs | 13 +- halo2_proofs/src/plonk/lookup/prover.rs | 8 +- halo2_proofs/src/plonk/lookup/verifier.rs | 45 +- halo2_proofs/src/plonk/permutation/prover.rs | 12 +- .../src/plonk/permutation/verifier.rs | 170 +++++++ halo2_proofs/src/plonk/prover.rs | 141 ++---- halo2_proofs/src/plonk/shuffle/prover.rs | 6 +- halo2_proofs/src/plonk/shuffle/verifier.rs | 26 +- halo2_proofs/src/plonk/vanishing/verifier.rs | 18 +- halo2_proofs/src/plonk/verifier.rs | 424 +++++++++++++++++- 13 files changed, 930 insertions(+), 178 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index af9aac3f87..ad4dc16653 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -15,7 +15,7 @@ use crate::helpers::{ }; use crate::poly::{ Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, + Polynomial, Rotation, }; use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; use crate::SerdeFormat; @@ -43,6 +43,108 @@ pub use verifier::*; use evaluation::Evaluator; use std::io; +/// List of queries (columns and rotations) used by a circuit +#[derive(Debug, Clone)] +pub struct Queries { + /// List of unique advice queries + pub advice: Vec<(Column, Rotation)>, + /// List of unique instance queries + pub instance: Vec<(Column, Rotation)>, + /// List of unique fixed queries + pub fixed: Vec<(Column, Rotation)>, + /// Contains an integer for each advice column + /// identifying how many distinct queries it has + /// so far; should be same length as cs.num_advice_columns. + pub num_advice_queries: Vec, +} + +impl Queries { + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. 
+ pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) + + 1 // for at least one row + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. 
+ factors + 1 + } + + pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, advice_query) in self.advice.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + panic!("get_advice_query_index called for non-existent query"); + } + + pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, fixed_query) in self.fixed.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + panic!("get_fixed_query_index called for non-existent query"); + } + + pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, instance_query) in self.instance.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + panic!("get_instance_query_index called for non-existent query"); + } + + pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice(_) => { + self.get_advice_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => { + self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Instance => { + self.get_instance_query_index(Column::::try_from(column).unwrap(), at) + } + } + } +} + /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. #[derive(Clone, Debug)] @@ -51,6 +153,7 @@ pub struct VerifyingKeyV2 { fixed_commitments: Vec, permutation: permutation::VerifyingKey, cs: ConstraintSystemV2Backend, + queries: Queries, /// Cached maximum degree of `cs` (which doesn't change after construction). cs_degree: usize, /// The representative of this `VerifyingKey` in transcripts. @@ -69,12 +172,14 @@ impl VerifyingKeyV2 { { // Compute cached values. 
let cs_degree = cs.degree(); + let queries = cs.collect_queries(); let mut vk = Self { domain, fixed_commitments, permutation, cs, + queries, cs_degree, // Temporary, this is not pinned. transcript_repr: C::Scalar::ZERO, diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index b78acd8294..caa70bb029 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1,4 +1,4 @@ -use super::{lookup, permutation, shuffle, Assigned, Error}; +use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; use crate::{ @@ -9,6 +9,7 @@ use core::cmp::max; use core::ops::{Add, Mul}; use ff::Field; use sealed::SealedPhase; +use std::collections::BTreeSet; use std::collections::HashMap; use std::fmt::Debug; use std::iter::{Product, Sum}; @@ -1587,6 +1588,48 @@ pub struct CompiledCircuitV2 { pub(crate) cs: ConstraintSystemV2Backend, } +struct QueriesSet { + advice: BTreeSet<(Column, Rotation)>, + instance: BTreeSet<(Column, Rotation)>, + fixed: BTreeSet<(Column, Rotation)>, +} + +fn collect_queries(expr: &Expression, queries: &mut QueriesSet) { + match expr { + Expression::Constant(_) => (), + Expression::Selector(_selector) => { + panic!("no Selector should arrive to the Backend"); + } + Expression::Fixed(query) => { + queries + .fixed + .insert((Column::new(query.column_index, Fixed), query.rotation)); + } + Expression::Advice(query) => { + queries.advice.insert(( + Column::new(query.column_index, Advice { phase: query.phase }), + query.rotation, + )); + } + Expression::Instance(query) => { + queries + .instance + .insert((Column::new(query.column_index, Instance), query.rotation)); + } + Expression::Challenge(_) => (), + Expression::Negated(a) => collect_queries(a, queries), + Expression::Sum(a, b) => { + collect_queries(a, queries); + collect_queries(b, queries); + } + Expression::Product(a, b) => { + collect_queries(a, queries); + 
collect_queries(b, queries); + } + Expression::Scaled(a, _) => collect_queries(a, queries), + }; +} + /// This is a description of the circuit environment, such as the gate, column and /// permutation arrangements. #[derive(Debug, Clone)] @@ -1611,10 +1654,6 @@ pub struct ConstraintSystemV2Backend { // pub(crate) selector_map: Vec>, pub(crate) gates: Vec>, // pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. - num_advice_queries: Vec, // pub(crate) instance_queries: Vec<(Column, Rotation)>, // pub(crate) fixed_queries: Vec<(Column, Rotation)>, @@ -1683,47 +1722,6 @@ impl ConstraintSystemV2Backend { degree } - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) - + 1 // for at least one row - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. 
- let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } - pub(crate) fn phases(&self) -> Vec { let max_phase = self .advice_column_phase @@ -1733,6 +1731,50 @@ impl ConstraintSystemV2Backend { .unwrap_or_default(); (0..=max_phase).collect() } + + pub(crate) fn collect_queries(&self) -> Queries { + let mut queries = QueriesSet { + advice: BTreeSet::new(), + instance: BTreeSet::new(), + fixed: BTreeSet::new(), + }; + let mut num_advice_queries = vec![0; self.num_advice_columns]; + + for gate in &self.gates { + for expr in gate.polynomials() { + collect_queries(expr, &mut queries); + } + } + for lookup in &self.lookups { + for expr in lookup + .input_expressions + .iter() + .chain(lookup.table_expressions.iter()) + { + collect_queries(expr, &mut queries); + } + } + for shuffle in &self.shuffles { + for expr in shuffle + .input_expressions + .iter() + .chain(shuffle.shuffle_expressions.iter()) + { + collect_queries(expr, &mut queries); + } + } + + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; + } + + Queries { + advice: queries.advice.into_iter().collect(), + instance: queries.instance.into_iter().collect(), + fixed: queries.fixed.into_iter().collect(), + num_advice_queries, + } + } } /// This is a description of the circuit environment, such as the gate, column and diff --git 
a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index efd4f6081f..83f52f16ac 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -295,6 +295,7 @@ impl Evaluator { } /// Creates a new evaluation structure + // TODO: Remove pub fn new(cs: &ConstraintSystem) -> Self { let mut ev = Evaluator::default(); @@ -475,7 +476,7 @@ impl Evaluator { // Permutations let sets = &permutation.sets; if !sets.is_empty() { - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); let last_rotation = Rotation(-((blinding_factors + 1) as i32)); let chunk_len = pk.vk.cs.degree() - 2; let delta_start = beta * &C::Scalar::ZETA; @@ -695,6 +696,7 @@ impl Evaluator { } /// Evaluate h poly + // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn evaluate_h( &self, diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 405e1c11f8..b8891ce26e 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -214,9 +214,10 @@ where C::Scalar: FromUniformBytes<64>, { let cs = &circuit.cs; + let queries = cs.collect_queries(); let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - if (params.n() as usize) < cs.minimum_rows() { + if (params.n() as usize) < queries.minimum_rows() { return Err(Error::not_enough_rows_available(params.k())); } @@ -343,7 +344,7 @@ where { let cs = &circuit.cs; - if (params.n() as usize) < cs.minimum_rows() { + if (params.n() as usize) < vk.queries.minimum_rows() { return Err(Error::not_enough_rows_available(params.k())); } @@ -376,7 +377,11 @@ where // Compute l_blind(X) which evaluates to 1 for each blinding factor row // and 0 otherwise over the domain. let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(cs.blinding_factors()) { + for evaluation in l_blind[..] 
+ .iter_mut() + .rev() + .take(vk.queries.blinding_factors()) + { *evaluation = C::Scalar::ONE; } let l_blind = vk.domain.lagrange_to_coeff(l_blind); @@ -385,7 +390,7 @@ where // Compute l_last(X) which evaluates to 1 on the first inactive row (just // before the blinding factors) and 0 otherwise over the domain let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - cs.blinding_factors() - 1] = C::Scalar::ONE; + l_last[params.n() as usize - vk.queries.blinding_factors() - 1] = C::Scalar::ONE; let l_last = vk.domain.lagrange_to_coeff(l_last); let l_last = vk.domain.coeff_to_extended(l_last); diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs/src/plonk/lookup/prover.rs index 203b554939..c6c9859046 100644 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ b/halo2_proofs/src/plonk/lookup/prover.rs @@ -166,6 +166,7 @@ impl> Argument { /// - constructs Permuted struct using permuted_input_value = A', and /// permuted_table_expression = S'. /// The Permuted struct is used to update the Lookup, and is then returned. + // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit_permuted< 'a, @@ -286,7 +287,7 @@ impl Permuted { mut rng: R, transcript: &mut T, ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); // Goal is to compute the products of fractions // // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... 
+ \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) @@ -587,6 +588,7 @@ impl Committed { Ok(Evaluated { constructed: self }) } + // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -661,6 +663,7 @@ impl Evaluated { })) } + // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, @@ -720,7 +723,7 @@ fn permute_expression_pair_v2<'params, C: CurveAffine, P: Params<'params, C>, R: input_expression: &Polynomial, table_expression: &Polynomial, ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); let usable_rows = params.n() as usize - (blinding_factors + 1); let mut permuted_input_expression: Vec = input_expression.to_vec(); @@ -804,6 +807,7 @@ fn permute_expression_pair_v2<'params, C: CurveAffine, P: Params<'params, C>, R: /// - the first row in a sequence of like values in A' is the row /// that has the corresponding value in S'. /// This method returns (A', S') if no errors are encountered. 
+// TODO: Remove fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( pk: &ProvingKey, params: &P, diff --git a/halo2_proofs/src/plonk/lookup/verifier.rs b/halo2_proofs/src/plonk/lookup/verifier.rs index bbc86c8e9d..5667a54c5d 100644 --- a/halo2_proofs/src/plonk/lookup/verifier.rs +++ b/halo2_proofs/src/plonk/lookup/verifier.rs @@ -6,7 +6,7 @@ use super::super::{ use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::{Error, VerifyingKey, VerifyingKeyV2}, poly::{commitment::MSM, Rotation, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; @@ -168,6 +168,49 @@ impl Evaluated { )) } + // NOTE: Copy of queries with VerifyingKeyV2 + pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open lookup input commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + *x, + self.permuted_input_eval, + ))) + // Open lookup table commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_table_commitment, + *x, + self.permuted_table_eval, + ))) + // Open lookup input commitments at \omega^{-1} x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + x_inv, + self.permuted_input_inv_eval, + ))) + // Open lookup product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } + + // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r 
VerifyingKey, diff --git a/halo2_proofs/src/plonk/permutation/prover.rs b/halo2_proofs/src/plonk/permutation/prover.rs index 5bc3924708..d5683a815b 100644 --- a/halo2_proofs/src/plonk/permutation/prover.rs +++ b/halo2_proofs/src/plonk/permutation/prover.rs @@ -72,7 +72,7 @@ impl Argument { // 3 circuit for the permutation argument. assert!(pk.vk.cs_degree >= 3); let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); // Each column gets its own delta power. let mut deltaomega = C::Scalar::ONE; @@ -191,6 +191,8 @@ impl Argument { Ok(Committed { sets }) } + + // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit< 'params, @@ -391,7 +393,7 @@ impl Constructed { transcript: &mut T, ) -> Result, Error> { let domain = &pk.vk.domain; - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); { let mut sets = self.sets.iter(); @@ -428,6 +430,8 @@ impl Constructed { Ok(Evaluated { constructed: self }) } + + // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &plonk::ProvingKey, @@ -481,7 +485,7 @@ impl Evaluated { pk: &'a plonk::ProvingKeyV2, x: ChallengeX, ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); let x_last = pk .vk @@ -521,6 +525,8 @@ impl Evaluated { }), ) } + + // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a plonk::ProvingKey, diff --git a/halo2_proofs/src/plonk/permutation/verifier.rs b/halo2_proofs/src/plonk/permutation/verifier.rs index a4637422ae..2cb6e6f925 100644 --- a/halo2_proofs/src/plonk/permutation/verifier.rs +++ b/halo2_proofs/src/plonk/permutation/verifier.rs @@ -30,6 +30,29 @@ pub struct Evaluated { } impl Argument { + pub(crate) fn read_product_commitments_v2< + C: 
CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + vk: &plonk::VerifyingKeyV2, + transcript: &mut T, + ) -> Result, Error> { + let chunk_len = vk.cs_degree - 2; + + let permutation_product_commitments = self + .columns + .chunks(chunk_len) + .map(|_| transcript.read_point()) + .collect::, _>>()?; + + Ok(Committed { + permutation_product_commitments, + }) + } + + // TODO: Remove pub(crate) fn read_product_commitments< C: CurveAffine, E: EncodedChallenge, @@ -99,6 +122,114 @@ impl Committed { } impl Evaluated { + // NOTE: Copy of expressions with VerifyingKeyV2 + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions_v2<'a>( + &'a self, + vk: &'a plonk::VerifyingKeyV2, + p: &'a Argument, + common: &'a CommonEvaluated, + advice_evals: &'a [C::Scalar], + fixed_evals: &'a [C::Scalar], + instance_evals: &'a [C::Scalar], + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + beta: ChallengeBeta, + gamma: ChallengeGamma, + x: ChallengeX, + ) -> impl Iterator + 'a { + let chunk_len = vk.cs_degree - 2; + iter::empty() + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + .chain( + self.sets + .first() + .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), + ) + // Enforce only for the last set. + // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + .chain(self.sets.last().map(|last_set| { + (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) + * &l_last + })) + // Except for the first set, enforce. 
+ // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + .chain( + self.sets + .iter() + .skip(1) + .zip(self.sets.iter()) + .map(|(set, last_set)| { + ( + set.permutation_product_eval, + last_set.permutation_product_last_eval.unwrap(), + ) + }) + .map(move |(set, prev_last)| (set - &prev_last) * &l_0), + ) + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + .chain( + self.sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(common.permutation_evals.chunks(chunk_len)) + .enumerate() + .map(move |(chunk_index, ((set, columns), permutation_evals))| { + let mut left = set.permutation_product_next_eval; + for (eval, permutation_eval) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals + [vk.queries.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals + [vk.queries.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals + [vk.queries.get_any_query_index(column, Rotation::cur())] + } + }) + .zip(permutation_evals.iter()) + { + left *= &(eval + &(*beta * permutation_eval) + &*gamma); + } + + let mut right = set.permutation_product_eval; + let mut current_delta = (*beta * &*x) + * &(::DELTA + .pow_vartime([(chunk_index * chunk_len) as u64])); + for eval in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals + [vk.queries.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals[vk.queries.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals + [vk.queries.get_any_query_index(column, Rotation::cur())] + } + }) { + right *= &(eval + ¤t_delta + &*gamma); + current_delta *= &C::Scalar::DELTA; + } + + (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) + }), + ) + } + + // TODO: Remove 
#[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn expressions<'a>( &'a self, @@ -201,6 +332,45 @@ impl Evaluated { ) } + // NOTE: Copy of queries with VerifyingKeyV2 + pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( + &'r self, + vk: &'r plonk::VerifyingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = vk.queries.blinding_factors(); + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega^{-1} x + // Open permutation product commitments at x and \omega x + .chain(Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + *x, + set.permutation_product_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + x_next, + set.permutation_product_next_eval, + ))) + })) + // Open it at \omega^{last} x for all but the last set + .chain(self.sets.iter().rev().skip(1).flat_map(move |set| { + Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + x_last, + set.permutation_product_last_eval.unwrap(), + )) + })) + } + + // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r plonk::VerifyingKey, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 187b9f4972..1f3130dd75 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -8,8 +8,8 @@ use std::{collections::HashMap, iter}; use super::{ circuit::{ sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, - ConstraintSystemV2Backend, Expression, Fixed, FloorPlanner, Instance, Selector, + Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, + Instance, Selector, }, lookup, permutation, shuffle, vanishing, 
ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, ProvingKeyV2, @@ -21,7 +21,7 @@ use crate::{ plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, - Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, }, }; use crate::{ @@ -58,9 +58,9 @@ pub struct ProverV2< // Circuit and setup fields params: &'params Scheme::ParamsProver, pk: &'a ProvingKeyV2, - advice_queries: Vec<(Column, Rotation)>, - instance_queries: Vec<(Column, Rotation)>, - fixed_queries: Vec<(Column, Rotation)>, + // advice_queries: Vec<(Column, Rotation)>, + // instance_queries: Vec<(Column, Rotation)>, + // fixed_queries: Vec<(Column, Rotation)>, phases: Vec, // State instance: Vec>, @@ -72,92 +72,6 @@ pub struct ProverV2< _marker: std::marker::PhantomData<(P, E)>, } -struct Queries { - advice: Vec<(Column, Rotation)>, - instance: Vec<(Column, Rotation)>, - fixed: Vec<(Column, Rotation)>, -} - -struct QueriesSet { - advice: BTreeSet<(Column, Rotation)>, - instance: BTreeSet<(Column, Rotation)>, - fixed: BTreeSet<(Column, Rotation)>, -} - -fn collect_queries(expr: &Expression, queries: &mut QueriesSet) { - match expr { - Expression::Constant(_) => (), - Expression::Selector(_selector) => { - panic!("no Selector should arrive to the Backend"); - } - Expression::Fixed(query) => { - queries - .fixed - .insert((Column::new(query.column_index, Fixed), query.rotation)); - } - Expression::Advice(query) => { - queries.advice.insert(( - Column::new(query.column_index, Advice { phase: query.phase }), - query.rotation, - )); - } - Expression::Instance(query) => { - queries - .instance - .insert((Column::new(query.column_index, Instance), query.rotation)); - } - Expression::Challenge(_) => (), - Expression::Negated(a) => collect_queries(a, queries), - Expression::Sum(a, b) => { - collect_queries(a, queries); - collect_queries(b, queries); - } - Expression::Product(a, b) => { - 
collect_queries(a, queries); - collect_queries(b, queries); - } - Expression::Scaled(a, _) => collect_queries(a, queries), - }; -} - -fn get_all_queries(cs: &ConstraintSystemV2Backend) -> Queries { - let mut queries = QueriesSet { - advice: BTreeSet::new(), - instance: BTreeSet::new(), - fixed: BTreeSet::new(), - }; - - for gate in &cs.gates { - for expr in gate.polynomials() { - collect_queries(expr, &mut queries); - } - } - for lookup in &cs.lookups { - for expr in lookup - .input_expressions - .iter() - .chain(lookup.table_expressions.iter()) - { - collect_queries(expr, &mut queries); - } - } - for shuffle in &cs.shuffles { - for expr in shuffle - .input_expressions - .iter() - .chain(shuffle.shuffle_expressions.iter()) - { - collect_queries(expr, &mut queries); - } - } - - Queries { - advice: queries.advice.into_iter().collect(), - instance: queries.instance.into_iter().collect(), - fixed: queries.fixed.into_iter().collect(), - } -} - impl< 'a, 'params, @@ -186,12 +100,11 @@ impl< } } - let queries = get_all_queries(&pk.vk.cs); - // Hash verification key into transcript pk.vk.hash_into(&mut transcript)?; let meta = &pk.vk.cs; + let queries = &pk.vk.queries; let phases = meta.phases(); let domain = &pk.vk.domain; @@ -204,7 +117,7 @@ impl< .map(|values| { let mut poly = domain.empty_lagrange(); assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + if values.len() > (poly.len() - (queries.blinding_factors() + 1)) { return Err(Error::InstanceTooLarge); } for (poly, value) in poly.iter_mut().zip(values.iter()) { @@ -266,9 +179,6 @@ impl< Ok(ProverV2 { params, pk, - advice_queries: queries.advice, - instance_queries: queries.instance, - fixed_queries: queries.fixed, phases, instance, rng, @@ -303,6 +213,7 @@ impl< let params = self.params; let meta = &self.pk.vk.cs; + let queries = &self.pk.vk.queries; let transcript = &mut self.transcript; let mut rng = &mut self.rng; @@ -348,7 +259,7 @@ impl< Option, 
LagrangeCoeff>>, >| -> Result<(), Error> { - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let unusable_rows_start = params.n() as usize - (queries.blinding_factors() + 1); let mut advice_values = batch_invert_assigned::(witness.into_iter().flatten().collect()); let unblinded_advice: HashSet = @@ -428,6 +339,7 @@ impl< { let params = self.params; let meta = &self.pk.vk.cs; + let queries = &self.pk.vk.queries; let pk = self.pk; let domain = &self.pk.vk.domain; @@ -598,8 +510,8 @@ impl< // Compute and hash instance evals for the circuit instance for instance in instance.iter() { // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = self - .instance_queries + let instance_evals: Vec<_> = queries + .instance .iter() .map(|&(column, at)| { eval_polynomial( @@ -619,8 +531,8 @@ impl< // Compute and hash advice evals for the circuit instance for advice in advice.iter() { // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = self - .advice_queries + let advice_evals: Vec<_> = queries + .advice .iter() .map(|&(column, at)| { eval_polynomial( @@ -637,8 +549,8 @@ impl< } // Compute and hash fixed evals - let fixed_evals: Vec<_> = self - .fixed_queries + let fixed_evals: Vec<_> = queries + .fixed .iter() .map(|&(column, at)| { eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) @@ -695,7 +607,7 @@ impl< iter::empty() .chain( P::QUERY_INSTANCE - .then_some(self.instance_queries.iter().map(move |&(column, at)| { + .then_some(queries.instance.iter().map(move |&(column, at)| { ProverQuery { point: domain.rotate_omega(*x, at), poly: &instance.instance_polys[column.index()], @@ -705,20 +617,16 @@ impl< .into_iter() .flatten(), ) - .chain( - self.advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - }), - ) + .chain(queries.advice.iter().map(move 
|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice.advice_polys[column.index()], + blind: advice.advice_blinds[column.index()], + })) .chain(permutation.open_v2(pk, x)) .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) }) - .chain(self.fixed_queries.iter().map(|&(column, at)| ProverQuery { + .chain(queries.fixed.iter().map(|&(column, at)| ProverQuery { point: domain.rotate_omega(*x, at), poly: &pk.fixed_polys[column.index()], blind: Blind::default(), @@ -740,6 +648,7 @@ impl< /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` /// are zero-padded internally. +// TODO: Remove pub fn create_proof< 'params, Scheme: CommitmentScheme, diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs/src/plonk/shuffle/prover.rs index 30b9768203..929acfe4fa 100644 --- a/halo2_proofs/src/plonk/shuffle/prover.rs +++ b/halo2_proofs/src/plonk/shuffle/prover.rs @@ -95,6 +95,7 @@ impl> Argument { /// [S_0, S_1, ..., S_{m-1}], this method /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + // TODO: Remove #[allow(clippy::too_many_arguments)] fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( &self, @@ -187,7 +188,7 @@ impl> Argument { challenges, ); - let blinding_factors = pk.vk.cs.blinding_factors(); + let blinding_factors = pk.vk.queries.blinding_factors(); let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; parallelize(&mut shuffle_product, |shuffle_product, start| { @@ -259,6 +260,7 @@ impl> Argument { /// constructs the grand product polynomial over the shuffle. /// The grand product polynomial is used to populate the Product struct. /// The Product struct is added to the Shuffle and finally returned by the method. 
+ // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit_product< 'a, @@ -391,6 +393,7 @@ impl Committed { Ok(Evaluated { constructed: self }) } + // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -439,6 +442,7 @@ impl Evaluated { })) } + // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, diff --git a/halo2_proofs/src/plonk/shuffle/verifier.rs b/halo2_proofs/src/plonk/shuffle/verifier.rs index 379cc5c8a1..2f77b52d1d 100644 --- a/halo2_proofs/src/plonk/shuffle/verifier.rs +++ b/halo2_proofs/src/plonk/shuffle/verifier.rs @@ -4,7 +4,7 @@ use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, Challeng use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::{Error, VerifyingKey, VerifyingKeyV2}, poly::{commitment::MSM, Rotation, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; @@ -114,6 +114,30 @@ impl Evaluated { ) } + // NOTE: Copy of queries with VerifyingKeyV2 + pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKeyV2, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open shuffle product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } + + // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r VerifyingKey, diff --git a/halo2_proofs/src/plonk/vanishing/verifier.rs b/halo2_proofs/src/plonk/vanishing/verifier.rs index 0881dfb2c0..a179336e0d 100644 --- a/halo2_proofs/src/plonk/vanishing/verifier.rs +++ b/halo2_proofs/src/plonk/vanishing/verifier.rs @@ -4,7 +4,7 @@ use ff::Field; use 
crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::{Error, VerifyingKey, VerifyingKeyV2}, poly::{ commitment::{Params, MSM}, VerifierQuery, @@ -53,6 +53,22 @@ impl Argument { } impl Committed { + pub(in crate::plonk) fn read_commitments_after_y_v2< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + vk: &VerifyingKeyV2, + transcript: &mut T, + ) -> Result, Error> { + // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 + let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; + + Ok(Constructed { + h_commitments, + random_poly_commitment: self.random_poly_commitment, + }) + } pub(in crate::plonk) fn read_commitments_after_y< E: EncodedChallenge, T: TranscriptRead, diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index 76675bcdfa..6efee590f2 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -4,7 +4,7 @@ use std::iter; use super::{ vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, + VerifyingKey, VerifyingKeyV2, }; use crate::arithmetic::compute_inner_product; use crate::poly::commitment::{CommitmentScheme, Verifier}; @@ -20,6 +20,428 @@ mod batch; #[cfg(feature = "batch")] pub use batch::BatchVerifier; +/// Returns a boolean indicating whether or not the proof is valid +pub fn verify_proof_v2< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKeyV2, + strategy: Strategy, + instances: &[&[&[Scheme::Scalar]]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + // Check that instances matches the expected number of instance columns + for instances in instances.iter() { + if instances.len() != 
vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + let instance_commitments = if V::QUERY_INSTANCE { + instances + .iter() + .map(|instance| { + instance + .iter() + .map(|instance| { + if instance.len() + > params.n() as usize - (vk.queries.blinding_factors() + 1) + { + return Err(Error::InstanceTooLarge); + } + let mut poly = instance.to_vec(); + poly.resize(params.n() as usize, Scheme::Scalar::ZERO); + let poly = vk.domain.lagrange_from_vec(poly); + + Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) + }) + .collect::, _>>() + }) + .collect::, _>>()? + } else { + vec![vec![]; instances.len()] + }; + + let num_proofs = instance_commitments.len(); + + // Hash verification key into transcript + vk.hash_into(transcript)?; + + if V::QUERY_INSTANCE { + for instance_commitments in instance_commitments.iter() { + // Hash the instance (external) commitments into the transcript + for commitment in instance_commitments { + transcript.common_point(*commitment)? 
+ } + } + } else { + for instance in instances.iter() { + for instance in instance.iter() { + for value in instance.iter() { + transcript.common_scalar(*value)?; + } + } + } + } + + // Hash the prover's advice commitments into the transcript and squeeze challenges + let (advice_commitments, challenges) = { + let mut advice_commitments = + vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; + let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; + + for current_phase in vk.cs.phases() { + for advice_commitments in advice_commitments.iter_mut() { + for (phase, commitment) in vk + .cs + .advice_column_phase + .iter() + .zip(advice_commitments.iter_mut()) + { + if current_phase == *phase { + *commitment = transcript.read_point()?; + } + } + } + for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { + if current_phase == *phase { + *challenge = *transcript.squeeze_challenge_scalar::<()>(); + } + } + } + + (advice_commitments, challenges) + }; + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + + let lookups_permuted = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each lookup permuted commitment + vk.cs + .lookups + .iter() + .map(|argument| argument.read_permuted_commitments(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); + + let permutations_committed = (0..num_proofs) + .map(|_| { + // Hash each permutation product commitment + vk.cs + .permutation + .read_product_commitments_v2(vk, transcript) + }) + .collect::, _>>()?; + + let lookups_committed = lookups_permuted + .into_iter() + .map(|lookups| { + // Hash each lookup product commitment + lookups + .into_iter() + .map(|lookup| 
lookup.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_committed = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each shuffle product commitment + vk.cs + .shuffles + .iter() + .map(|argument| argument.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; + + // Sample y challenge, which keeps the gates linearly independent. + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + + let vanishing = vanishing.read_commitments_after_y_v2(vk, transcript)?; + + // Sample x challenge, which is used to ensure the circuit is + // satisfied with high probability. + let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + let instance_evals = if V::QUERY_INSTANCE { + (0..num_proofs) + .map(|_| -> Result, _> { read_n_scalars(transcript, vk.queries.instance.len()) }) + .collect::, _>>()? + } else { + let xn = x.pow([params.n()]); + let (min_rotation, max_rotation) = + vk.queries + .instance + .iter() + .fold((0, 0), |(min, max), (_, rotation)| { + if rotation.0 < min { + (rotation.0, max) + } else if rotation.0 > max { + (min, rotation.0) + } else { + (min, max) + } + }); + let max_instance_len = instances + .iter() + .flat_map(|instance| instance.iter().map(|instance| instance.len())) + .max_by(Ord::cmp) + .unwrap_or_default(); + let l_i_s = &vk.domain.l_i_range( + *x, + xn, + -max_rotation..max_instance_len as i32 + min_rotation.abs(), + ); + instances + .iter() + .map(|instances| { + vk.queries + .instance + .iter() + .map(|(column, rotation)| { + let instances = instances[column.index()]; + let offset = (max_rotation - rotation.0) as usize; + compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) + }) + .collect::>() + }) + .collect::>() + }; + + let advice_evals = (0..num_proofs) + .map(|_| -> Result, _> { read_n_scalars(transcript, vk.queries.advice.len()) }) + 
.collect::, _>>()?; + + let fixed_evals = read_n_scalars(transcript, vk.queries.fixed.len())?; + + let vanishing = vanishing.evaluate_after_x(transcript)?; + + let permutations_common = vk.permutation.evaluate(transcript)?; + + let permutations_evaluated = permutations_committed + .into_iter() + .map(|permutation| permutation.evaluate(transcript)) + .collect::, _>>()?; + + let lookups_evaluated = lookups_committed + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|lookup| lookup.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_evaluated = shuffles_committed + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|shuffle| shuffle.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // This check ensures the circuit is satisfied so long as the polynomial + // commitments open to the correct values. + let vanishing = { + // x^n + let xn = x.pow([params.n()]); + + let blinding_factors = vk.queries.blinding_factors(); + let l_evals = vk + .domain + .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); + assert_eq!(l_evals.len(), 2 + blinding_factors); + let l_last = l_evals[0]; + let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] + .iter() + .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); + let l_0 = l_evals[1 + blinding_factors]; + + // Compute the expected value of h(x) + let expressions = advice_evals + .iter() + .zip(instance_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { + let challenges = &challenges; + let fixed_evals = &fixed_evals; + std::iter::empty() + // Evaluate the circuit using the custom gates provided + .chain(vk.cs.gates.iter().flat_map(move |gate| { + gate.polynomials().iter().map(move |poly| { + poly.evaluate( + &|scalar| scalar, + &|_| { + panic!("virtual 
selectors are removed during optimization") + }, + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + })) + .chain(permutation.expressions_v2( + vk, + &vk.cs.permutation, + &permutations_common, + advice_evals, + fixed_evals, + instance_evals, + l_0, + l_last, + l_blind, + beta, + gamma, + x, + )) + .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + beta, + gamma, + advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + gamma, + advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + }, + ); + + vanishing.verify(params, expressions, y, xn) + }; + + let queries = instance_commitments + .iter() + .zip(instance_evals.iter()) + .zip(advice_commitments.iter()) + .zip(advice_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |( + ( + ( + ( + ((instance_commitments, instance_evals), advice_commitments), + advice_evals, + ), + permutation, + ), + lookups, + ), + shuffles, + )| { + iter::empty() + .chain( + V::QUERY_INSTANCE + .then_some(vk.queries.instance.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &instance_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + instance_evals[query_index], + ) + }, + )) + .into_iter() + .flatten(), + ) + .chain(vk.queries.advice.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &advice_commitments[column.index()], + 
vk.domain.rotate_omega(*x, at), + advice_evals[query_index], + ) + }, + )) + .chain(permutation.queries_v2(vk, x)) + .chain(lookups.iter().flat_map(move |p| p.queries_v2(vk, x))) + .chain(shuffles.iter().flat_map(move |p| p.queries_v2(vk, x))) + }, + ) + .chain( + vk.queries + .fixed + .iter() + .enumerate() + .map(|(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &vk.fixed_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + fixed_evals[query_index], + ) + }), + ) + .chain(permutations_common.queries(&vk.permutation, x)) + .chain(vanishing.queries(x)); + + // We are now convinced the circuit is satisfied so long as the + // polynomial commitments open to the correct values. + + let verifier = V::new(params); + strategy.process(|msm| { + verifier + .verify_proof(transcript, queries, msm) + .map_err(|_| Error::Opening) + }) +} + +// TODO: Remove /// Returns a boolean indicating whether or not the proof is valid pub fn verify_proof< 'params, From 013e6d4998d80c4acdd1ad3f144009dc035c9e6d Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 27 Dec 2023 18:01:52 +0100 Subject: [PATCH 09/79] Add sample circuit for testing --- halo2_proofs/src/plonk.rs | 2 - halo2_proofs/tests/frontend_backend_split.rs | 287 +++++++++++++++++++ 2 files changed, 287 insertions(+), 2 deletions(-) create mode 100644 halo2_proofs/tests/frontend_backend_split.rs diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index ad4dc16653..a8088208b9 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -193,8 +193,6 @@ impl VerifyingKeyV2 { // let s = format!("{:?}", vk.pinned()); // TODO(Edu): Is it Ok to not use the pinned Vk here? We removed a lot of stuff from Vk // and Cs, so maybe we already have the same as in PinnedVerificationKey? - // TODO(Edu): We removed queries information from the ConstraintSystem, so this output will - // definitely be a breaking change. 
let s = format!("{:?}", vk); hasher.update(&(s.len() as u64).to_le_bytes()); diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs new file mode 100644 index 0000000000..8ef9030989 --- /dev/null +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -0,0 +1,287 @@ +#![allow(clippy::many_single_char_names)] +#![allow(clippy::op_ref)] + +use assert_matches::assert_matches; +use ff::{FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_proofs::arithmetic::Field; +use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; +use halo2_proofs::dev::MockProver; +use halo2_proofs::plonk::{ + create_proof as create_plonk_proof, keygen_pk, keygen_vk, verify_proof as verify_plonk_proof, + Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Expression, Fixed, Instance, + ProvingKey, Selector, TableColumn, VerifyingKey, +}; +use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; +use halo2_proofs::poly::Rotation; +use halo2_proofs::poly::VerificationStrategy; +use halo2_proofs::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, + TranscriptWriterBuffer, +}; +use rand_core::{OsRng, RngCore}; +use std::marker::PhantomData; + +#[derive(Clone)] +struct MyCircuitConfig { + // A gate that uses selector, fixed, advice, has addition, multiplication and rotation + // s_gate[0] * (a[0] + b[0] * c[0] * d[0] - a[1]) + s_gate: Selector, + a: Column, + b: Column, + c: Column, + d: Column, + + // Copy constraints between columns (a, b) and (a, d) + + // A dynamic lookup: s_lookup * [1, a[0], b[0]] in s_ltable * [1, d[0], c[0]] + s_lookup: Column, + s_ltable: Column, + + // A shuffle: s_shufle * [1, a[0]] shuffle_of s_stable * [1, b[0]] + s_shuffle: Column, + s_stable: Column, + + // Instance + instance: Column, +} + +#[derive(Clone)] +struct MyCircuit { + _marker: std::marker::PhantomData, +} + +impl> Circuit for MyCircuit 
{ + type Config = MyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self { + _marker: std::marker::PhantomData {}, + } + } + + fn configure(meta: &mut ConstraintSystem) -> MyCircuitConfig { + let s_gate = meta.selector(); + let a = meta.advice_column(); + let b = meta.advice_column(); + let c = meta.advice_column(); + let d = meta.fixed_column(); + + meta.enable_equality(a); + meta.enable_equality(b); + meta.enable_equality(d); + + let s_lookup = meta.fixed_column(); + let s_ltable = meta.fixed_column(); + + let s_shuffle = meta.fixed_column(); + let s_stable = meta.fixed_column(); + + let instance = meta.instance_column(); + meta.enable_equality(instance); + + let one = Expression::Constant(F::ONE); + + meta.create_gate("gate_a", |meta| { + let s_gate = meta.query_selector(s_gate); + let a1 = meta.query_advice(a, Rotation::next()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + + vec![s_gate * (a + b * c * d - a1)] + }); + + meta.lookup_any("lookup", |meta| { + let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); + let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); + let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); + lhs.into_iter().zip(rhs.into_iter()).collect() + }); + + meta.shuffle("shuffle", |meta| { + let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); + let s_stable = meta.query_fixed(s_stable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + 
let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); + let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); + lhs.into_iter().zip(rhs.into_iter()).collect() + }); + + MyCircuitConfig { + s_gate, + a, + b, + c, + d, + s_lookup, + s_ltable, + s_shuffle, + s_stable, + instance, + } + } + + fn synthesize( + &self, + config: MyCircuitConfig, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + let assign_gate = |region: &mut Region<'_, F>, + offset: &mut usize, + a_assigned: Option>, + abcd: [u64; 4]| + -> Result<(AssignedCell, [AssignedCell; 4]), Error> { + let [a, b, c, d] = abcd; + config.s_gate.enable(region, *offset); + let a_assigned = if let Some(a_assigned) = a_assigned { + a_assigned + } else { + region.assign_advice(|| "", config.a, *offset, || Value::known(F::from(a)))? + }; + let a = a_assigned.value(); + let [b, c, d] = [b, c, d].map(|v| Value::known(F::from(b))); + let b_assigned = region.assign_advice(|| "", config.b, *offset, || b)?; + let c_assigned = region.assign_advice(|| "", config.c, *offset, || c)?; + let d_assigned = region.assign_fixed(|| "", config.d, *offset, || d)?; + *offset += 1; + let res = a + .zip(b.zip(c.zip(d))) + .map(|(a, (b, (c, d)))| *a + b * c * d); + // let res = a + b * c * d; + let res_assigned = region.assign_advice(|| "", config.a, *offset, || res)?; + Ok(( + res_assigned, + [a_assigned, b_assigned, c_assigned, d_assigned], + )) + }; + + let instances = layouter.assign_region( + || "single", + |mut region| { + let mut offset = 0; + let mut instances = Vec::new(); + // Enable the gate on a few consecutive rows with rotations + let (res, _) = assign_gate(&mut region, &mut offset, None, [2, 3, 4, 1])?; + instances.push(res.clone()); + let (res, _) = assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1])?; + instances.push(res.clone()); + let (res, _) = assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1])?; + instances.push(res.clone()); + let (res, _) = assign_gate( + &mut region, + &mut 
offset, + Some(res), + [0, 0xffffffff, 0xdeadbeef, 1], + )?; + let _ = assign_gate( + &mut region, + &mut offset, + Some(res), + [0, 0xabad1d3a, 0x12345678, 0x42424242], + )?; + offset += 1; + + // Enable the gate on non-consecutive rows with advice-advice copy constraints enabled + let (_, abcd1) = assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1])?; + offset += 1; + let (_, abcd2) = assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1])?; + offset += 1; + let (_, abcd3) = assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1])?; + offset += 1; + region.constrain_equal(abcd1[1].cell(), abcd2[0].cell())?; + region.constrain_equal(abcd2[0].cell(), abcd3[1].cell())?; + instances.push(abcd1[1].clone()); + instances.push(abcd2[0].clone()); + + // Enable the gate on non-consecutive rows with advice-fixed copy constraints enabled + let (_, abcd1) = assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9])?; + offset += 1; + let (_, abcd2) = assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1])?; + offset += 1; + let (_, abcd3) = assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1])?; + offset += 1; + region.constrain_equal(abcd1[1].cell(), abcd1[3].cell())?; + region.constrain_equal(abcd2[1].cell(), abcd1[3].cell())?; + region.constrain_equal(abcd3[0].cell(), abcd1[3].cell())?; + + // Enable a dynamic lookup (powers of two) + let table: Vec<_> = (0u64..=10).map(|exp| (exp, 2u64.pow(exp as u32))).collect(); + let lookups = [(2, 4), (2, 4), (10, 1024), (0, 1), (2, 4)]; + for (table_row, lookup_row) in table + .iter() + .zip(lookups.iter().chain(std::iter::repeat(&(0, 1)))) + { + region.assign_fixed(|| "", config.s_lookup, offset, || Value::known(F::ONE))?; + region.assign_fixed(|| "", config.s_ltable, offset, || Value::known(F::ONE))?; + let lookup_row0 = Value::known(F::from(lookup_row.0)); + let lookup_row1 = Value::known(F::from(lookup_row.1)); + region.assign_advice(|| "", config.a, offset, || lookup_row0)?; + region.assign_advice(|| "", config.b, 
offset, || lookup_row1)?; + let table_row0 = Value::known(F::from(table_row.0)); + let table_row1 = Value::known(F::from(table_row.1)); + region.assign_fixed(|| "", config.d, offset, || table_row0)?; + region.assign_advice(|| "", config.c, offset, || table_row1)?; + offset += 1; + } + + // Enable a dynamic shuffle (sequence from 0 to 15) + let table: Vec<_> = (0u64..16).collect(); + let shuffle = [0u64, 2, 4, 6, 8, 10, 12, 14, 1, 3, 5, 7, 9, 11, 13, 15]; + assert_eq!(table.len(), shuffle.len()); + + for (table_row, shuffle_row) in table.iter().zip(shuffle.iter()) { + region.assign_fixed( + || "", + config.s_shuffle, + offset, + || Value::known(F::ONE), + )?; + region.assign_fixed(|| "", config.s_stable, offset, || Value::known(F::ONE))?; + let shuffle_row0 = Value::known(F::from(*shuffle_row)); + region.assign_advice(|| "", config.a, offset, || shuffle_row0)?; + let table_row0 = Value::known(F::from(*table_row)); + region.assign_advice(|| "", config.b, offset, || table_row0)?; + offset += 1; + } + + Ok(instances) + }, + )?; + + println!("DBG instances: {:?}", instances); + for (i, instance) in instances.iter().enumerate() { + layouter.constrain_instance(instance.cell(), config.instance, i)?; + } + + Ok(()) + } +} + +use halo2curves::bn256::Fr; + +#[test] +fn test_mycircuit() { + let k = 8; + let circuit: MyCircuit = MyCircuit { + _marker: std::marker::PhantomData {}, + }; + let instance = vec![ + Fr::from(0x1d), + Fr::from(0xf5), + Fr::from(0x2f5), + Fr::from(0x2), + Fr::from(0x2), + ]; + let prover = MockProver::run(k, &circuit, vec![instance]).unwrap(); + prover.assert_satisfied(); +} From 50fb46ab14f80d5b3d5de7f78bea9ff15a1954b3 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Thu, 28 Dec 2023 18:18:47 +0100 Subject: [PATCH 10/79] Make progress on testing fe-be split --- halo2_proofs/src/plonk/circuit.rs | 84 ++++- halo2_proofs/src/plonk/keygen.rs | 14 +- halo2_proofs/src/poly.rs | 25 +- halo2_proofs/tests/frontend_backend_split.rs | 372 ++++++++++++++----- 4 files changed, 401 insertions(+), 94 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index caa70bb029..3a786dc37c 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -3,7 +3,7 @@ use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; use crate::{ circuit::{Layouter, Region, Value}, - poly::{LagrangeCoeff, Polynomial, Rotation}, + poly::{batch_invert_assigned, LagrangeCoeff, Polynomial, Rotation}, }; use core::cmp::max; use core::ops::{Add, Mul}; @@ -1677,6 +1677,88 @@ pub struct ConstraintSystemV2Backend { // pub(crate) minimum_degree: Option, } +/// TODO: Document. Frontend function +pub fn compile_circuit>( + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> { + let n = 2usize.pow(k); + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + if n < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(k)); + } + + let mut assembly = crate::plonk::keygen::Assembly { + k, + fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], + permutation: permutation::keygen::Assembly::new(n, &cs.permutation), + selectors: vec![vec![false; n as usize]; cs.num_selectors], + usable_rows: 0..n as usize - (cs.blinding_factors() + 1), + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain URS + ConcreteCircuit::FloorPlanner::synthesize( + &mut assembly, + circuit, + config, + 
cs.constants.clone(), + )?; + + let mut fixed = batch_invert_assigned(assembly.fixed); + let (cs, selector_polys) = if compress_selectors { + cs.compress_selectors(assembly.selectors.clone()) + } else { + // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. + let selectors = std::mem::take(&mut assembly.selectors); + cs.directly_convert_selectors_to_fixed(selectors) + }; + fixed.extend( + selector_polys + .into_iter() + .map(|poly| Polynomial::new_lagrange_from_vec(poly)), + ); + + let cs2 = ConstraintSystemV2Backend { + num_fixed_columns: cs.num_fixed_columns, + num_advice_columns: cs.num_advice_columns, + num_instance_columns: cs.num_instance_columns, + num_challenges: cs.num_challenges, + unblinded_advice_columns: cs.unblinded_advice_columns, + advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), + gates: cs + .gates + .iter() + .map(|g| GateV2Backend { + name: g.name.clone(), + constraint_names: g.constraint_names.clone(), + polys: g.polys.clone(), + }) + .collect(), + permutation: cs.permutation, + lookups: cs.lookups, + shuffles: cs.shuffles, + general_column_annotations: cs.general_column_annotations, + }; + let preprocessing = PreprocessingV2 { + permutation: assembly.permutation, + fixed, + }; + + Ok(CompiledCircuitV2 { + cs: cs2, + preprocessing, + }) +} + impl ConstraintSystemV2Backend { /// Compute the degree of the constraint system (the maximum degree of all /// constraints). diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index b8891ce26e..a3ea376c6b 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -51,14 +51,14 @@ where /// Assembly to be used in circuit synthesis. 
#[derive(Debug)] -struct Assembly { - k: u32, - fixed: Vec, LagrangeCoeff>>, - permutation: permutation::keygen::Assembly, - selectors: Vec>, +pub(crate) struct Assembly { + pub(crate) k: u32, + pub(crate) fixed: Vec, LagrangeCoeff>>, + pub(crate) permutation: permutation::keygen::Assembly, + pub(crate) selectors: Vec>, // A range of available rows for assignment and copies. - usable_rows: Range, - _marker: std::marker::PhantomData, + pub(crate) usable_rows: Range, + pub(crate) _marker: std::marker::PhantomData, } impl Assignment for Assembly { diff --git a/halo2_proofs/src/poly.rs b/halo2_proofs/src/poly.rs index c52e982f19..68b33f0d47 100644 --- a/halo2_proofs/src/poly.rs +++ b/halo2_proofs/src/poly.rs @@ -65,8 +65,29 @@ impl Basis for ExtendedLagrangeCoeff {} /// basis. #[derive(Clone, Debug)] pub struct Polynomial { - values: Vec, - _marker: PhantomData, + pub(crate) values: Vec, + pub(crate) _marker: PhantomData, +} + +impl Polynomial { + pub(crate) fn new_empty(size: usize, zero: F) -> Self { + Polynomial { + values: vec![zero; size], + _marker: PhantomData, + } + } +} + +impl Polynomial { + /// Obtains a polynomial in Lagrange form when given a vector of Lagrange + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. 
+ pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { + Polynomial { + values, + _marker: PhantomData, + } + } } impl Index for Polynomial { diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 8ef9030989..dfd7d9223a 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -7,9 +7,10 @@ use halo2_proofs::arithmetic::Field; use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ - create_proof as create_plonk_proof, keygen_pk, keygen_vk, verify_proof as verify_plonk_proof, - Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Expression, Fixed, Instance, - ProvingKey, Selector, TableColumn, VerifyingKey, + compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, + Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, + ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProvingKey, + SecondPhase, Selector, TableColumn, VerifyingKey, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -41,28 +42,91 @@ struct MyCircuitConfig { s_shuffle: Column, s_stable: Column, - // Instance + // A FirstPhase challenge and SecondPhase column. 
We define the following gates: + // s_rlc * (a[0] + challenge * b[0] - e[0]) + // s_rlc * (c[0] + challenge * d[0] - e[0]) + s_rlc: Selector, + e: Column, + challenge: Challenge, + + // Instance with a gate: s_instance * (a[0] - instance[0]) + s_instance: Selector, instance: Column, } +impl MyCircuitConfig { + fn assign_gate>( + &self, + region: &mut Region<'_, F>, + offset: &mut usize, + a_assigned: Option>, + abcd: [u64; 4], + ) -> Result<(AssignedCell, [AssignedCell; 4]), Error> { + let [a, b, c, d] = abcd; + self.s_gate.enable(region, *offset)?; + let a_assigned = if let Some(a_assigned) = a_assigned { + a_assigned + } else { + region.assign_advice(|| "", self.a, *offset, || Value::known(F::from(a)))? + }; + let a = a_assigned.value(); + let [b, c, d] = [b, c, d].map(|v| Value::known(F::from(v))); + let b_assigned = region.assign_advice(|| "", self.b, *offset, || b)?; + let c_assigned = region.assign_advice(|| "", self.c, *offset, || c)?; + let d_assigned = region.assign_fixed(|| "", self.d, *offset, || d)?; + *offset += 1; + // let res = a + b * c * d; + let res = a + .zip(b.zip(c.zip(d))) + .map(|(a, (b, (c, d)))| *a + b * c * d); + let res_assigned = region.assign_advice(|| "", self.a, *offset, || res)?; + Ok(( + res_assigned, + [a_assigned, b_assigned, c_assigned, d_assigned], + )) + } +} + #[derive(Clone)] -struct MyCircuit { +struct MyCircuit { + k: u32, + input: u64, _marker: std::marker::PhantomData, } -impl> Circuit for MyCircuit { - type Config = MyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { +impl, const WIDTH_FACTOR: usize> MyCircuit { + fn new(k: u32, input: u64) -> Self { Self { + k, + input, _marker: std::marker::PhantomData {}, } } - fn configure(meta: &mut ConstraintSystem) -> MyCircuitConfig { + fn instance(&self) -> Vec { + let mut instance = Vec::new(); + let res = F::from(self.input); + instance.push(res); + let (b, c, d) = (3, 4, 
1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + let (b, c, d) = (6, 7, 1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + let (b, c, d) = (8, 9, 1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + instance.push(F::from(2)); + instance.push(F::from(2)); + instance + } + + fn instances(&self) -> Vec> { + let instance = self.instance(); + (0..WIDTH_FACTOR).map(|_| instance.clone()).collect() + } + + fn configure_single(meta: &mut ConstraintSystem) -> MyCircuitConfig { let s_gate = meta.selector(); let a = meta.advice_column(); let b = meta.advice_column(); @@ -79,6 +143,11 @@ impl> Circuit for MyCircuit { let s_shuffle = meta.fixed_column(); let s_stable = meta.fixed_column(); + let s_rlc = meta.selector(); + let e = meta.advice_column_in(SecondPhase); + let challenge = meta.challenge_usable_after(FirstPhase); + + let s_instance = meta.selector(); let instance = meta.instance_column(); meta.enable_equality(instance); @@ -117,6 +186,21 @@ impl> Circuit for MyCircuit { lhs.into_iter().zip(rhs.into_iter()).collect() }); + meta.create_gate("gate_rlc", |meta| { + let s_rlc = meta.query_selector(s_rlc); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let e = meta.query_advice(e, Rotation::cur()); + let challenge = meta.query_challenge(challenge); + + vec![ + s_rlc.clone() * (a + challenge.clone() * b - e.clone()), + s_rlc * (c + challenge * d - e), + ] + }); + MyCircuitConfig { s_gate, a, @@ -125,65 +209,53 @@ impl> Circuit for MyCircuit { d, s_lookup, s_ltable, + s_rlc, + e, + challenge, s_shuffle, s_stable, + s_instance, instance, } } - fn synthesize( + fn synthesize_unit( &self, - config: MyCircuitConfig, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - let assign_gate = |region: &mut Region<'_, F>, - offset: &mut 
usize, - a_assigned: Option>, - abcd: [u64; 4]| - -> Result<(AssignedCell, [AssignedCell; 4]), Error> { - let [a, b, c, d] = abcd; - config.s_gate.enable(region, *offset); - let a_assigned = if let Some(a_assigned) = a_assigned { - a_assigned - } else { - region.assign_advice(|| "", config.a, *offset, || Value::known(F::from(a)))? - }; - let a = a_assigned.value(); - let [b, c, d] = [b, c, d].map(|v| Value::known(F::from(b))); - let b_assigned = region.assign_advice(|| "", config.b, *offset, || b)?; - let c_assigned = region.assign_advice(|| "", config.c, *offset, || c)?; - let d_assigned = region.assign_fixed(|| "", config.d, *offset, || d)?; - *offset += 1; - let res = a - .zip(b.zip(c.zip(d))) - .map(|(a, (b, (c, d)))| *a + b * c * d); - // let res = a + b * c * d; - let res_assigned = region.assign_advice(|| "", config.a, *offset, || res)?; - Ok(( - res_assigned, - [a_assigned, b_assigned, c_assigned, d_assigned], - )) - }; - - let instances = layouter.assign_region( - || "single", + config: &MyCircuitConfig, + layouter: &mut impl Layouter, + ) -> Result<(usize, Vec>), Error> { + let challenge = layouter.get_challenge(config.challenge); + let (rows, instance_copy) = layouter.assign_region( + || "unit", |mut region| { let mut offset = 0; - let mut instances = Vec::new(); + let mut instance_copy = Vec::new(); + // First "a" value comes from instance + config.s_instance.enable(&mut region, offset); + let res = region.assign_advice_from_instance( + || "", + config.instance, + 0, + config.a, + offset, + )?; // Enable the gate on a few consecutive rows with rotations - let (res, _) = assign_gate(&mut region, &mut offset, None, [2, 3, 4, 1])?; - instances.push(res.clone()); - let (res, _) = assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1])?; - instances.push(res.clone()); - let (res, _) = assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1])?; - instances.push(res.clone()); - let (res, _) = assign_gate( + let (res, _) = + config.assign_gate(&mut 
region, &mut offset, Some(res), [0, 3, 4, 1])?; + instance_copy.push(res.clone()); + let (res, _) = + config.assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1])?; + instance_copy.push(res.clone()); + let (res, _) = + config.assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1])?; + instance_copy.push(res.clone()); + let (res, _) = config.assign_gate( &mut region, &mut offset, Some(res), [0, 0xffffffff, 0xdeadbeef, 1], )?; - let _ = assign_gate( + let _ = config.assign_gate( &mut region, &mut offset, Some(res), @@ -192,23 +264,29 @@ impl> Circuit for MyCircuit { offset += 1; // Enable the gate on non-consecutive rows with advice-advice copy constraints enabled - let (_, abcd1) = assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1])?; + let (_, abcd1) = + config.assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1])?; offset += 1; - let (_, abcd2) = assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1])?; + let (_, abcd2) = + config.assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1])?; offset += 1; - let (_, abcd3) = assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1])?; + let (_, abcd3) = + config.assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1])?; offset += 1; region.constrain_equal(abcd1[1].cell(), abcd2[0].cell())?; region.constrain_equal(abcd2[0].cell(), abcd3[1].cell())?; - instances.push(abcd1[1].clone()); - instances.push(abcd2[0].clone()); + instance_copy.push(abcd1[1].clone()); + instance_copy.push(abcd2[0].clone()); // Enable the gate on non-consecutive rows with advice-fixed copy constraints enabled - let (_, abcd1) = assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9])?; + let (_, abcd1) = + config.assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9])?; offset += 1; - let (_, abcd2) = assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1])?; + let (_, abcd2) = + config.assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1])?; offset += 1; - let (_, abcd3) = assign_gate(&mut region, &mut offset, 
None, [9, 2, 1, 1])?; + let (_, abcd3) = + config.assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1])?; offset += 1; region.constrain_equal(abcd1[1].cell(), abcd1[3].cell())?; region.constrain_equal(abcd2[1].cell(), abcd1[3].cell())?; @@ -234,6 +312,17 @@ impl> Circuit for MyCircuit { offset += 1; } + // Enable RLC gate 3 times + for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { + config.s_rlc.enable(&mut region, offset)?; + let (_, abcd1) = config.assign_gate(&mut region, &mut offset, None, abcd)?; + let rlc = challenge + .zip(abcd1[0].value().zip(abcd1[1].value())) + .map(|(ch, (a, b))| *a + ch * b); + region.assign_advice(|| "", config.e, offset - 1, || rlc)?; + offset += 1; + } + // Enable a dynamic shuffle (sequence from 0 to 15) let table: Vec<_> = (0u64..16).collect(); let shuffle = [0u64, 2, 4, 6, 8, 10, 12, 14, 1, 3, 5, 7, 9, 11, 13, 15]; @@ -254,34 +343,149 @@ impl> Circuit for MyCircuit { offset += 1; } - Ok(instances) + Ok((offset, instance_copy)) }, )?; - println!("DBG instances: {:?}", instances); - for (i, instance) in instances.iter().enumerate() { - layouter.constrain_instance(instance.cell(), config.instance, i)?; - } + Ok((rows, instance_copy)) + } +} + +impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit { + type Config = Vec; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + self.clone() + } + + fn configure(meta: &mut ConstraintSystem) -> Vec { + assert!(WIDTH_FACTOR > 0); + (0..WIDTH_FACTOR) + .map(|_| Self::configure_single(meta)) + .collect() + } + fn synthesize( + &self, + config: Vec, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + // 2 queries from first gate, 3 for permutation argument, 1 for multipoen, 1 for off-by-one + // errors, 1 for off-by-two errors? 
+ let unusable_rows = 2 + 3 + 1 + 1 + 1; + let max_rows = 2usize.pow(self.k) - unusable_rows; + for config in &config { + let mut total_rows = 0; + loop { + let (rows, instance_copy) = self.synthesize_unit(config, &mut layouter)?; + if total_rows == 0 { + for (i, instance) in instance_copy.iter().enumerate() { + layouter.constrain_instance(instance.cell(), config.instance, 1 + i)?; + } + } + total_rows += rows; + if total_rows + rows > max_rows { + break; + } + } + assert!(total_rows <= max_rows); + } Ok(()) } } -use halo2curves::bn256::Fr; +use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG, ParamsVerifierKZG}; +use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; +use halo2_proofs::poly::kzg::strategy::SingleStrategy; +use halo2curves::bn256::{Bn256, Fr, G1Affine}; +use rand_core::block::BlockRng; +use rand_core::block::BlockRngCore; + +// One number generator, that can be used as a deterministic Rng, outputing fixed values. +struct OneNg {} + +impl BlockRngCore for OneNg { + type Item = u32; + type Results = [u32; 16]; + + fn generate(&mut self, results: &mut Self::Results) { + for elem in results.iter_mut() { + *elem = 1; + } + } +} #[test] -fn test_mycircuit() { - let k = 8; - let circuit: MyCircuit = MyCircuit { - _marker: std::marker::PhantomData {}, - }; - let instance = vec![ - Fr::from(0x1d), - Fr::from(0xf5), - Fr::from(0x2f5), - Fr::from(0x2), - Fr::from(0x2), - ]; - let prover = MockProver::run(k, &circuit, vec![instance]).unwrap(); +fn test_mycircuit_mock() { + let k = 6; + const WIDTH_FACTOR: usize = 2; + let circuit: MyCircuit = MyCircuit::new(k, 42); + let instances = circuit.instances(); + let prover = MockProver::run(k, &circuit, instances).unwrap(); prover.assert_satisfied(); } + +#[test] +fn test_mycircuit_full_legacy() { + let k = 6; + const WIDTH_FACTOR: usize = 1; + let circuit: MyCircuit = MyCircuit::new(k, 42); + + // Setup + let params = ParamsKZG::::new(k); + let verifier_params = 
params.verifier_params(); + let vk = keygen_vk(¶ms, &circuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); + + // Proving + let instances = circuit.instances(); + let instances_slice: &[&[Fr]] = &(instances + .iter() + .map(|instance| instance.as_slice()) + .collect::>()); + + let rng = BlockRng::new(OneNg {}); + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + create_proof::, ProverSHPLONK<'_, Bn256>, _, _, _, _>( + ¶ms, + &pk, + &[circuit.clone()], + &[instances_slice], + rng, + &mut transcript, + ) + .expect("proof generation should not fail"); + let proof = transcript.finalize(); + + // Verify + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof::, VerifierSHPLONK<'_, Bn256>, _, _, _>( + ¶ms, + &vk, + strategy, + &[instances_slice], + &mut verifier_transcript, + ) + .expect("verify succeeds"); +} + +#[test] +fn test_mycircuit_full_split() { + let k = 6; + const WIDTH_FACTOR: usize = 1; + let circuit: MyCircuit = MyCircuit::new(k, 42); + let compiled_circuit = compile_circuit(k, &circuit, false).unwrap(); + + // Setup + let params = ParamsKZG::::new(k); + let verifier_params = params.verifier_params(); + let vk = keygen_vk_v2(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); + let pk = + keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); +} From a8a200911bc7283a3a47ff0acf50b5fb497c4949 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 29 Dec 2023 12:19:49 +0100 Subject: [PATCH 11/79] Add witness calculator --- halo2_proofs/src/plonk/circuit.rs | 210 ++++++++++++- halo2_proofs/src/plonk/prover.rs | 295 +++++++++---------- halo2_proofs/tests/frontend_backend_split.rs | 51 +++- 3 files changed, 391 insertions(+), 165 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 3a786dc37c..7b16af6157 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1,6 +1,7 @@ use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; +use crate::plonk::WitnessCollection; use crate::{ circuit::{Layouter, Region, Value}, poly::{batch_invert_assigned, LagrangeCoeff, Polynomial, Rotation}, @@ -11,6 +12,7 @@ use ff::Field; use sealed::SealedPhase; use std::collections::BTreeSet; use std::collections::HashMap; +use std::collections::HashSet; use std::fmt::Debug; use std::iter::{Product, Sum}; use std::{ @@ -1677,12 +1679,194 @@ pub struct ConstraintSystemV2Backend { // pub(crate) minimum_degree: Option, } +/// Witness calculator. 
Frontend function +#[derive(Debug)] +pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { + k: u32, + n: usize, + unusable_rows_start: usize, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + next_phase: u8, +} + +impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { + /// Create a new WitnessCalculator + pub fn new( + k: u32, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + ) -> Self { + let n = 2usize.pow(k); + let unusable_rows_start = n - (cs.blinding_factors() + 1); + Self { + k, + n, + unusable_rows_start, + circuit, + config, + cs, + instances, + next_phase: 0, + } + } + + /// Calculate witness at phase + pub fn calc( + &mut self, + phase: u8, + challenges: &HashMap, + ) -> Result, LagrangeCoeff>>>, Error> { + if phase != self.next_phase { + return Err(Error::Other(format!( + "Expected phase {}, got {}", + self.next_phase, phase + ))); + } + let current_phase = match phase { + 0 => FirstPhase.to_sealed(), + 1 => SecondPhase.to_sealed(), + 2 => ThirdPhase.to_sealed(), + _ => unreachable!("only phase [0..2] supported"), + }; + let mut witness = WitnessCollection { + k: self.k, + current_phase, + advice: vec![Polynomial::new_empty(self.n, F::ZERO.into()); self.cs.num_advice_columns], + unblinded_advice: HashSet::from_iter(self.cs.unblinded_advice_columns.clone()), + instances: self.instances, + challenges, + // The prover will not be allowed to assign values to advice + // cells that exist within inactive rows, which include some + // number of blinding factors and an extra row for use in the + // permutation argument. + usable_rows: ..self.unusable_rows_start, + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain the witness and other information. 
+ ConcreteCircuit::FloorPlanner::synthesize( + &mut witness, + self.circuit, + self.config.clone(), + self.cs.constants.clone(), + )?; + + let column_indices = self + .cs + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == *phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + self.next_phase += 1; + Ok(witness + .advice + .into_iter() + .enumerate() + .map(|(column_index, advice)| { + if column_indices.contains(&column_index) { + Some(advice) + } else { + None + } + }) + .collect()) + } +} + +/// Calculate witness at phase. Frontend function +pub fn calc_witness>( + k: u32, + circuit: &ConcreteCircuit, + config: &ConcreteCircuit::Config, + cs: &ConstraintSystem, + instances: &[&[F]], + phase: u8, + challenges: &HashMap, +) -> Result, LagrangeCoeff>>>, Error> { + let n = 2usize.pow(k); + let unusable_rows_start = n - (cs.blinding_factors() + 1); + let phase = match phase { + 0 => FirstPhase.to_sealed(), + 1 => SecondPhase.to_sealed(), + 2 => ThirdPhase.to_sealed(), + _ => unreachable!("only phase [0..2] supported"), + }; + let mut witness = WitnessCollection { + k, + current_phase: phase, + advice: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_advice_columns], + unblinded_advice: HashSet::from_iter(cs.unblinded_advice_columns.clone()), + instances, + challenges, + // The prover will not be allowed to assign values to advice + // cells that exist within inactive rows, which include some + // number of blinding factors and an extra row for use in the + // permutation argument. + usable_rows: ..unusable_rows_start, + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain the witness and other information. 
+ ConcreteCircuit::FloorPlanner::synthesize( + &mut witness, + circuit, + config.clone(), + cs.constants.clone(), + )?; + + let column_indices = cs + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if witness.current_phase == *phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + Ok(witness + .advice + .into_iter() + .enumerate() + .map(|(column_index, advice)| { + if column_indices.contains(&column_index) { + Some(advice) + } else { + None + } + }) + .collect()) +} + /// TODO: Document. Frontend function pub fn compile_circuit>( k: u32, circuit: &ConcreteCircuit, compress_selectors: bool, -) -> Result, Error> { +) -> Result< + ( + CompiledCircuitV2, + ConcreteCircuit::Config, + ConstraintSystem, + ), + Error, +> { let n = 2usize.pow(k); let mut cs = ConstraintSystem::default(); #[cfg(feature = "circuit-params")] @@ -1708,7 +1892,7 @@ pub fn compile_circuit>( ConcreteCircuit::FloorPlanner::synthesize( &mut assembly, circuit, - config, + config.clone(), cs.constants.clone(), )?; @@ -1731,7 +1915,7 @@ pub fn compile_circuit>( num_advice_columns: cs.num_advice_columns, num_instance_columns: cs.num_instance_columns, num_challenges: cs.num_challenges, - unblinded_advice_columns: cs.unblinded_advice_columns, + unblinded_advice_columns: cs.unblinded_advice_columns.clone(), advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), gates: cs @@ -1743,20 +1927,24 @@ pub fn compile_circuit>( polys: g.polys.clone(), }) .collect(), - permutation: cs.permutation, - lookups: cs.lookups, - shuffles: cs.shuffles, - general_column_annotations: cs.general_column_annotations, + permutation: cs.permutation.clone(), + lookups: cs.lookups.clone(), + shuffles: cs.shuffles.clone(), + general_column_annotations: cs.general_column_annotations.clone(), }; let preprocessing = PreprocessingV2 { permutation: assembly.permutation, fixed, }; - 
Ok(CompiledCircuitV2 { - cs: cs2, - preprocessing, - }) + Ok(( + CompiledCircuitV2 { + cs: cs2, + preprocessing, + }, + config, + cs, + )) } impl ConstraintSystemV2Backend { diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 1f3130dd75..956a456371 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -86,6 +86,7 @@ impl< pub fn new( params: &'params Scheme::ParamsProver, pk: &'a ProvingKeyV2, + // TODO: If this was a vector the usage would be simpler instances: &[&[&[Scheme::Scalar]]], rng: R, mut transcript: T, @@ -235,12 +236,14 @@ impl< .collect::>(); // TODO: Check that witness.len() is the expected number of advice columns. - if witness.len() != self.instance.len() { - return Err(Error::Other(format!("witness.len() != instance.len()"))); + if witness.len() != advice.len() { + return Err(Error::Other(format!("witness.len() != advice.len()"))); } for witness in witness.iter() { - if witness.len() != meta.num_instance_columns { - return Err(Error::InvalidInstances); + if witness.len() != self.params.n() as usize { + return Err(Error::Other(format!( + "unexpected length in witness columns" + ))); } } @@ -644,6 +647,145 @@ impl< } } +pub(crate) struct WitnessCollection<'a, F: Field> { + pub(crate) k: u32, + pub(crate) current_phase: sealed::Phase, + pub(crate) advice: Vec, LagrangeCoeff>>, + pub(crate) unblinded_advice: HashSet, + pub(crate) challenges: &'a HashMap, + pub(crate) instances: &'a [&'a [F]], + pub(crate) usable_rows: RangeTo, + pub(crate) _marker: std::marker::PhantomData, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. 
+ } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. + if self.current_phase != column.column_type().phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` @@ -758,151 +900,6 @@ where pub advice_blinds: Vec>, } - struct WitnessCollection<'a, F: Field> { - k: u32, - current_phase: sealed::Phase, - advice: Vec, LagrangeCoeff>>, - unblinded_advice: HashSet, - challenges: &'a HashMap, - instances: &'a [&'a [F]], - usable_rows: RangeTo, - _marker: std::marker::PhantomData, - } - - impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. 
- } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy( - &mut self, - _: Column, - _: usize, - _: Column, - _: usize, - ) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. 
- } - } - let (advice, challenges) = { let mut advice = vec![ AdviceSingle:: { diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index dfd7d9223a..708a57d310 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -7,10 +7,10 @@ use halo2_proofs::arithmetic::Field; use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ - compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, - Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, - ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProvingKey, - SecondPhase, Selector, TableColumn, VerifyingKey, + calc_witness, compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, + verify_proof, verify_proof_v2, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, + ConstraintSystem, ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, + ProverV2, ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -20,6 +20,7 @@ use halo2_proofs::transcript::{ TranscriptWriterBuffer, }; use rand_core::{OsRng, RngCore}; +use std::collections::HashMap; use std::marker::PhantomData; #[derive(Clone)] @@ -480,7 +481,7 @@ fn test_mycircuit_full_split() { let k = 6; const WIDTH_FACTOR: usize = 1; let circuit: MyCircuit = MyCircuit::new(k, 42); - let compiled_circuit = compile_circuit(k, &circuit, false).unwrap(); + let (compiled_circuit, config, cs) = compile_circuit(k, &circuit, false).unwrap(); // Setup let params = ParamsKZG::::new(k); @@ -488,4 +489,44 @@ fn test_mycircuit_full_split() { let vk = keygen_vk_v2(¶ms, 
&compiled_circuit).expect("keygen_vk should not fail"); let pk = keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); + + // Proving + let instances = circuit.instances(); + let instances_slice: &[&[Fr]] = &(instances + .iter() + .map(|instance| instance.as_slice()) + .collect::>()); + let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, instances_slice); + + let rng = BlockRng::new(OneNg {}); + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + let mut prover = + ProverV2::, ProverSHPLONK<'_, Bn256>, _, _, _>::new( + ¶ms, + &pk, + &[instances_slice], + rng, + transcript, + ) + .unwrap(); + let witness_phase0 = witness_calc.calc(0, &HashMap::new()).unwrap(); + let challenges_phase0 = prover.commit_phase(0, vec![witness_phase0]).unwrap(); + let witness_phase1 = witness_calc.calc(1, &challenges_phase0).unwrap(); + let _challenges_phase1 = prover.commit_phase(1, vec![witness_phase1]).unwrap(); + let mut transcript = prover.create_proof().unwrap(); + let proof = transcript.finalize(); + + // Verify + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof_v2::, VerifierSHPLONK<'_, Bn256>, _, _, _>( + ¶ms, + &vk, + strategy, + &[instances_slice], + &mut verifier_transcript, + ) + .expect("verify succeeds"); } From c27237851fe3e2e709221d1e7a28c0be32605222 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 29 Dec 2023 13:55:30 +0100 Subject: [PATCH 12/79] Full flow, but verification fails --- halo2_proofs/src/plonk/circuit.rs | 84 ++++---------------- halo2_proofs/src/plonk/prover.rs | 19 ++++- halo2_proofs/src/plonk/verifier.rs | 13 +-- halo2_proofs/src/poly/kzg/strategy.rs | 3 +- halo2_proofs/tests/frontend_backend_split.rs | 16 ++-- 5 files changed, 50 insertions(+), 85 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 7b16af6157..0c7cd33f21 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1754,7 +1754,8 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret self.circuit, self.config.clone(), self.cs.constants.clone(), - )?; + ) + .expect("todo"); let column_indices = self .cs @@ -1786,74 +1787,6 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret } } -/// Calculate witness at phase. Frontend function -pub fn calc_witness>( - k: u32, - circuit: &ConcreteCircuit, - config: &ConcreteCircuit::Config, - cs: &ConstraintSystem, - instances: &[&[F]], - phase: u8, - challenges: &HashMap, -) -> Result, LagrangeCoeff>>>, Error> { - let n = 2usize.pow(k); - let unusable_rows_start = n - (cs.blinding_factors() + 1); - let phase = match phase { - 0 => FirstPhase.to_sealed(), - 1 => SecondPhase.to_sealed(), - 2 => ThirdPhase.to_sealed(), - _ => unreachable!("only phase [0..2] supported"), - }; - let mut witness = WitnessCollection { - k, - current_phase: phase, - advice: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_advice_columns], - unblinded_advice: HashSet::from_iter(cs.unblinded_advice_columns.clone()), - instances, - challenges, - // The prover will not be allowed to assign values to advice - // cells that exist within inactive rows, which include some - // number of blinding factors and an extra row for use in the - // permutation argument. 
- usable_rows: ..unusable_rows_start, - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain the witness and other information. - ConcreteCircuit::FloorPlanner::synthesize( - &mut witness, - circuit, - config.clone(), - cs.constants.clone(), - )?; - - let column_indices = cs - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if witness.current_phase == *phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - Ok(witness - .advice - .into_iter() - .enumerate() - .map(|(column_index, advice)| { - if column_indices.contains(&column_index) { - Some(advice) - } else { - None - } - }) - .collect()) -} - /// TODO: Document. Frontend function pub fn compile_circuit>( k: u32, @@ -2033,6 +1966,19 @@ impl ConstraintSystemV2Backend { collect_queries(expr, &mut queries); } } + for column in self.permutation.get_columns() { + match column.column_type { + Any::Instance => queries + .instance + .insert((Column::new(column.index(), Instance), Rotation::cur())), + Any::Fixed => queries + .fixed + .insert((Column::new(column.index(), Fixed), Rotation::cur())), + Any::Advice(advice) => queries + .advice + .insert((Column::new(column.index(), advice), Rotation::cur())), + }; + } for (column, _) in queries.advice.iter() { num_advice_queries[column.index()] += 1; diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 956a456371..90f7e0c1c0 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -239,12 +239,25 @@ impl< if witness.len() != advice.len() { return Err(Error::Other(format!("witness.len() != advice.len()"))); } - for witness in witness.iter() { - if witness.len() != self.params.n() as usize { + for witness_circuit in witness.iter() { + if witness_circuit.len() != meta.num_advice_columns { return Err(Error::Other(format!( - "unexpected length in witness columns" + "unexpected length in witness_circuitk. 
Got {}, expected {}", + witness_circuit.len(), + meta.num_advice_columns, ))); } + for witness_column in witness_circuit { + if let Some(witness_column) = witness_column { + if witness_column.len() != self.params.n() as usize { + return Err(Error::Other(format!( + "unexpected length in witness_column. Got {}, expected {}", + witness_column.len(), + self.params.n() + ))); + } + } + } } // Check that all current_phase advice columns are Some diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index 6efee590f2..cfdd15677a 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -434,11 +434,14 @@ where // polynomial commitments open to the correct values. let verifier = V::new(params); - strategy.process(|msm| { - verifier - .verify_proof(transcript, queries, msm) - .map_err(|_| Error::Opening) - }) + Ok(strategy + .process(|msm| { + println!("ONE"); + verifier + .verify_proof(transcript, queries, msm) + .map_err(|_| Error::Opening) + }) + .expect("todo")) } // TODO: Remove diff --git a/halo2_proofs/src/poly/kzg/strategy.rs b/halo2_proofs/src/poly/kzg/strategy.rs index 14b6565b80..5e1a9cfa8e 100644 --- a/halo2_proofs/src/poly/kzg/strategy.rs +++ b/halo2_proofs/src/poly/kzg/strategy.rs @@ -102,7 +102,7 @@ where self.msm_accumulator.scale(E::Scalar::random(OsRng)); // Guard is updated with new msm contributions - let guard = f(self.msm_accumulator)?; + let guard = f(self.msm_accumulator).expect("todo"); Ok(Self { msm_accumulator: guard.msm_accumulator, }) @@ -144,6 +144,7 @@ where if msm.check() { Ok(()) } else { + println!("OH NO"); Err(Error::ConstraintSystemFailure) } } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 708a57d310..f24b3d83cb 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -7,8 +7,8 @@ use halo2_proofs::arithmetic::Field; use 
halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ - calc_witness, compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, - verify_proof, verify_proof_v2, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, + compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, + verify_proof_v2, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProverV2, ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, }; @@ -316,10 +316,11 @@ impl, const WIDTH_FACTOR: usize> MyCircuit // Enable RLC gate 3 times for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { config.s_rlc.enable(&mut region, offset)?; - let (_, abcd1) = config.assign_gate(&mut region, &mut offset, None, abcd)?; - let rlc = challenge - .zip(abcd1[0].value().zip(abcd1[1].value())) - .map(|(ch, (a, b))| *a + ch * b); + let (_, _) = config.assign_gate(&mut region, &mut offset, None, abcd)?; + let rlc = challenge.map(|ch| { + let [a, b, ..] 
= abcd; + F::from(a) + ch * F::from(b) + }); region.assign_advice(|| "", config.e, offset - 1, || rlc)?; offset += 1; } @@ -381,7 +382,8 @@ impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit Date: Tue, 2 Jan 2024 20:05:42 +0100 Subject: [PATCH 13/79] WIP --- Cargo.toml | 1 + halo2_proofs/src/plonk.rs | 18 +- halo2_proofs/src/plonk/circuit.rs | 259 +++++++++++++-- halo2_proofs/src/plonk/evaluation.rs | 168 +++++----- halo2_proofs/src/plonk/keygen.rs | 30 +- halo2_proofs/src/plonk/lookup.rs | 9 +- halo2_proofs/src/plonk/lookup/prover.rs | 4 +- halo2_proofs/src/plonk/permutation/prover.rs | 6 +- .../src/plonk/permutation/verifier.rs | 18 +- halo2_proofs/src/plonk/prover.rs | 58 ++-- halo2_proofs/src/plonk/shuffle.rs | 9 +- halo2_proofs/src/plonk/shuffle/prover.rs | 2 +- halo2_proofs/src/plonk/verifier.rs | 36 +- halo2_proofs/src/transcript.rs | 2 + halo2_proofs/tests/frontend_backend_split.rs | 312 +++++++++++------- 15 files changed, 623 insertions(+), 309 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b7878ae843..a57cd8dc11 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,3 +4,4 @@ members = [ "halo2_gadgets", "halo2_proofs", ] +resolver = "2" diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index a8088208b9..152361dd30 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -145,6 +145,7 @@ impl Queries { } } +// TODO: Remove in favour of VerifyingKey /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. #[derive(Clone, Debug)] @@ -152,8 +153,8 @@ pub struct VerifyingKeyV2 { domain: EvaluationDomain, fixed_commitments: Vec, permutation: permutation::VerifyingKey, - cs: ConstraintSystemV2Backend, - queries: Queries, + cs: ConstraintSystem, + // queries: Queries, /// Cached maximum degree of `cs` (which doesn't change after construction). cs_degree: usize, /// The representative of this `VerifyingKey` in transcripts. 
@@ -165,21 +166,21 @@ impl VerifyingKeyV2 { domain: EvaluationDomain, fixed_commitments: Vec, permutation: permutation::VerifyingKey, - cs: ConstraintSystemV2Backend, + cs: ConstraintSystem, ) -> Self where C::ScalarExt: FromUniformBytes<64>, { // Compute cached values. let cs_degree = cs.degree(); - let queries = cs.collect_queries(); + // let queries = cs.collect_queries(); let mut vk = Self { domain, fixed_commitments, permutation, cs, - queries, + // queries, cs_degree, // Temporary, this is not pinned. transcript_repr: C::Scalar::ZERO, @@ -199,7 +200,9 @@ impl VerifyingKeyV2 { hasher.update(s.as_bytes()); // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + // TODO: Uncomment + // vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + dbg!(&vk.transcript_repr); vk } @@ -425,7 +428,8 @@ impl VerifyingKey { hasher.update(s.as_bytes()); // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + // TODO: Uncomment + // vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); vk } diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 0c7cd33f21..a94f80fc0b 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -99,7 +99,7 @@ impl PartialOrd for Column { pub(crate) mod sealed { /// Phase of advice column #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(super) u8); + pub struct Phase(pub(crate) u8); impl Phase { pub fn prev(&self) -> Option { @@ -480,6 +480,15 @@ impl Selector { } } +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + /// Query of fixed column at a certain relative location #[derive(Copy, 
Clone, Debug, PartialEq, Eq)] pub struct FixedQuery { @@ -503,6 +512,17 @@ impl FixedQuery { } } +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, + /// Phase of this advice column + pub phase: sealed::Phase, +} + /// Query of advice column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct AdviceQuery { @@ -533,6 +553,15 @@ impl AdviceQuery { } } +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + /// Query of instance column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct InstanceQuery { @@ -794,6 +823,47 @@ pub trait Circuit { fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; } +/// Low-degree expression representing an identity that must hold over the committed columns. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ExpressionMid { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQueryMid), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQueryMid), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQueryMid), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl ExpressionMid { + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + use ExpressionMid::*; + match self { + Constant(_) => 0, + Fixed(_) => 1, + Advice(_) => 1, + Instance(_) => 1, + Challenge(_) => 0, + Negated(poly) => poly.degree(), + Sum(a, b) => max(a.degree(), b.degree()), + Product(a, b) => a.degree() + b.degree(), + Scaled(poly, _) => poly.degree(), + } + } +} + /// Low-degree expression representing an identity that must hold over the committed columns. #[derive(Clone, PartialEq, Eq)] pub enum Expression { @@ -819,6 +889,50 @@ pub enum Expression { Scaled(Box>, F), } +impl Into> for Expression { + fn into(self) -> ExpressionMid { + match self { + Expression::Constant(c) => ExpressionMid::Constant(c), + Expression::Selector(_) => unreachable!(), + Expression::Fixed(FixedQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Fixed(FixedQueryMid { + column_index, + rotation, + }), + Expression::Advice(AdviceQuery { + column_index, + rotation, + phase, + .. + }) => ExpressionMid::Advice(AdviceQueryMid { + column_index, + rotation, + phase, + }), + Expression::Instance(InstanceQuery { + column_index, + rotation, + .. 
+ }) => ExpressionMid::Instance(InstanceQueryMid { + column_index, + rotation, + }), + Expression::Challenge(c) => ExpressionMid::Challenge(c), + Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), + Expression::Sum(lhs, rhs) => { + ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Product(lhs, rhs) => { + ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), + } + } +} + impl Expression { /// Make side effects pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { @@ -1515,7 +1629,7 @@ impl>, Iter: IntoIterator> IntoIterato pub struct GateV2Backend { name: String, constraint_names: Vec, - polys: Vec>, + polys: Vec>, } impl GateV2Backend { @@ -1530,7 +1644,7 @@ impl GateV2Backend { } /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { + pub fn polynomials(&self) -> &[ExpressionMid] { &self.polys } } @@ -1596,42 +1710,92 @@ struct QueriesSet { fixed: BTreeSet<(Column, Rotation)>, } -fn collect_queries(expr: &Expression, queries: &mut QueriesSet) { +fn collect_queries(expr: &ExpressionMid, queries: &mut QueriesSet) { match expr { - Expression::Constant(_) => (), - Expression::Selector(_selector) => { - panic!("no Selector should arrive to the Backend"); - } - Expression::Fixed(query) => { + ExpressionMid::Constant(_) => (), + ExpressionMid::Fixed(query) => { queries .fixed .insert((Column::new(query.column_index, Fixed), query.rotation)); } - Expression::Advice(query) => { + ExpressionMid::Advice(query) => { queries.advice.insert(( Column::new(query.column_index, Advice { phase: query.phase }), query.rotation, )); } - Expression::Instance(query) => { + ExpressionMid::Instance(query) => { queries .instance .insert((Column::new(query.column_index, Instance), query.rotation)); } - Expression::Challenge(_) => (), - Expression::Negated(a) => collect_queries(a, queries), - 
Expression::Sum(a, b) => { + ExpressionMid::Challenge(_) => (), + ExpressionMid::Negated(a) => collect_queries(a, queries), + ExpressionMid::Sum(a, b) => { collect_queries(a, queries); collect_queries(b, queries); } - Expression::Product(a, b) => { + ExpressionMid::Product(a, b) => { collect_queries(a, queries); collect_queries(b, queries); } - Expression::Scaled(a, _) => collect_queries(a, queries), + ExpressionMid::Scaled(a, _) => collect_queries(a, queries), }; } +/* +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemV2BackendQueries { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + // pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + /// This is a cached vector that maps virtual selectors to the concrete + /// fixed column that they were compressed into. This is just used by dev + /// tooling right now. + // pub(crate) selector_map: Vec>, + pub(crate) gates: Vec>, + pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. 
+ pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(Column, Rotation)>, + pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. + // pub(crate) constants: Vec>, + + // pub(crate) minimum_degree: Option, +} +*/ + /// This is a description of the circuit environment, such as the gate, column and /// permutation arrangements. #[derive(Debug, Clone)] @@ -1664,11 +1828,11 @@ pub struct ConstraintSystemV2Backend { // Vector of lookup arguments, where each corresponds to a sequence of // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, + pub(crate) lookups: Vec>, // Vector of shuffle arguments, where each corresponds to a sequence of // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, + pub(crate) shuffles: Vec>, // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. 
pub(crate) general_column_annotations: HashMap, @@ -1851,18 +2015,55 @@ pub fn compile_circuit>( unblinded_advice_columns: cs.unblinded_advice_columns.clone(), advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), + // TODO: Clean up all the Expression -> Expression conversions gates: cs .gates .iter() .map(|g| GateV2Backend { name: g.name.clone(), constraint_names: g.constraint_names.clone(), - polys: g.polys.clone(), + polys: g.polys.clone().into_iter().map(|e| e.into()).collect(), }) .collect(), permutation: cs.permutation.clone(), - lookups: cs.lookups.clone(), - shuffles: cs.shuffles.clone(), + lookups: cs + .lookups + .iter() + .map(|l| lookup::ArgumentV2 { + name: l.name.clone(), + input_expressions: l + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + table_expressions: l + .table_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + shuffles: cs + .shuffles + .iter() + .map(|s| shuffle::ArgumentV2 { + name: s.name.clone(), + input_expressions: s + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + shuffle_expressions: s + .shuffle_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), general_column_annotations: cs.general_column_annotations.clone(), }; let preprocessing = PreprocessingV2 { @@ -1881,6 +2082,7 @@ pub fn compile_circuit>( } impl ConstraintSystemV2Backend { + /* /// Compute the degree of the constraint system (the maximum degree of all /// constraints). 
pub fn degree(&self) -> usize { @@ -1924,6 +2126,7 @@ impl ConstraintSystemV2Backend { // std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) degree } + */ pub(crate) fn phases(&self) -> Vec { let max_phase = self @@ -1984,12 +2187,14 @@ impl ConstraintSystemV2Backend { num_advice_queries[column.index()] += 1; } - Queries { + let queries = Queries { advice: queries.advice.into_iter().collect(), instance: queries.instance.into_iter().collect(), fixed: queries.fixed.into_iter().collect(), num_advice_queries, - } + }; + // println!("DBG collected queries\n{:#?}", queries); + queries } } @@ -2021,7 +2226,7 @@ pub struct ConstraintSystem { // Contains an integer for each advice column // identifying how many distinct queries it has // so far; should be same length as num_advice_columns. - num_advice_queries: Vec, + pub(crate) num_advice_queries: Vec, pub(crate) instance_queries: Vec<(Column, Rotation)>, pub(crate) fixed_queries: Vec<(Column, Rotation)>, @@ -2046,6 +2251,12 @@ pub struct ConstraintSystem { pub(crate) minimum_degree: Option, } +impl From> for ConstraintSystem { + fn from(circuit: ConstraintSystemV2Backend) -> Self { + todo!() + } +} + /// Represents the minimal parameters that determine a `ConstraintSystem`. 
#[allow(dead_code)] pub struct PinnedConstraintSystem<'a, F: Field> { diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index 83f52f16ac..812d1fd2e8 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -205,94 +205,96 @@ pub struct CalculationInfo { } impl Evaluator { - /// Creates a new evaluation structure - pub fn new_v2(cs: &ConstraintSystemV2Backend) -> Self { - let mut ev = Evaluator::default(); - - // Custom gates - let mut parts = Vec::new(); - for gate in cs.gates.iter() { - parts.extend( - gate.polynomials() - .iter() - .map(|poly| ev.custom_gates.add_expression(poly)), - ); - } - ev.custom_gates.add_calculation(Calculation::Horner( - ValueSource::PreviousValue(), - parts, - ValueSource::Y(), - )); - - // Lookups - for lookup in cs.lookups.iter() { - let mut graph = GraphEvaluator::default(); - - let mut evaluate_lc = |expressions: &Vec>| { - let parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; + /* + /// Creates a new evaluation structure + pub fn new_v2(cs: &ConstraintSystemV2Backend) -> Self { + let mut ev = Evaluator::default(); - // Input coset - let compressed_input_coset = evaluate_lc(&lookup.input_expressions); - // table coset - let compressed_table_coset = evaluate_lc(&lookup.table_expressions); - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - let right_gamma = graph.add_calculation(Calculation::Add( - compressed_table_coset, - ValueSource::Gamma(), - )); - let lc = graph.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Beta(), + // Custom gates + let mut parts = Vec::new(); + for gate in cs.gates.iter() { + parts.extend( + gate.polynomials() + .iter() + .map(|poly| ev.custom_gates.add_expression(poly)), + ); + } + ev.custom_gates.add_calculation(Calculation::Horner( + 
ValueSource::PreviousValue(), + parts, + ValueSource::Y(), )); - graph.add_calculation(Calculation::Mul(lc, right_gamma)); - - ev.lookups.push(graph); - } - - // Shuffles - for shuffle in cs.shuffles.iter() { - let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { - let parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - let mut graph_input = GraphEvaluator::default(); - let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); - let _ = graph_input.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Gamma(), - )); + // Lookups + for lookup in cs.lookups.iter() { + let mut graph = GraphEvaluator::default(); + + let mut evaluate_lc = |expressions: &Vec>| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + // Input coset + let compressed_input_coset = evaluate_lc(&lookup.input_expressions); + // table coset + let compressed_table_coset = evaluate_lc(&lookup.table_expressions); + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + let right_gamma = graph.add_calculation(Calculation::Add( + compressed_table_coset, + ValueSource::Gamma(), + )); + let lc = graph.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Beta(), + )); + graph.add_calculation(Calculation::Mul(lc, right_gamma)); + + ev.lookups.push(graph); + } - let mut graph_shuffle = GraphEvaluator::default(); - let compressed_shuffle_coset = - evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); - let _ = graph_shuffle.add_calculation(Calculation::Add( - compressed_shuffle_coset, - ValueSource::Gamma(), - )); + // Shuffles + for shuffle in cs.shuffles.iter() { + let evaluate_lc = |expressions: &Vec>, 
graph: &mut GraphEvaluator| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + let mut graph_input = GraphEvaluator::default(); + let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); + let _ = graph_input.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Gamma(), + )); + + let mut graph_shuffle = GraphEvaluator::default(); + let compressed_shuffle_coset = + evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); + let _ = graph_shuffle.add_calculation(Calculation::Add( + compressed_shuffle_coset, + ValueSource::Gamma(), + )); + + ev.shuffles.push(graph_input); + ev.shuffles.push(graph_shuffle); + } - ev.shuffles.push(graph_input); - ev.shuffles.push(graph_shuffle); + ev } - - ev - } + */ /// Creates a new evaluation structure // TODO: Remove @@ -476,7 +478,7 @@ impl Evaluator { // Permutations let sets = &permutation.sets; if !sets.is_empty() { - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); let last_rotation = Rotation(-((blinding_factors + 1) as i32)); let chunk_len = pk.vk.cs.degree() - 2; let delta_start = beta * &C::Scalar::ZETA; diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index a3ea376c6b..b5b7b200fb 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -213,11 +213,12 @@ where P: Params<'params, C>, C::Scalar: FromUniformBytes<64>, { - let cs = &circuit.cs; - let queries = cs.collect_queries(); + let cs2 = &circuit.cs; + let cs: ConstraintSystem = cs2.clone().into(); + // let queries = cs.collect_queries(); let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - if (params.n() as usize) < queries.minimum_rows() { + if (params.n() as usize) < cs.minimum_rows() { return 
Err(Error::not_enough_rows_available(params.k())); } @@ -239,7 +240,7 @@ where domain, fixed_commitments, permutation_vk, - cs.clone(), + cs, )) } @@ -344,7 +345,7 @@ where { let cs = &circuit.cs; - if (params.n() as usize) < vk.queries.minimum_rows() { + if (params.n() as usize) < vk.cs.minimum_rows() { return Err(Error::not_enough_rows_available(params.k())); } @@ -377,11 +378,7 @@ where // Compute l_blind(X) which evaluates to 1 for each blinding factor row // and 0 otherwise over the domain. let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..] - .iter_mut() - .rev() - .take(vk.queries.blinding_factors()) - { + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { *evaluation = C::Scalar::ONE; } let l_blind = vk.domain.lagrange_to_coeff(l_blind); @@ -390,7 +387,7 @@ where // Compute l_last(X) which evaluates to 1 on the first inactive row (just // before the blinding factors) and 0 otherwise over the domain let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.queries.blinding_factors() - 1] = C::Scalar::ONE; + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; let l_last = vk.domain.lagrange_to_coeff(l_last); let l_last = vk.domain.coeff_to_extended(l_last); @@ -405,7 +402,7 @@ where }); // Compute the optimized evaluation data structure - let ev = Evaluator::new_v2(&vk.cs); + let ev = Evaluator::new(&vk.cs); Ok(ProvingKeyV2 { vk, @@ -466,6 +463,15 @@ where } else { cs.directly_convert_selectors_to_fixed(assembly.selectors) }; + // println!( + // "DBG configure queries:\n{:#?}", + // ( + // &cs.advice_queries, + // &cs.instance_queries, + // &cs.fixed_queries, + // &cs.num_advice_queries + // ) + // ); fixed.extend( selector_polys .into_iter() diff --git a/halo2_proofs/src/plonk/lookup.rs b/halo2_proofs/src/plonk/lookup.rs index a7c4f68af2..375404bad5 100644 --- a/halo2_proofs/src/plonk/lookup.rs +++ b/halo2_proofs/src/plonk/lookup.rs @@ -1,10 +1,17 @@ -use 
super::circuit::Expression; +use super::circuit::{Expression, ExpressionMid}; use ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) table_expressions: Vec>, +} + #[derive(Clone)] pub struct Argument { pub(crate) name: String, diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs/src/plonk/lookup/prover.rs index c6c9859046..377773980b 100644 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ b/halo2_proofs/src/plonk/lookup/prover.rs @@ -287,7 +287,7 @@ impl Permuted { mut rng: R, transcript: &mut T, ) -> Result, Error> { - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); // Goal is to compute the products of fractions // // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) @@ -723,7 +723,7 @@ fn permute_expression_pair_v2<'params, C: CurveAffine, P: Params<'params, C>, R: input_expression: &Polynomial, table_expression: &Polynomial, ) -> Result, Error> { - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); let usable_rows = params.n() as usize - (blinding_factors + 1); let mut permuted_input_expression: Vec = input_expression.to_vec(); diff --git a/halo2_proofs/src/plonk/permutation/prover.rs b/halo2_proofs/src/plonk/permutation/prover.rs index d5683a815b..560a047d60 100644 --- a/halo2_proofs/src/plonk/permutation/prover.rs +++ b/halo2_proofs/src/plonk/permutation/prover.rs @@ -72,7 +72,7 @@ impl Argument { // 3 circuit for the permutation argument. assert!(pk.vk.cs_degree >= 3); let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); // Each column gets its own delta power. 
let mut deltaomega = C::Scalar::ONE; @@ -393,7 +393,7 @@ impl Constructed { transcript: &mut T, ) -> Result, Error> { let domain = &pk.vk.domain; - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); { let mut sets = self.sets.iter(); @@ -485,7 +485,7 @@ impl Evaluated { pk: &'a plonk::ProvingKeyV2, x: ChallengeX, ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); let x_last = pk .vk diff --git a/halo2_proofs/src/plonk/permutation/verifier.rs b/halo2_proofs/src/plonk/permutation/verifier.rs index 2cb6e6f925..ac2f944298 100644 --- a/halo2_proofs/src/plonk/permutation/verifier.rs +++ b/halo2_proofs/src/plonk/permutation/verifier.rs @@ -186,16 +186,14 @@ impl Evaluated { .iter() .map(|&column| match column.column_type() { Any::Advice(_) => { - advice_evals - [vk.queries.get_any_query_index(column, Rotation::cur())] + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Fixed => { - fixed_evals - [vk.queries.get_any_query_index(column, Rotation::cur())] + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Instance => { instance_evals - [vk.queries.get_any_query_index(column, Rotation::cur())] + [vk.cs.get_any_query_index(column, Rotation::cur())] } }) .zip(permutation_evals.iter()) @@ -209,15 +207,13 @@ impl Evaluated { .pow_vartime([(chunk_index * chunk_len) as u64])); for eval in columns.iter().map(|&column| match column.column_type() { Any::Advice(_) => { - advice_evals - [vk.queries.get_any_query_index(column, Rotation::cur())] + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Fixed => { - fixed_evals[vk.queries.get_any_query_index(column, Rotation::cur())] + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Instance => { - instance_evals - 
[vk.queries.get_any_query_index(column, Rotation::cur())] + instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } }) { right *= &(eval + ¤t_delta + &*gamma); @@ -338,7 +334,7 @@ impl Evaluated { vk: &'r plonk::VerifyingKeyV2, x: ChallengeX, ) -> impl Iterator> + Clone { - let blinding_factors = vk.queries.blinding_factors(); + let blinding_factors = vk.cs.blinding_factors(); let x_next = vk.domain.rotate_omega(*x, Rotation::next()); let x_last = vk .domain diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 90f7e0c1c0..c374f62628 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -61,7 +61,7 @@ pub struct ProverV2< // advice_queries: Vec<(Column, Rotation)>, // instance_queries: Vec<(Column, Rotation)>, // fixed_queries: Vec<(Column, Rotation)>, - phases: Vec, + phases: Vec, // State instance: Vec>, advice: Vec>, @@ -95,6 +95,7 @@ impl< where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { + // println!("DBG prove vk.queries.advices {:?}", pk.vk.queries.advice); for instance in instances.iter() { if instance.len() != pk.vk.cs.num_instance_columns { return Err(Error::InvalidInstances); @@ -105,8 +106,8 @@ impl< pk.vk.hash_into(&mut transcript)?; let meta = &pk.vk.cs; - let queries = &pk.vk.queries; - let phases = meta.phases(); + // let queries = &pk.vk.queries; + let phases = meta.phases().collect(); let domain = &pk.vk.domain; @@ -118,11 +119,12 @@ impl< .map(|values| { let mut poly = domain.empty_lagrange(); assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (queries.blinding_factors() + 1)) { + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { return Err(Error::InstanceTooLarge); } for (poly, value) in poly.iter_mut().zip(values.iter()) { if !P::QUERY_INSTANCE { + // dbg!(1, value); transcript.common_scalar(*value)?; } *poly = *value; @@ -146,6 +148,7 @@ impl< drop(instance_commitments_projective); for commitment 
in &instance_commitments { + // dbg!(2, commitment); transcript.common_point(*commitment)?; } } @@ -208,13 +211,13 @@ impl< panic!("TODO: Return Error instead. All phases already commited"); } }; - if phase != *current_phase { + if phase != current_phase.0 { panic!("TODO: Return Error instead. Committing invalid phase"); } let params = self.params; let meta = &self.pk.vk.cs; - let queries = &self.pk.vk.queries; + // let queries = &self.pk.vk.queries; let transcript = &mut self.transcript; let mut rng = &mut self.rng; @@ -275,7 +278,7 @@ impl< Option, LagrangeCoeff>>, >| -> Result<(), Error> { - let unusable_rows_start = params.n() as usize - (queries.blinding_factors() + 1); + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); let mut advice_values = batch_invert_assigned::(witness.into_iter().flatten().collect()); let unblinded_advice: HashSet = @@ -355,7 +358,7 @@ impl< { let params = self.params; let meta = &self.pk.vk.cs; - let queries = &self.pk.vk.queries; + // let queries = &self.pk.vk.queries; let pk = self.pk; let domain = &self.pk.vk.domain; @@ -526,8 +529,8 @@ impl< // Compute and hash instance evals for the circuit instance for instance in instance.iter() { // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = queries - .instance + let instance_evals: Vec<_> = meta + .instance_queries .iter() .map(|&(column, at)| { eval_polynomial( @@ -547,8 +550,8 @@ impl< // Compute and hash advice evals for the circuit instance for advice in advice.iter() { // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = queries - .advice + let advice_evals: Vec<_> = meta + .advice_queries .iter() .map(|&(column, at)| { eval_polynomial( @@ -557,6 +560,7 @@ impl< ) }) .collect(); + dbg!(&advice_evals); // Hash each advice column evaluation for eval in advice_evals.iter() { @@ -565,8 +569,8 @@ impl< } // Compute and hash fixed evals - let fixed_evals: Vec<_> = queries - .fixed + let fixed_evals: Vec<_> = meta + .fixed_queries 
.iter() .map(|&(column, at)| { eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) @@ -623,7 +627,7 @@ impl< iter::empty() .chain( P::QUERY_INSTANCE - .then_some(queries.instance.iter().map(move |&(column, at)| { + .then_some(meta.instance_queries.iter().map(move |&(column, at)| { ProverQuery { point: domain.rotate_omega(*x, at), poly: &instance.instance_polys[column.index()], @@ -633,16 +637,20 @@ impl< .into_iter() .flatten(), ) - .chain(queries.advice.iter().map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - })) + .chain( + meta.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice.advice_polys[column.index()], + blind: advice.advice_blinds[column.index()], + }), + ) .chain(permutation.open_v2(pk, x)) .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) }) - .chain(queries.fixed.iter().map(|&(column, at)| ProverQuery { + .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { point: domain.rotate_omega(*x, at), poly: &pk.fixed_polys[column.index()], blind: Blind::default(), @@ -652,6 +660,7 @@ impl< .chain(vanishing.open(x)); let prover = P::new(params); + println!("DBG create_proof"); prover .create_proof(rng, &mut transcript, instances) .map_err(|_| Error::ConstraintSystemFailure)?; @@ -925,6 +934,7 @@ where let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); for current_phase in pk.vk.cs.phases() { + println!("DBG phase {:?}", current_phase); let column_indices = meta .advice_column_phase .iter() @@ -1004,17 +1014,23 @@ where } }) .collect(); + // println!("DBG blinds: {:?}", blinds); let advice_commitments_projective: Vec<_> = advice_values .iter() .zip(blinds.iter()) .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) .collect(); + // 
println!( + // "DBG advice_commitments_projective: {:?}", + // advice_commitments_projective + // ); let mut advice_commitments = vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; ::CurveExt::batch_normalize( &advice_commitments_projective, &mut advice_commitments, ); + // println!("DBG advice_commitments: {:?}", advice_commitments); let advice_commitments = advice_commitments; drop(advice_commitments_projective); diff --git a/halo2_proofs/src/plonk/shuffle.rs b/halo2_proofs/src/plonk/shuffle.rs index e32353c710..c2136b5d30 100644 --- a/halo2_proofs/src/plonk/shuffle.rs +++ b/halo2_proofs/src/plonk/shuffle.rs @@ -1,10 +1,17 @@ -use super::circuit::Expression; +use super::circuit::{Expression, ExpressionMid}; use ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} + #[derive(Clone)] pub struct Argument { pub(crate) name: String, diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs/src/plonk/shuffle/prover.rs index 929acfe4fa..59ccef29f3 100644 --- a/halo2_proofs/src/plonk/shuffle/prover.rs +++ b/halo2_proofs/src/plonk/shuffle/prover.rs @@ -188,7 +188,7 @@ impl> Argument { challenges, ); - let blinding_factors = pk.vk.queries.blinding_factors(); + let blinding_factors = pk.vk.cs.blinding_factors(); let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; parallelize(&mut shuffle_product, |shuffle_product, start| { diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index cfdd15677a..6a22beb739 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -38,6 +38,7 @@ pub fn verify_proof_v2< where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { + // println!("DBG verify vk.cs.advice_queriess {:?}", vk.cs.advice_queries); // Check that instances 
matches the expected number of instance columns for instances in instances.iter() { if instances.len() != vk.cs.num_instance_columns { @@ -52,9 +53,7 @@ where instance .iter() .map(|instance| { - if instance.len() - > params.n() as usize - (vk.queries.blinding_factors() + 1) - { + if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { return Err(Error::InstanceTooLarge); } let mut poly = instance.to_vec(); @@ -79,6 +78,7 @@ where for instance_commitments in instance_commitments.iter() { // Hash the instance (external) commitments into the transcript for commitment in instance_commitments { + // dbg!(2, commitment); transcript.common_point(*commitment)? } } @@ -86,6 +86,7 @@ where for instance in instances.iter() { for instance in instance.iter() { for value in instance.iter() { + // dbg!(1, value); transcript.common_scalar(*value)?; } } @@ -184,13 +185,15 @@ where let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); let instance_evals = if V::QUERY_INSTANCE { (0..num_proofs) - .map(|_| -> Result, _> { read_n_scalars(transcript, vk.queries.instance.len()) }) + .map(|_| -> Result, _> { + read_n_scalars(transcript, vk.cs.instance_queries.len()) + }) .collect::, _>>()? 
} else { let xn = x.pow([params.n()]); let (min_rotation, max_rotation) = - vk.queries - .instance + vk.cs + .instance_queries .iter() .fold((0, 0), |(min, max), (_, rotation)| { if rotation.0 < min { @@ -214,8 +217,8 @@ where instances .iter() .map(|instances| { - vk.queries - .instance + vk.cs + .instance_queries .iter() .map(|(column, rotation)| { let instances = instances[column.index()]; @@ -228,10 +231,11 @@ where }; let advice_evals = (0..num_proofs) - .map(|_| -> Result, _> { read_n_scalars(transcript, vk.queries.advice.len()) }) + .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) .collect::, _>>()?; + dbg!(&advice_evals); - let fixed_evals = read_n_scalars(transcript, vk.queries.fixed.len())?; + let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; let vanishing = vanishing.evaluate_after_x(transcript)?; @@ -268,7 +272,7 @@ where // x^n let xn = x.pow([params.n()]); - let blinding_factors = vk.queries.blinding_factors(); + let blinding_factors = vk.cs.blinding_factors(); let l_evals = vk .domain .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); @@ -388,7 +392,7 @@ where iter::empty() .chain( V::QUERY_INSTANCE - .then_some(vk.queries.instance.iter().enumerate().map( + .then_some(vk.cs.instance_queries.iter().enumerate().map( move |(query_index, &(column, at))| { VerifierQuery::new_commitment( &instance_commitments[column.index()], @@ -400,7 +404,7 @@ where .into_iter() .flatten(), ) - .chain(vk.queries.advice.iter().enumerate().map( + .chain(vk.cs.advice_queries.iter().enumerate().map( move |(query_index, &(column, at))| { VerifierQuery::new_commitment( &advice_commitments[column.index()], @@ -415,8 +419,8 @@ where }, ) .chain( - vk.queries - .fixed + vk.cs + .fixed_queries .iter() .enumerate() .map(|(query_index, &(column, at))| { @@ -786,6 +790,8 @@ where vanishing.verify(params, expressions, y, xn) }; + // println!("DBG verify fixed_queries:\n{:#?}", vk.cs.fixed_queries); + let queries = 
instance_commitments .iter() .zip(instance_evals.iter()) diff --git a/halo2_proofs/src/transcript.rs b/halo2_proofs/src/transcript.rs index 6e4f812bdf..cf338a53f9 100644 --- a/halo2_proofs/src/transcript.rs +++ b/halo2_proofs/src/transcript.rs @@ -358,11 +358,13 @@ where fn write_point(&mut self, point: C) -> io::Result<()> { self.common_point(point)?; let compressed = point.to_bytes(); + println!("DBG write_point\n{:02x?}", compressed.as_ref()); self.writer.write_all(compressed.as_ref()) } fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { self.common_scalar(scalar)?; let data = scalar.to_repr(); + println!("DBG write_scalar\n{:02x?}", data.as_ref()); self.writer.write_all(data.as_ref()) } } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index f24b3d83cb..51dab029f8 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -48,7 +48,7 @@ struct MyCircuitConfig { // s_rlc * (c[0] + challenge * d[0] - e[0]) s_rlc: Selector, e: Column, - challenge: Challenge, + // challenge: Challenge, // Instance with a gate: s_instance * (a[0] - instance[0]) s_instance: Selector, @@ -134,9 +134,9 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let c = meta.advice_column(); let d = meta.fixed_column(); - meta.enable_equality(a); - meta.enable_equality(b); - meta.enable_equality(d); + // meta.enable_equality(a); + // meta.enable_equality(b); + // meta.enable_equality(d); let s_lookup = meta.fixed_column(); let s_ltable = meta.fixed_column(); @@ -144,63 +144,67 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let s_shuffle = meta.fixed_column(); let s_stable = meta.fixed_column(); - let s_rlc = meta.selector(); - let e = meta.advice_column_in(SecondPhase); - let challenge = meta.challenge_usable_after(FirstPhase); + // let s_rlc = meta.selector(); + let s_rlc = s_gate.clone(); + // let e = meta.advice_column_in(SecondPhase); + let e = c.clone(); + // let challenge 
= meta.challenge_usable_after(FirstPhase); - let s_instance = meta.selector(); + // let s_instance = meta.selector(); + let s_instance = s_gate.clone(); let instance = meta.instance_column(); - meta.enable_equality(instance); + // meta.enable_equality(instance); let one = Expression::Constant(F::ONE); meta.create_gate("gate_a", |meta| { let s_gate = meta.query_selector(s_gate); let a1 = meta.query_advice(a, Rotation::next()); - let a = meta.query_advice(a, Rotation::cur()); + let a0 = meta.query_advice(a, Rotation::cur()); let b = meta.query_advice(b, Rotation::cur()); let c = meta.query_advice(c, Rotation::cur()); let d = meta.query_fixed(d, Rotation::cur()); - vec![s_gate * (a + b * c * d - a1)] + vec![s_gate * (a0 + b * c * d - a1)] }); - meta.lookup_any("lookup", |meta| { - let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); - let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); - let a = meta.query_advice(a, Rotation::cur()); - let b = meta.query_advice(b, Rotation::cur()); - let c = meta.query_advice(c, Rotation::cur()); - let d = meta.query_fixed(d, Rotation::cur()); - let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); - let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); - lhs.into_iter().zip(rhs.into_iter()).collect() - }); - - meta.shuffle("shuffle", |meta| { - let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); - let s_stable = meta.query_fixed(s_stable, Rotation::cur()); - let a = meta.query_advice(a, Rotation::cur()); - let b = meta.query_advice(b, Rotation::cur()); - let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); - let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); - lhs.into_iter().zip(rhs.into_iter()).collect() - }); - - meta.create_gate("gate_rlc", |meta| { - let s_rlc = meta.query_selector(s_rlc); - let a = meta.query_advice(a, Rotation::cur()); - let b = meta.query_advice(b, Rotation::cur()); - let c = meta.query_advice(c, Rotation::cur()); - let d = meta.query_fixed(d, 
Rotation::cur()); - let e = meta.query_advice(e, Rotation::cur()); - let challenge = meta.query_challenge(challenge); - - vec![ - s_rlc.clone() * (a + challenge.clone() * b - e.clone()), - s_rlc * (c + challenge * d - e), - ] - }); + // meta.lookup_any("lookup", |meta| { + // let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); + // let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); + // let a = meta.query_advice(a, Rotation::cur()); + // let b = meta.query_advice(b, Rotation::cur()); + // let c = meta.query_advice(c, Rotation::cur()); + // let d = meta.query_fixed(d, Rotation::cur()); + // let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); + // let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); + // lhs.into_iter().zip(rhs.into_iter()).collect() + // }); + + // NOTE: This works + // meta.shuffle("shuffle", |meta| { + // let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); + // let s_stable = meta.query_fixed(s_stable, Rotation::cur()); + // let a = meta.query_advice(a, Rotation::cur()); + // let b = meta.query_advice(b, Rotation::cur()); + // let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); + // let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); + // lhs.into_iter().zip(rhs.into_iter()).collect() + // }); + + // meta.create_gate("gate_rlc", |meta| { + // let s_rlc = meta.query_selector(s_rlc); + // let a = meta.query_advice(a, Rotation::cur()); + // let b = meta.query_advice(b, Rotation::cur()); + // let c = meta.query_advice(c, Rotation::cur()); + // let d = meta.query_fixed(d, Rotation::cur()); + // let e = meta.query_advice(e, Rotation::cur()); + // let challenge = meta.query_challenge(challenge); + + // vec![ + // s_rlc.clone() * (a + challenge.clone() * b - e.clone()), + // s_rlc * (c + challenge * d - e), + // ] + // }); MyCircuitConfig { s_gate, @@ -212,7 +216,7 @@ impl, const WIDTH_FACTOR: usize> MyCircuit s_ltable, s_rlc, e, - challenge, + // challenge, s_shuffle, s_stable, s_instance, @@ 
-225,73 +229,90 @@ impl, const WIDTH_FACTOR: usize> MyCircuit config: &MyCircuitConfig, layouter: &mut impl Layouter, ) -> Result<(usize, Vec>), Error> { - let challenge = layouter.get_challenge(config.challenge); + // let challenge = layouter.get_challenge(config.challenge); let (rows, instance_copy) = layouter.assign_region( || "unit", |mut region| { let mut offset = 0; let mut instance_copy = Vec::new(); // First "a" value comes from instance - config.s_instance.enable(&mut region, offset); - let res = region.assign_advice_from_instance( - || "", - config.instance, - 0, - config.a, - offset, - )?; + config.s_instance.enable(&mut region, offset).expect("todo"); + // let res = region + // .assign_advice_from_instance(|| "", config.instance, 0, config.a, offset) + // .expect("todo"); + let res = region + .assign_advice( + || "", + config.a, + offset, + || Value::known(F::from(self.input)), + ) + .expect("todo"); // Enable the gate on a few consecutive rows with rotations - let (res, _) = - config.assign_gate(&mut region, &mut offset, Some(res), [0, 3, 4, 1])?; + let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 3, 4, 1]) + .expect("todo"); instance_copy.push(res.clone()); - let (res, _) = - config.assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1])?; + let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1]) + .expect("todo"); instance_copy.push(res.clone()); - let (res, _) = - config.assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1])?; + let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1]) + .expect("todo"); instance_copy.push(res.clone()); - let (res, _) = config.assign_gate( - &mut region, - &mut offset, - Some(res), - [0, 0xffffffff, 0xdeadbeef, 1], - )?; - let _ = config.assign_gate( - &mut region, - &mut offset, - Some(res), - [0, 0xabad1d3a, 0x12345678, 0x42424242], - )?; + let (res, _) = config + .assign_gate( + &mut region, + &mut offset, + 
Some(res), + [0, 0xffffffff, 0xdeadbeef, 1], + ) + .expect("todo"); + let _ = config + .assign_gate( + &mut region, + &mut offset, + Some(res), + [0, 0xabad1d3a, 0x12345678, 0x42424242], + ) + .expect("todo"); offset += 1; // Enable the gate on non-consecutive rows with advice-advice copy constraints enabled - let (_, abcd1) = - config.assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1])?; + let (_, abcd1) = config + .assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1]) + .expect("todo"); offset += 1; - let (_, abcd2) = - config.assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1])?; + let (_, abcd2) = config + .assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1]) + .expect("todo"); offset += 1; - let (_, abcd3) = - config.assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1])?; + let (_, abcd3) = config + .assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1]) + .expect("todo"); offset += 1; - region.constrain_equal(abcd1[1].cell(), abcd2[0].cell())?; - region.constrain_equal(abcd2[0].cell(), abcd3[1].cell())?; + // region.constrain_equal(abcd1[1].cell(), abcd2[0].cell()).expect("todo"); + // region.constrain_equal(abcd2[0].cell(), abcd3[1].cell()).expect("todo"); instance_copy.push(abcd1[1].clone()); instance_copy.push(abcd2[0].clone()); // Enable the gate on non-consecutive rows with advice-fixed copy constraints enabled - let (_, abcd1) = - config.assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9])?; + let (_, abcd1) = config + .assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9]) + .expect("todo"); offset += 1; - let (_, abcd2) = - config.assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1])?; + let (_, abcd2) = config + .assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1]) + .expect("todo"); offset += 1; - let (_, abcd3) = - config.assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1])?; + let (_, abcd3) = config + .assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1]) + .expect("todo"); offset += 1; - 
region.constrain_equal(abcd1[1].cell(), abcd1[3].cell())?; - region.constrain_equal(abcd2[1].cell(), abcd1[3].cell())?; - region.constrain_equal(abcd3[0].cell(), abcd1[3].cell())?; + // region.constrain_equal(abcd1[1].cell(), abcd1[3].cell()).expect("todo"); + // region.constrain_equal(abcd2[1].cell(), abcd1[3].cell()).expect("todo"); + // region.constrain_equal(abcd3[0].cell(), abcd1[3].cell()).expect("todo"); // Enable a dynamic lookup (powers of two) let table: Vec<_> = (0u64..=10).map(|exp| (exp, 2u64.pow(exp as u32))).collect(); @@ -300,30 +321,46 @@ impl, const WIDTH_FACTOR: usize> MyCircuit .iter() .zip(lookups.iter().chain(std::iter::repeat(&(0, 1)))) { - region.assign_fixed(|| "", config.s_lookup, offset, || Value::known(F::ONE))?; - region.assign_fixed(|| "", config.s_ltable, offset, || Value::known(F::ONE))?; + region + .assign_fixed(|| "", config.s_lookup, offset, || Value::known(F::ONE)) + .expect("todo"); + region + .assign_fixed(|| "", config.s_ltable, offset, || Value::known(F::ONE)) + .expect("todo"); let lookup_row0 = Value::known(F::from(lookup_row.0)); let lookup_row1 = Value::known(F::from(lookup_row.1)); - region.assign_advice(|| "", config.a, offset, || lookup_row0)?; - region.assign_advice(|| "", config.b, offset, || lookup_row1)?; + region + .assign_advice(|| "", config.a, offset, || lookup_row0) + .expect("todo"); + region + .assign_advice(|| "", config.b, offset, || lookup_row1) + .expect("todo"); let table_row0 = Value::known(F::from(table_row.0)); let table_row1 = Value::known(F::from(table_row.1)); - region.assign_fixed(|| "", config.d, offset, || table_row0)?; - region.assign_advice(|| "", config.c, offset, || table_row1)?; + region + .assign_fixed(|| "", config.d, offset, || table_row0) + .expect("todo"); + region + .assign_advice(|| "", config.c, offset, || table_row1) + .expect("todo"); offset += 1; } // Enable RLC gate 3 times - for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { - config.s_rlc.enable(&mut region, 
offset)?; - let (_, _) = config.assign_gate(&mut region, &mut offset, None, abcd)?; - let rlc = challenge.map(|ch| { - let [a, b, ..] = abcd; - F::from(a) + ch * F::from(b) - }); - region.assign_advice(|| "", config.e, offset - 1, || rlc)?; - offset += 1; - } + // for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { + // config.s_rlc.enable(&mut region, offset)?; + // let (_, _) = config + // .assign_gate(&mut region, &mut offset, None, abcd) + // .expect("todo"); + // let rlc = challenge.map(|ch| { + // let [a, b, ..] = abcd; + // F::from(a) + ch * F::from(b) + // }); + // region + // .assign_advice(|| "", config.e, offset - 1, || rlc) + // .expect("todo"); + // offset += 1; + // } // Enable a dynamic shuffle (sequence from 0 to 15) let table: Vec<_> = (0u64..16).collect(); @@ -331,17 +368,20 @@ impl, const WIDTH_FACTOR: usize> MyCircuit assert_eq!(table.len(), shuffle.len()); for (table_row, shuffle_row) in table.iter().zip(shuffle.iter()) { - region.assign_fixed( - || "", - config.s_shuffle, - offset, - || Value::known(F::ONE), - )?; - region.assign_fixed(|| "", config.s_stable, offset, || Value::known(F::ONE))?; + region + .assign_fixed(|| "", config.s_shuffle, offset, || Value::known(F::ONE)) + .expect("todo"); + region + .assign_fixed(|| "", config.s_stable, offset, || Value::known(F::ONE)) + .expect("todo"); let shuffle_row0 = Value::known(F::from(*shuffle_row)); - region.assign_advice(|| "", config.a, offset, || shuffle_row0)?; + region + .assign_advice(|| "", config.a, offset, || shuffle_row0) + .expect("todo"); let table_row0 = Value::known(F::from(*table_row)); - region.assign_advice(|| "", config.b, offset, || table_row0)?; + region + .assign_advice(|| "", config.b, offset, || table_row0) + .expect("todo"); offset += 1; } @@ -386,7 +426,7 @@ impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit = MyCircuit::new(k, 42); // Setup - let params = ParamsKZG::::new(k); + let mut rng = BlockRng::new(OneNg {}); + let params = ParamsKZG::::setup(k, 
&mut rng); let verifier_params = params.verifier_params(); let vk = keygen_vk(¶ms, &circuit).expect("keygen_vk should not fail"); let pk = keygen_pk(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); @@ -450,18 +491,21 @@ fn test_mycircuit_full_legacy() { .map(|instance| instance.as_slice()) .collect::>()); - let rng = BlockRng::new(OneNg {}); let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); create_proof::, ProverSHPLONK<'_, Bn256>, _, _, _, _>( ¶ms, &pk, &[circuit.clone()], &[instances_slice], - rng, + &mut rng, &mut transcript, ) .expect("proof generation should not fail"); let proof = transcript.finalize(); + println!("DBG proof.len={} ", proof.len()); + for word in proof.chunks(32) { + println!(" {:02x?}", word); + } // Verify let mut verifier_transcript = @@ -486,13 +530,16 @@ fn test_mycircuit_full_split() { let (compiled_circuit, config, cs) = compile_circuit(k, &circuit, false).unwrap(); // Setup - let params = ParamsKZG::::new(k); + let mut rng = BlockRng::new(OneNg {}); + let params = ParamsKZG::::setup(k, &mut rng); let verifier_params = params.verifier_params(); let vk = keygen_vk_v2(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); + println!("vk: {:#?}", vk); let pk = keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); // Proving + println!("DBG Proving..."); let instances = circuit.instances(); let instances_slice: &[&[Fr]] = &(instances .iter() @@ -500,25 +547,34 @@ fn test_mycircuit_full_split() { .collect::>()); let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, instances_slice); - let rng = BlockRng::new(OneNg {}); let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); let mut prover = ProverV2::, ProverSHPLONK<'_, Bn256>, _, _, _>::new( ¶ms, &pk, &[instances_slice], - rng, + &mut rng, transcript, ) .unwrap(); - let witness_phase0 = witness_calc.calc(0, &HashMap::new()).unwrap(); - let challenges_phase0 = 
prover.commit_phase(0, vec![witness_phase0]).unwrap(); - let witness_phase1 = witness_calc.calc(1, &challenges_phase0).unwrap(); - let _challenges_phase1 = prover.commit_phase(1, vec![witness_phase1]).unwrap(); + let mut challenges = HashMap::new(); + // for phase in [0, 1] { + for phase in [0] { + println!("DBG phase {}", phase); + let witness = witness_calc.calc(phase, &challenges).unwrap(); + // println!("DBG witness: {:?}", witness); + challenges = prover.commit_phase(phase, vec![witness]).unwrap(); + // println!("DBG challenges {:?}", challenges); + } let mut transcript = prover.create_proof().unwrap(); let proof = transcript.finalize(); + println!("DBG proof.len={} ", proof.len()); + for word in proof.chunks(32) { + println!(" {:02x?}", word); + } // Verify + println!("DBG Verifying..."); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); let strategy = SingleStrategy::new(&verifier_params); From 19298c2ffb482fb104a7080456fd1c2b4608f65d Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Wed, 3 Jan 2024 11:55:42 +0100 Subject: [PATCH 14/79] Get it working --- halo2_proofs/src/plonk.rs | 5 +- halo2_proofs/src/plonk/circuit.rs | 268 +++++++++++++------ halo2_proofs/src/plonk/prover.rs | 2 +- halo2_proofs/src/plonk/verifier.rs | 2 +- halo2_proofs/src/poly.rs | 2 +- halo2_proofs/src/transcript.rs | 4 +- halo2_proofs/tests/frontend_backend_split.rs | 186 ++++++------- 7 files changed, 291 insertions(+), 178 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 152361dd30..5e7205dbaa 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -200,9 +200,8 @@ impl VerifyingKeyV2 { hasher.update(s.as_bytes()); // Hash in final Blake2bState - // TODO: Uncomment - // vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - dbg!(&vk.transcript_repr); + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + // dbg!(&vk.transcript_repr); vk } diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index a94f80fc0b..92d17f0b06 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1704,43 +1704,84 @@ pub struct CompiledCircuitV2 { pub(crate) cs: ConstraintSystemV2Backend, } -struct QueriesSet { - advice: BTreeSet<(Column, Rotation)>, - instance: BTreeSet<(Column, Rotation)>, - fixed: BTreeSet<(Column, Rotation)>, -} - -fn collect_queries(expr: &ExpressionMid, queries: &mut QueriesSet) { - match expr { - ExpressionMid::Constant(_) => (), - ExpressionMid::Fixed(query) => { - queries - .fixed - .insert((Column::new(query.column_index, Fixed), query.rotation)); - } - ExpressionMid::Advice(query) => { - queries.advice.insert(( - Column::new(query.column_index, Advice { phase: query.phase }), - query.rotation, - )); - } - ExpressionMid::Instance(query) => { - queries - .instance - .insert((Column::new(query.column_index, Instance), query.rotation)); - } - ExpressionMid::Challenge(_) => (), - 
ExpressionMid::Negated(a) => collect_queries(a, queries), - ExpressionMid::Sum(a, b) => { - collect_queries(a, queries); - collect_queries(b, queries); - } - ExpressionMid::Product(a, b) => { - collect_queries(a, queries); - collect_queries(b, queries); +struct QueriesMap { + advice_map: HashMap<(Column, Rotation), usize>, + instance_map: HashMap<(Column, Rotation), usize>, + fixed_map: HashMap<(Column, Rotation), usize>, + advice: Vec<(Column, Rotation)>, + instance: Vec<(Column, Rotation)>, + fixed: Vec<(Column, Rotation)>, +} + +impl QueriesMap { + fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { + *self.advice_map.entry((col, rot)).or_insert_with(|| { + self.advice.push((col, rot)); + self.advice.len() - 1 + }) + } + fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { + *self.instance_map.entry((col, rot)).or_insert_with(|| { + self.instance.push((col, rot)); + self.instance.len() - 1 + }) + } + fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { + *self.fixed_map.entry((col, rot)).or_insert_with(|| { + self.fixed.push((col, rot)); + self.fixed.len() - 1 + }) + } +} + +impl QueriesMap { + fn to_expression(&mut self, expr: &ExpressionMid) -> Expression { + match expr { + ExpressionMid::Constant(c) => Expression::Constant(*c), + ExpressionMid::Fixed(query) => { + let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); + let index = self.add_fixed(col, rot); + Expression::Fixed(FixedQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Advice(query) => { + let (col, rot) = ( + Column::new(query.column_index, Advice { phase: query.phase }), + query.rotation, + ); + let index = self.add_advice(col, rot); + Expression::Advice(AdviceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + phase: query.phase, + }) + } + ExpressionMid::Instance(query) => { + let (col, rot) = (Column::new(query.column_index, 
Instance), query.rotation); + let index = self.add_instance(col, rot); + Expression::Instance(InstanceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Challenge(c) => Expression::Challenge(*c), + ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.to_expression(e))), + ExpressionMid::Sum(lhs, rhs) => Expression::Sum( + Box::new(self.to_expression(lhs)), + Box::new(self.to_expression(rhs)), + ), + ExpressionMid::Product(lhs, rhs) => Expression::Product( + Box::new(self.to_expression(lhs)), + Box::new(self.to_expression(rhs)), + ), + ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.to_expression(e)), *c), } - ExpressionMid::Scaled(a, _) => collect_queries(a, queries), - }; + } } /* @@ -2138,63 +2179,100 @@ impl ConstraintSystemV2Backend { (0..=max_phase).collect() } - pub(crate) fn collect_queries(&self) -> Queries { - let mut queries = QueriesSet { - advice: BTreeSet::new(), - instance: BTreeSet::new(), - fixed: BTreeSet::new(), + pub(crate) fn collect_queries( + &self, + ) -> ( + Queries, + Vec>, + Vec>, + Vec>, + ) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), }; - let mut num_advice_queries = vec![0; self.num_advice_columns]; - for gate in &self.gates { - for expr in gate.polynomials() { - collect_queries(expr, &mut queries); - } - } - for lookup in &self.lookups { - for expr in lookup - .input_expressions - .iter() - .chain(lookup.table_expressions.iter()) - { - collect_queries(expr, &mut queries); - } - } - for shuffle in &self.shuffles { - for expr in shuffle - .input_expressions - .iter() - .chain(shuffle.shuffle_expressions.iter()) - { - collect_queries(expr, &mut queries); - } - } + let gates: Vec<_> = self + .gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: 
gate.constraint_names.clone(), + polys: gate + .polynomials() + .iter() + .map(|e| queries.to_expression(e)) + .collect(), + queried_selectors: Vec::new(), // Unused? + queried_cells: Vec::new(), // Unused? + }) + .collect(); + let lookups: Vec<_> = self + .lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.to_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.to_expression(e)) + .collect(), + }) + .collect(); + let shuffles: Vec<_> = self + .shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.to_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.to_expression(e)) + .collect(), + }) + .collect(); + for column in self.permutation.get_columns() { match column.column_type { - Any::Instance => queries - .instance - .insert((Column::new(column.index(), Instance), Rotation::cur())), - Any::Fixed => queries - .fixed - .insert((Column::new(column.index(), Fixed), Rotation::cur())), - Any::Advice(advice) => queries - .advice - .insert((Column::new(column.index(), advice), Rotation::cur())), + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => { + queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) + } + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } }; } + let mut num_advice_queries = vec![0; self.num_advice_columns]; for (column, _) in queries.advice.iter() { num_advice_queries[column.index()] += 1; } let queries = Queries { - advice: queries.advice.into_iter().collect(), - instance: queries.instance.into_iter().collect(), - fixed: queries.fixed.into_iter().collect(), + advice: queries.advice, + 
instance: queries.instance, + fixed: queries.fixed, num_advice_queries, }; // println!("DBG collected queries\n{:#?}", queries); - queries + (queries, gates, lookups, shuffles) } } @@ -2252,8 +2330,38 @@ pub struct ConstraintSystem { } impl From> for ConstraintSystem { - fn from(circuit: ConstraintSystemV2Backend) -> Self { - todo!() + fn from(cs2: ConstraintSystemV2Backend) -> Self { + let (queries, gates, lookups, shuffles) = cs2.collect_queries(); + ConstraintSystem { + num_fixed_columns: cs2.num_fixed_columns, + num_advice_columns: cs2.num_advice_columns, + num_instance_columns: cs2.num_instance_columns, + num_selectors: 0, + num_challenges: cs2.num_challenges, + unblinded_advice_columns: cs2.unblinded_advice_columns, + advice_column_phase: cs2 + .advice_column_phase + .into_iter() + .map(|p| sealed::Phase(p)) + .collect(), + challenge_phase: cs2 + .challenge_phase + .into_iter() + .map(|p| sealed::Phase(p)) + .collect(), + selector_map: Vec::new(), + gates, + advice_queries: queries.advice, + num_advice_queries: queries.num_advice_queries, + instance_queries: queries.instance, + fixed_queries: queries.fixed, + permutation: cs2.permutation, + lookups, + shuffles, + general_column_annotations: cs2.general_column_annotations, + constants: Vec::new(), + minimum_degree: None, + } } } diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index c374f62628..2c17f83e49 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -560,7 +560,7 @@ impl< ) }) .collect(); - dbg!(&advice_evals); + // dbg!(&advice_evals); // Hash each advice column evaluation for eval in advice_evals.iter() { diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index 6a22beb739..ace4ff31d2 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -233,7 +233,7 @@ where let advice_evals = (0..num_proofs) .map(|_| -> Result, _> { read_n_scalars(transcript, 
vk.cs.advice_queries.len()) }) .collect::, _>>()?; - dbg!(&advice_evals); + // dbg!(&advice_evals); let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; diff --git a/halo2_proofs/src/poly.rs b/halo2_proofs/src/poly.rs index 68b33f0d47..b505d6b49b 100644 --- a/halo2_proofs/src/poly.rs +++ b/halo2_proofs/src/poly.rs @@ -324,7 +324,7 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { /// Describes the relative rotation of a vector. Negative numbers represent /// reverse (leftmost) rotations and positive numbers represent forward (rightmost) /// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Rotation(pub i32); impl Rotation { diff --git a/halo2_proofs/src/transcript.rs b/halo2_proofs/src/transcript.rs index cf338a53f9..ae2c39d5f6 100644 --- a/halo2_proofs/src/transcript.rs +++ b/halo2_proofs/src/transcript.rs @@ -358,13 +358,13 @@ where fn write_point(&mut self, point: C) -> io::Result<()> { self.common_point(point)?; let compressed = point.to_bytes(); - println!("DBG write_point\n{:02x?}", compressed.as_ref()); + // println!("DBG write_point\n{:02x?}", compressed.as_ref()); self.writer.write_all(compressed.as_ref()) } fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { self.common_scalar(scalar)?; let data = scalar.to_repr(); - println!("DBG write_scalar\n{:02x?}", data.as_ref()); + // println!("DBG write_scalar\n{:02x?}", data.as_ref()); self.writer.write_all(data.as_ref()) } } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 51dab029f8..fd0c8428db 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -48,7 +48,7 @@ struct MyCircuitConfig { // s_rlc * (c[0] + challenge * d[0] - e[0]) s_rlc: Selector, e: Column, - // challenge: Challenge, + challenge: Challenge, // Instance 
with a gate: s_instance * (a[0] - instance[0]) s_instance: Selector, @@ -134,9 +134,9 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let c = meta.advice_column(); let d = meta.fixed_column(); - // meta.enable_equality(a); - // meta.enable_equality(b); - // meta.enable_equality(d); + meta.enable_equality(a); + meta.enable_equality(b); + meta.enable_equality(d); let s_lookup = meta.fixed_column(); let s_ltable = meta.fixed_column(); @@ -144,16 +144,13 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let s_shuffle = meta.fixed_column(); let s_stable = meta.fixed_column(); - // let s_rlc = meta.selector(); - let s_rlc = s_gate.clone(); - // let e = meta.advice_column_in(SecondPhase); - let e = c.clone(); - // let challenge = meta.challenge_usable_after(FirstPhase); + let s_rlc = meta.selector(); + let e = meta.advice_column_in(SecondPhase); + let challenge = meta.challenge_usable_after(FirstPhase); - // let s_instance = meta.selector(); - let s_instance = s_gate.clone(); + let s_instance = meta.selector(); let instance = meta.instance_column(); - // meta.enable_equality(instance); + meta.enable_equality(instance); let one = Expression::Constant(F::ONE); @@ -168,43 +165,42 @@ impl, const WIDTH_FACTOR: usize> MyCircuit vec![s_gate * (a0 + b * c * d - a1)] }); - // meta.lookup_any("lookup", |meta| { - // let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); - // let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); - // let a = meta.query_advice(a, Rotation::cur()); - // let b = meta.query_advice(b, Rotation::cur()); - // let c = meta.query_advice(c, Rotation::cur()); - // let d = meta.query_fixed(d, Rotation::cur()); - // let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); - // let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); - // lhs.into_iter().zip(rhs.into_iter()).collect() - // }); - - // NOTE: This works - // meta.shuffle("shuffle", |meta| { - // let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); - // let s_stable = 
meta.query_fixed(s_stable, Rotation::cur()); - // let a = meta.query_advice(a, Rotation::cur()); - // let b = meta.query_advice(b, Rotation::cur()); - // let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); - // let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); - // lhs.into_iter().zip(rhs.into_iter()).collect() - // }); - - // meta.create_gate("gate_rlc", |meta| { - // let s_rlc = meta.query_selector(s_rlc); - // let a = meta.query_advice(a, Rotation::cur()); - // let b = meta.query_advice(b, Rotation::cur()); - // let c = meta.query_advice(c, Rotation::cur()); - // let d = meta.query_fixed(d, Rotation::cur()); - // let e = meta.query_advice(e, Rotation::cur()); - // let challenge = meta.query_challenge(challenge); - - // vec![ - // s_rlc.clone() * (a + challenge.clone() * b - e.clone()), - // s_rlc * (c + challenge * d - e), - // ] - // }); + meta.lookup_any("lookup", |meta| { + let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); + let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); + let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); + lhs.into_iter().zip(rhs.into_iter()).collect() + }); + + meta.shuffle("shuffle", |meta| { + let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); + let s_stable = meta.query_fixed(s_stable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); + let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); + lhs.into_iter().zip(rhs.into_iter()).collect() + }); + + meta.create_gate("gate_rlc", |meta| { + let s_rlc = meta.query_selector(s_rlc); + let a = meta.query_advice(a, Rotation::cur()); + let b = 
meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let e = meta.query_advice(e, Rotation::cur()); + let challenge = meta.query_challenge(challenge); + + vec![ + s_rlc.clone() * (a + challenge.clone() * b - e.clone()), + s_rlc * (c + challenge * d - e), + ] + }); MyCircuitConfig { s_gate, @@ -216,7 +212,7 @@ impl, const WIDTH_FACTOR: usize> MyCircuit s_ltable, s_rlc, e, - // challenge, + challenge, s_shuffle, s_stable, s_instance, @@ -229,7 +225,7 @@ impl, const WIDTH_FACTOR: usize> MyCircuit config: &MyCircuitConfig, layouter: &mut impl Layouter, ) -> Result<(usize, Vec>), Error> { - // let challenge = layouter.get_challenge(config.challenge); + let challenge = layouter.get_challenge(config.challenge); let (rows, instance_copy) = layouter.assign_region( || "unit", |mut region| { @@ -237,17 +233,17 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let mut instance_copy = Vec::new(); // First "a" value comes from instance config.s_instance.enable(&mut region, offset).expect("todo"); - // let res = region - // .assign_advice_from_instance(|| "", config.instance, 0, config.a, offset) - // .expect("todo"); let res = region - .assign_advice( - || "", - config.a, - offset, - || Value::known(F::from(self.input)), - ) + .assign_advice_from_instance(|| "", config.instance, 0, config.a, offset) .expect("todo"); + // let res = region + // .assign_advice( + // || "", + // config.a, + // offset, + // || Value::known(F::from(self.input)), + // ) + // .expect("todo"); // Enable the gate on a few consecutive rows with rotations let (res, _) = config .assign_gate(&mut region, &mut offset, Some(res), [0, 3, 4, 1]) @@ -292,8 +288,12 @@ impl, const WIDTH_FACTOR: usize> MyCircuit .assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1]) .expect("todo"); offset += 1; - // region.constrain_equal(abcd1[1].cell(), abcd2[0].cell()).expect("todo"); - // region.constrain_equal(abcd2[0].cell(), 
abcd3[1].cell()).expect("todo"); + region + .constrain_equal(abcd1[1].cell(), abcd2[0].cell()) + .expect("todo"); + region + .constrain_equal(abcd2[0].cell(), abcd3[1].cell()) + .expect("todo"); instance_copy.push(abcd1[1].clone()); instance_copy.push(abcd2[0].clone()); @@ -310,9 +310,15 @@ impl, const WIDTH_FACTOR: usize> MyCircuit .assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1]) .expect("todo"); offset += 1; - // region.constrain_equal(abcd1[1].cell(), abcd1[3].cell()).expect("todo"); - // region.constrain_equal(abcd2[1].cell(), abcd1[3].cell()).expect("todo"); - // region.constrain_equal(abcd3[0].cell(), abcd1[3].cell()).expect("todo"); + region + .constrain_equal(abcd1[1].cell(), abcd1[3].cell()) + .expect("todo"); + region + .constrain_equal(abcd2[1].cell(), abcd1[3].cell()) + .expect("todo"); + region + .constrain_equal(abcd3[0].cell(), abcd1[3].cell()) + .expect("todo"); // Enable a dynamic lookup (powers of two) let table: Vec<_> = (0u64..=10).map(|exp| (exp, 2u64.pow(exp as u32))).collect(); @@ -347,20 +353,20 @@ impl, const WIDTH_FACTOR: usize> MyCircuit } // Enable RLC gate 3 times - // for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { - // config.s_rlc.enable(&mut region, offset)?; - // let (_, _) = config - // .assign_gate(&mut region, &mut offset, None, abcd) - // .expect("todo"); - // let rlc = challenge.map(|ch| { - // let [a, b, ..] = abcd; - // F::from(a) + ch * F::from(b) - // }); - // region - // .assign_advice(|| "", config.e, offset - 1, || rlc) - // .expect("todo"); - // offset += 1; - // } + for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { + config.s_rlc.enable(&mut region, offset)?; + let (_, _) = config + .assign_gate(&mut region, &mut offset, None, abcd) + .expect("todo"); + let rlc = challenge.map(|ch| { + let [a, b, ..] 
= abcd; + F::from(a) + ch * F::from(b) + }); + region + .assign_advice(|| "", config.e, offset - 1, || rlc) + .expect("todo"); + offset += 1; + } // Enable a dynamic shuffle (sequence from 0 to 15) let table: Vec<_> = (0u64..16).collect(); @@ -502,10 +508,10 @@ fn test_mycircuit_full_legacy() { ) .expect("proof generation should not fail"); let proof = transcript.finalize(); - println!("DBG proof.len={} ", proof.len()); - for word in proof.chunks(32) { - println!(" {:02x?}", word); - } + // println!("DBG proof.len={} ", proof.len()); + // for word in proof.chunks(32) { + // println!(" {:02x?}", word); + // } // Verify let mut verifier_transcript = @@ -534,7 +540,7 @@ fn test_mycircuit_full_split() { let params = ParamsKZG::::setup(k, &mut rng); let verifier_params = params.verifier_params(); let vk = keygen_vk_v2(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); - println!("vk: {:#?}", vk); + // println!("vk: {:#?}", vk); let pk = keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); @@ -558,8 +564,8 @@ fn test_mycircuit_full_split() { ) .unwrap(); let mut challenges = HashMap::new(); - // for phase in [0, 1] { - for phase in [0] { + for phase in [0, 1] { + // for phase in [0] { println!("DBG phase {}", phase); let witness = witness_calc.calc(phase, &challenges).unwrap(); // println!("DBG witness: {:?}", witness); @@ -568,10 +574,10 @@ fn test_mycircuit_full_split() { } let mut transcript = prover.create_proof().unwrap(); let proof = transcript.finalize(); - println!("DBG proof.len={} ", proof.len()); - for word in proof.chunks(32) { - println!(" {:02x?}", word); - } + // println!("DBG proof.len={} ", proof.len()); + // for word in proof.chunks(32) { + // println!(" {:02x?}", word); + // } // Verify println!("DBG Verifying..."); From 0ebdc59b627cbd414911aa42c16282ba19cee9d3 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Wed, 3 Jan 2024 12:04:40 +0100 Subject: [PATCH 15/79] Clean up --- halo2_proofs/src/dev/cost.rs | 6 +-- halo2_proofs/src/plonk.rs | 47 +--------------- halo2_proofs/src/plonk/circuit.rs | 56 -------------------- halo2_proofs/src/plonk/evaluation.rs | 2 +- halo2_proofs/tests/frontend_backend_split.rs | 8 --- 5 files changed, 5 insertions(+), 114 deletions(-) diff --git a/halo2_proofs/src/dev/cost.rs b/halo2_proofs/src/dev/cost.rs index 1f131a2d40..735f1f0dc7 100644 --- a/halo2_proofs/src/dev/cost.rs +++ b/halo2_proofs/src/dev/cost.rs @@ -102,11 +102,11 @@ impl Layout { total_rows: 0, total_advice_rows: 0, total_fixed_rows: 0, - /// Any cells assigned outside of a region. + // Any cells assigned outside of a region. loose_cells: vec![], - /// Pairs of cells between which we have equality constraints. + // Pairs of cells between which we have equality constraints. equality: vec![], - /// Selector assignments used for optimization pass + // Selector assignments used for optimization pass selectors: vec![vec![false; n]; num_selectors], } } diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 5e7205dbaa..a6cdba70c1 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -99,50 +99,6 @@ impl Queries { // off-by-one errors. 
factors + 1 } - - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, advice_query) in self.advice.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - panic!("get_advice_query_index called for non-existent query"); - } - - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, fixed_query) in self.fixed.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - panic!("get_fixed_query_index called for non-existent query"); - } - - pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, instance_query) in self.instance.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - panic!("get_instance_query_index called for non-existent query"); - } - - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.get_advice_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => { - self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Instance => { - self.get_instance_query_index(Column::::try_from(column).unwrap(), at) - } - } - } } // TODO: Remove in favour of VerifyingKey @@ -427,8 +383,7 @@ impl VerifyingKey { hasher.update(s.as_bytes()); // Hash in final Blake2bState - // TODO: Uncomment - // vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); vk } diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 92d17f0b06..67b463bcda 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -2123,62 +2123,6 @@ pub fn compile_circuit>( } impl ConstraintSystemV2Backend { - /* - /// Compute the degree of the constraint system (the maximum degree of all - /// 
constraints). - pub fn degree(&self) -> usize { - // The permutation argument will serve alongside the gates, so must be - // accounted for. - let mut degree = self.permutation.required_degree(); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.lookups - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.shuffles - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // Account for each gate to ensure our quotient polynomial is the - // correct degree and that our extended domain is the right size. - degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); - - // std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) - degree - } - */ - - pub(crate) fn phases(&self) -> Vec { - let max_phase = self - .advice_column_phase - .iter() - .cloned() - .max() - .unwrap_or_default(); - (0..=max_phase).collect() - } - pub(crate) fn collect_queries( &self, ) -> ( diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index 812d1fd2e8..b865d516b6 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -7,7 +7,7 @@ use crate::{ }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; -use super::{shuffle, ConstraintSystem, ConstraintSystemV2Backend, Expression}; +use super::{shuffle, ConstraintSystem, Expression}; /// Return the index in the polynomial of size `isize` after rotation `rot`. 
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index fd0c8428db..b46b16001f 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -236,14 +236,6 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let res = region .assign_advice_from_instance(|| "", config.instance, 0, config.a, offset) .expect("todo"); - // let res = region - // .assign_advice( - // || "", - // config.a, - // offset, - // || Value::known(F::from(self.input)), - // ) - // .expect("todo"); // Enable the gate on a few consecutive rows with rotations let (res, _) = config .assign_gate(&mut region, &mut offset, Some(res), [0, 3, 4, 1]) From 96ea9442becc576302bac998082141a2c3629e9f Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 3 Jan 2024 12:18:01 +0100 Subject: [PATCH 16/79] Clean up --- halo2_proofs/src/plonk.rs | 102 ----- halo2_proofs/src/plonk/evaluation.rs | 314 +------------ halo2_proofs/src/plonk/keygen.rs | 15 +- halo2_proofs/src/plonk/lookup/prover.rs | 422 +---------------- halo2_proofs/src/plonk/lookup/verifier.rs | 45 +- halo2_proofs/src/plonk/permutation/prover.rs | 246 ---------- .../src/plonk/permutation/verifier.rs | 166 ------- halo2_proofs/src/plonk/prover.rs | 28 +- halo2_proofs/src/plonk/shuffle/prover.rs | 217 --------- halo2_proofs/src/plonk/shuffle/verifier.rs | 26 +- halo2_proofs/src/plonk/vanishing/verifier.rs | 18 +- halo2_proofs/src/plonk/verifier.rs | 431 +----------------- halo2_proofs/tests/frontend_backend_split.rs | 8 +- 13 files changed, 32 insertions(+), 2006 deletions(-) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index a6cdba70c1..85a544d585 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -101,78 +101,6 @@ impl Queries { } } -// TODO: Remove in favour of VerifyingKey -/// This is a verifying key which allows for the 
verification of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct VerifyingKeyV2 { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - // queries: Queries, - /// Cached maximum degree of `cs` (which doesn't change after construction). - cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, -} - -impl VerifyingKeyV2 { - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. - let cs_degree = cs.degree(); - // let queries = cs.collect_queries(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - // queries, - cs_degree, - // Temporary, this is not pinned. - transcript_repr: C::Scalar::ZERO, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - // let s = format!("{:?}", vk.pinned()); - // TODO(Edu): Is it Ok to not use the pinned Vk here? We removed a lot of stuff from Vk - // and Cs, so maybe we already have the same as in PinnedVerificationKey? - let s = format!("{:?}", vk); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - // dbg!(&vk.transcript_repr); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } -} - /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. 
#[derive(Clone, Debug)] @@ -445,36 +373,6 @@ pub struct PinnedVerificationKey<'a, C: CurveAffine> { permutation: &'a permutation::VerifyingKey, } -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKeyV2 { - vk: VerifyingKeyV2, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -// impl ProvingKeyV2 -// where -// C::Scalar: FromUniformBytes<64>, -// { -// /// Hashes a verification key into a transcript. -// pub fn hash_into, T: Transcript>( -// &self, -// transcript: &mut T, -// ) -> io::Result<()> { -// transcript.common_scalar(self.transcript_repr)?; -// -// Ok(()) -// } -// } - /// This is a proving key which allows for the creation of proofs for a /// particular circuit. #[derive(Clone, Debug)] diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index b865d516b6..e89359fa68 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -1,5 +1,5 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey, ProvingKeyV2}; +use crate::plonk::{lookup, permutation, Any, ProvingKey}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -387,318 +387,6 @@ impl Evaluator { } /// Evaluate h poly - // NOTE: Copy of evaluate_h with ProvingKeyV2 - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn evaluate_h_v2( - &self, - pk: &ProvingKeyV2, - advice_polys: &[&[Polynomial]], - instance_polys: &[&[Polynomial]], - challenges: &[C::ScalarExt], - y: C::ScalarExt, - beta: C::ScalarExt, - gamma: C::ScalarExt, - theta: C::ScalarExt, - lookups: &[Vec>], - shuffles: &[Vec>], - permutations: &[permutation::prover::Committed], - ) -> Polynomial { - let domain = &pk.vk.domain; - let size = domain.extended_len(); - let 
rot_scale = 1 << (domain.extended_k() - domain.k()); - let fixed = &pk.fixed_cosets[..]; - let extended_omega = domain.get_extended_omega(); - let isize = size as i32; - let one = C::ScalarExt::ONE; - let l0 = &pk.l0; - let l_last = &pk.l_last; - let l_active_row = &pk.l_active_row; - let p = &pk.vk.cs.permutation; - - // Calculate the advice and instance cosets - let advice: Vec>> = advice_polys - .iter() - .map(|advice_polys| { - advice_polys - .iter() - .map(|poly| domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - let instance: Vec>> = instance_polys - .iter() - .map(|instance_polys| { - instance_polys - .iter() - .map(|poly| domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - - let mut values = domain.empty_extended(); - - // Core expression evaluations - let num_threads = multicore::current_num_threads(); - for ((((advice, instance), lookups), shuffles), permutation) in advice - .iter() - .zip(instance.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .zip(permutations.iter()) - { - // Custom gates - multicore::scope(|scope| { - let chunk_size = (size + num_threads - 1) / num_threads; - for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { - let start = thread_idx * chunk_size; - scope.spawn(move |_| { - let mut eval_data = self.custom_gates.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - *value = self.custom_gates.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - value, - idx, - rot_scale, - isize, - ); - } - }); - } - }); - - // Permutations - let sets = &permutation.sets; - if !sets.is_empty() { - let blinding_factors = pk.vk.cs.blinding_factors(); - let last_rotation = Rotation(-((blinding_factors + 1) as i32)); - let chunk_len = pk.vk.cs.degree() - 2; - let delta_start = beta * &C::Scalar::ZETA; - - let first_set = sets.first().unwrap(); - let last_set = sets.last().unwrap(); - - // 
Permutation constraints - parallelize(&mut values, |values, start| { - let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); - - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - *value = *value * y - + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); - // Enforce only for the last set. - // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - *value = *value * y - + ((last_set.permutation_product_coset[idx] - * last_set.permutation_product_coset[idx] - - last_set.permutation_product_coset[idx]) - * l_last[idx]); - // Except for the first set, enforce. - // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - for (set_idx, set) in sets.iter().enumerate() { - if set_idx != 0 { - *value = *value * y - + ((set.permutation_product_coset[idx] - - permutation.sets[set_idx - 1].permutation_product_coset - [r_last]) - * l0[idx]); - } - } - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) - // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) - // ) - let mut current_delta = delta_start * beta_term; - for ((set, columns), cosets) in sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(pk.permutation.cosets.chunks(chunk_len)) - { - let mut left = set.permutation_product_coset[r_next]; - for (values, permutation) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) - .zip(cosets.iter()) - { - left *= values[idx] + beta * permutation[idx] + gamma; - } - - let mut right = set.permutation_product_coset[idx]; - for values in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => 
&advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) { - right *= values[idx] + current_delta + gamma; - current_delta *= &C::Scalar::DELTA; - } - - *value = *value * y + ((left - right) * l_active_row[idx]); - } - beta_term *= &extended_omega; - } - }); - } - - // Lookups - for (n, lookup) in lookups.iter().enumerate() { - // Polynomials required for this lookup. - // Calculated here so these only have to be kept in memory for the short time - // they are actually needed. - let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); - let permuted_input_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_input_poly.clone()); - let permuted_table_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_table_poly.clone()); - - // Lookup constraints - parallelize(&mut values, |values, start| { - let lookup_evaluator = &self.lookups[n]; - let mut eval_data = lookup_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let table_value = lookup_evaluator.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); - - let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) - // (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - // ) = 0 - *value = *value * y - + ((product_coset[r_next] - * (permuted_input_coset[idx] + beta) - * (permuted_table_coset[idx] + gamma) - - product_coset[idx] * table_value) - * l_active_row[idx]); - // Check that the first values in the permuted input expression and permuted - // fixed expression are the same. - // l_0(X) * (a'(X) - s'(X)) = 0 - *value = *value * y + (a_minus_s * l0[idx]); - // Check that each value in the permuted lookup input expression is either - // equal to the value above it, or the value at the same index in the - // permuted table expression. - // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - *value = *value * y - + (a_minus_s - * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) - * l_active_row[idx]); - } - }); - } - - // Shuffle constraints - for (n, shuffle) in shuffles.iter().enumerate() { - let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); - - // Shuffle constraints - parallelize(&mut values, |values, start| { - let input_evaluator = &self.shuffles[2 * n]; - let shuffle_evaluator = &self.shuffles[2 * n + 1]; - let mut eval_data_input = shuffle_evaluator.instance(); - let mut eval_data_shuffle = shuffle_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let input_value = input_evaluator.evaluate( - &mut eval_data_input, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let shuffle_value = shuffle_evaluator.evaluate( - &mut eval_data_shuffle, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + 
((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 - *value = *value * y - + l_active_row[idx] - * (product_coset[r_next] * shuffle_value - - product_coset[idx] * input_value) - } - }); - } - } - values - } - - /// Evaluate h poly - // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn evaluate_h( &self, diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index b5b7b200fb..6329d83b08 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -11,8 +11,7 @@ use super::{ FloorPlanner, Instance, Selector, }, evaluation::Evaluator, - permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, ProvingKeyV2, - VerifyingKey, VerifyingKeyV2, + permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -207,7 +206,7 @@ impl Assignment for Assembly { pub fn keygen_vk_v2<'params, C, P>( params: &P, circuit: &CompiledCircuitV2, -) -> Result, Error> +) -> Result, Error> where C: CurveAffine, P: Params<'params, C>, @@ -236,11 +235,13 @@ where .map(|poly| params.commit_lagrange(poly, Blind::default()).to_affine()) .collect(); - Ok(VerifyingKeyV2::from_parts( + Ok(VerifyingKey::from_parts( domain, fixed_commitments, permutation_vk, cs, + Vec::new(), + false, )) } @@ -336,9 +337,9 @@ where /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. 
pub fn keygen_pk_v2<'params, C, P>( params: &P, - vk: VerifyingKeyV2, + vk: VerifyingKey, circuit: &CompiledCircuitV2, -) -> Result, Error> +) -> Result, Error> where C: CurveAffine, P: Params<'params, C>, @@ -404,7 +405,7 @@ where // Compute the optimized evaluation data structure let ev = Evaluator::new(&vk.cs); - Ok(ProvingKeyV2 { + Ok(ProvingKey { vk, l0, l_last, diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs/src/plonk/lookup/prover.rs index 377773980b..028b298853 100644 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ b/halo2_proofs/src/plonk/lookup/prover.rs @@ -1,6 +1,6 @@ use super::super::{ circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, ProvingKeyV2, + ProvingKey, }; use super::Argument; use crate::plonk::evaluation::evaluate; @@ -60,113 +60,6 @@ impl> Argument { /// - constructs Permuted struct using permuted_input_value = A', and /// permuted_table_expression = S'. /// The Permuted struct is used to update the Lookup, and is then returned. 
- // NOTE: Copy of commit_permuted that uses ProvingKeyV2 - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted_v2< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKeyV2, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); - - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair_v2( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values - let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - 
// Commit to permuted input expression - let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. - // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit_permuted< 'a, @@ -266,151 +159,6 @@ impl> Argument { } impl Permuted { - /// Given a Lookup with input expressions, table expressions, and the permuted - /// input expression and permuted table expression, this method constructs the - /// grand product polynomial over the lookup. The grand product polynomial - /// is used to populate the Product struct. The Product struct is - /// added to the Lookup and finally returned by the method. 
- // NOTE: Copy of commit_permuted with ProvingKeyV2 - pub(in crate::plonk) fn commit_product_v2< - 'params, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - pk: &ProvingKeyV2, - params: &P, - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - // Goal is to compute the products of fractions - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where a_j(X) is the jth input expression in this lookup, - // where a'(X) is the compression of the permuted input expressions, - // s_j(X) is the jth table expression in this lookup, - // s'(X) is the compression of the permuted table expressions, - // and i is the ith row of the expression. - let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; - // Denominator uses the permuted input expression and permuted table expression - parallelize(&mut lookup_product, |lookup_product, start| { - for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product - .iter_mut() - .zip(self.permuted_input_expression[start..].iter()) - .zip(self.permuted_table_expression[start..].iter()) - { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); - } - }); - - // Batch invert to obtain the denominators for the lookup product - // polynomials - lookup_product.iter_mut().batch_invert(); - - // Finish the computation of the entire fraction by computing the numerators - // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... 
+ \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - parallelize(&mut lookup_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); - } - }); - - // The product vector is a vector of products of fractions of the form - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where there are m input expressions and m table expressions, - // a_j(\omega^i) is the jth input expression in this lookup, - // a'j(\omega^i) is the permuted input expression, - // s_j(\omega^i) is the jth table expression in this lookup, - // s'(\omega^i) is the permuted table expression, - // and i is the ith row of the expression. - - // Compute the evaluations of the lookup product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(lookup_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. - .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - // This test works only with intermediate representations in this method. - // It can be used for debugging purposes. 
- { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - - // l_0(X) * (1 - z(X)) = 0 - assert_eq!(z[0], C::Scalar::ONE); - - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - for i in 0..u { - let mut left = z[i + 1]; - let permuted_input_value = &self.permuted_input_expression[i]; - - let permuted_table_value = &self.permuted_table_expression[i]; - - left *= &(*beta + permuted_input_value); - left *= &(*gamma + permuted_table_value); - - let mut right = z[i]; - let mut input_term = self.compressed_input_expression[i]; - let mut table_term = self.compressed_table_expression[i]; - - input_term += &(*beta); - table_term += &(*gamma); - right *= &(input_term * &table_term); - - assert_eq!(left, right); - } - - // l_last(X) * (z(X)^2 - z(X)) = 0 - // Assertion will fail only when soundness is broken, in which - // case this z[u] value will be zero. (bad!) - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - permuted_input_poly: self.permuted_input_poly, - permuted_input_blind: self.permuted_input_blind, - permuted_table_poly: self.permuted_table_poly, - permuted_table_blind: self.permuted_table_blind, - product_poly: z, - product_blind, - }) - } /// Given a Lookup with input expressions, table expressions, and the permuted /// input expression and permuted table expression, this method constructs the /// grand product polynomial over the lookup. 
The grand product polynomial @@ -558,37 +306,6 @@ impl Permuted { } impl Committed { - pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( - self, - pk: &ProvingKeyV2, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_inv = domain.rotate_omega(*x, Rotation::prev()); - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); - let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); - let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - .chain(Some(permuted_input_eval)) - .chain(Some(permuted_input_inv_eval)) - .chain(Some(permuted_table_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } - - // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -621,49 +338,6 @@ impl Committed { } impl Evaluated { - // NOTE: Copy of open with ProvingKeyV2 - pub(in crate::plonk) fn open_v2<'a>( - &'a self, - pk: &'a ProvingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - // Open lookup input commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup table commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: 
&self.constructed.permuted_table_poly, - blind: self.constructed.permuted_table_blind, - })) - // Open lookup input commitments at x_inv - .chain(Some(ProverQuery { - point: x_inv, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } - - // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, @@ -714,100 +388,6 @@ type ExpressionPair = (Polynomial, Polynomial, R: RngCore>( - pk: &ProvingKeyV2, - params: &P, - domain: &EvaluationDomain, - mut rng: R, - input_expression: &Polynomial, - table_expression: &Polynomial, -) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - let usable_rows = params.n() as usize - (blinding_factors + 1); - - let mut permuted_input_expression: Vec = input_expression.to_vec(); - permuted_input_expression.truncate(usable_rows); - - // Sort input lookup expression values - permuted_input_expression.sort(); - - // A BTreeMap of each unique element in the table expression and its count - let mut leftover_table_map: BTreeMap = table_expression - .iter() - .take(usable_rows) - .fold(BTreeMap::new(), |mut acc, coeff| { - *acc.entry(*coeff).or_insert(0) += 1; - acc - }); - let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; - - let mut repeated_input_rows = permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter_mut()) - .enumerate() - .filter_map(|(row, (input_value, table_value))| { - // If this is the first occurrence of `input_value` in the input expression - if row == 0 || *input_value != permuted_input_expression[row - 1] { - *table_value = *input_value; - // Remove one instance of input_value from leftover_table_map - if let Some(count) = leftover_table_map.get_mut(input_value) { - assert!(*count > 0); - *count -= 1; - None - } else { - 
// Return error if input_value not found - Some(Err(Error::ConstraintSystemFailure)) - } - // If input value is repeated - } else { - Some(Ok(row)) - } - }) - .collect::, _>>()?; - - // Populate permuted table at unfilled rows with leftover table elements - for (coeff, count) in leftover_table_map.iter() { - for _ in 0..*count { - permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; - } - } - assert!(repeated_input_rows.is_empty()); - - permuted_input_expression - .extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - assert_eq!(permuted_input_expression.len(), params.n() as usize); - assert_eq!(permuted_table_coeffs.len(), params.n() as usize); - - #[cfg(feature = "sanity-checks")] - { - let mut last = None; - for (a, b) in permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter()) - .take(usable_rows) - { - if *a != *b { - assert_eq!(*a, last.unwrap()); - } - last = Some(*a); - } - } - - Ok(( - domain.lagrange_from_vec(permuted_input_expression), - domain.lagrange_from_vec(permuted_table_coeffs), - )) -} - -/// Given a vector of input values A and a vector of table values S, -/// this method permutes A and S to produce A' and S', such that: -/// - like values in A' are vertically adjacent to each other; and -/// - the first row in a sequence of like values in A' is the row -/// that has the corresponding value in S'. -/// This method returns (A', S') if no errors are encountered. 
-// TODO: Remove fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( pk: &ProvingKey, params: &P, diff --git a/halo2_proofs/src/plonk/lookup/verifier.rs b/halo2_proofs/src/plonk/lookup/verifier.rs index 5667a54c5d..bbc86c8e9d 100644 --- a/halo2_proofs/src/plonk/lookup/verifier.rs +++ b/halo2_proofs/src/plonk/lookup/verifier.rs @@ -6,7 +6,7 @@ use super::super::{ use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey, VerifyingKeyV2}, + plonk::{Error, VerifyingKey}, poly::{commitment::MSM, Rotation, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; @@ -168,49 +168,6 @@ impl Evaluated { )) } - // NOTE: Copy of queries with VerifyingKeyV2 - pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open lookup input commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - *x, - self.permuted_input_eval, - ))) - // Open lookup table commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_table_commitment, - *x, - self.permuted_table_eval, - ))) - // Open lookup input commitments at \omega^{-1} x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - x_inv, - self.permuted_input_inv_eval, - ))) - // Open lookup product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } - - // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r 
VerifyingKey, diff --git a/halo2_proofs/src/plonk/permutation/prover.rs b/halo2_proofs/src/plonk/permutation/prover.rs index 560a047d60..d6b108554d 100644 --- a/halo2_proofs/src/plonk/permutation/prover.rs +++ b/halo2_proofs/src/plonk/permutation/prover.rs @@ -42,157 +42,6 @@ pub(crate) struct Evaluated { } impl Argument { - // NOTE: Copy of commit with ProvingKeyV2 - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_v2< - 'params, - C: CurveAffine, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - params: &P, - pk: &plonk::ProvingKeyV2, - pkey: &ProvingKey, - advice: &[Polynomial], - fixed: &[Polynomial], - instance: &[Polynomial], - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - - // How many columns can be included in a single permutation polynomial? - // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This - // will never underflow because of the requirement of at least a degree - // 3 circuit for the permutation argument. - assert!(pk.vk.cs_degree >= 3); - let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); - - // Each column gets its own delta power. - let mut deltaomega = C::Scalar::ONE; - - // Track the "last" value from the previous column set - let mut last_z = C::Scalar::ONE; - - let mut sets = vec![]; - - for (columns, permutations) in self - .columns - .chunks(chunk_len) - .zip(pkey.permutations.chunks(chunk_len)) - { - // Goal is to compute the products of fractions - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where p_j(X) is the jth column in this permutation, - // and i is the ith row of the column. 
- - let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; - - // Iterate over each column of the permutation - for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - for ((modified_values, value), permuted_value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - .zip(permuted_column_values[start..].iter()) - { - *modified_values *= &(*beta * permuted_value + &*gamma + value); - } - }); - } - - // Invert to obtain the denominator for the permutation product polynomial - modified_values.batch_invert(); - - // Iterate over each column again, this time finishing the computation - // of the entire fraction by computing the numerators - for &column in columns.iter() { - let omega = domain.get_omega(); - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); - for (modified_values, value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - { - // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); - deltaomega *= ω - } - }); - deltaomega *= &::DELTA; - } - - // The modified_values vector is a vector of products of fractions - // of the form - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where i is the index into modified_values, for the jth column in - // the permutation - - // Compute the evaluations of the permutation product polynomial - // over our domain, starting with z[0] = 1 - let mut z = vec![last_z]; - for row in 
1..(params.n() as usize) { - let mut tmp = z[row - 1]; - - tmp *= &modified_values[row - 1]; - z.push(tmp); - } - let mut z = domain.lagrange_from_vec(z); - // Set blinding factors - for z in &mut z[params.n() as usize - blinding_factors..] { - *z = C::Scalar::random(&mut rng); - } - // Set new last_z - last_z = z[params.n() as usize - (blinding_factors + 1)]; - - let blind = Blind(C::Scalar::random(&mut rng)); - - let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); - let permutation_product_blind = blind; - let z = domain.lagrange_to_coeff(z); - let permutation_product_poly = z.clone(); - - let permutation_product_coset = domain.coeff_to_extended(z.clone()); - - let permutation_product_commitment = - permutation_product_commitment_projective.to_affine(); - - // Hash the permutation product commitment - transcript.write_point(permutation_product_commitment)?; - - sets.push(CommittedSet { - permutation_product_poly, - permutation_product_coset, - permutation_product_blind, - }); - } - - Ok(Committed { sets }) - } - - // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit< 'params, @@ -385,53 +234,6 @@ impl super::ProvingKey { } impl Constructed { - // NOTE: Copy of evaluate with ProvingKeyV2 - pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( - self, - pk: &plonk::ProvingKeyV2, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let blinding_factors = pk.vk.cs.blinding_factors(); - - { - let mut sets = self.sets.iter(); - - while let Some(set) = sets.next() { - let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); - - let permutation_product_next_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation::next()), - ); - - // Hash permutation product evals - for eval in iter::empty() - .chain(Some(&permutation_product_eval)) - .chain(Some(&permutation_product_next_eval)) - { - 
transcript.write_scalar(*eval)?; - } - - // If we have any remaining sets to process, evaluate this set at omega^u - // so we can constrain the last value of its running product to equal the - // first value of the next set's running product, chaining them together. - if sets.len() > 0 { - let permutation_product_last_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), - ); - - transcript.write_scalar(permutation_product_last_eval)?; - } - } - } - - Ok(Evaluated { constructed: self }) - } - - // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &plonk::ProvingKey, @@ -479,54 +281,6 @@ impl Constructed { } impl Evaluated { - // NOTE: Copy of open with ProvingKeyV2 - pub(in crate::plonk) fn open_v2<'a>( - &'a self, - pk: &'a plonk::ProvingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.cs.blinding_factors(); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = pk - .vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.constructed.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega x - .chain(Some(ProverQuery { - point: *x, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - .chain(Some(ProverQuery { - point: x_next, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - })) - // Open it at \omega^{last} x for all but the last set. This rotation is only - // sensical for the first row, but we only use this rotation in a constraint - // that is gated on l_0. 
- .chain( - self.constructed - .sets - .iter() - .rev() - .skip(1) - .flat_map(move |set| { - Some(ProverQuery { - point: x_last, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - }) - }), - ) - } - - // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a plonk::ProvingKey, diff --git a/halo2_proofs/src/plonk/permutation/verifier.rs b/halo2_proofs/src/plonk/permutation/verifier.rs index ac2f944298..a4637422ae 100644 --- a/halo2_proofs/src/plonk/permutation/verifier.rs +++ b/halo2_proofs/src/plonk/permutation/verifier.rs @@ -30,29 +30,6 @@ pub struct Evaluated { } impl Argument { - pub(crate) fn read_product_commitments_v2< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - vk: &plonk::VerifyingKeyV2, - transcript: &mut T, - ) -> Result, Error> { - let chunk_len = vk.cs_degree - 2; - - let permutation_product_commitments = self - .columns - .chunks(chunk_len) - .map(|_| transcript.read_point()) - .collect::, _>>()?; - - Ok(Committed { - permutation_product_commitments, - }) - } - - // TODO: Remove pub(crate) fn read_product_commitments< C: CurveAffine, E: EncodedChallenge, @@ -122,110 +99,6 @@ impl Committed { } impl Evaluated { - // NOTE: Copy of expressions with VerifyingKeyV2 - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions_v2<'a>( - &'a self, - vk: &'a plonk::VerifyingKeyV2, - p: &'a Argument, - common: &'a CommonEvaluated, - advice_evals: &'a [C::Scalar], - fixed_evals: &'a [C::Scalar], - instance_evals: &'a [C::Scalar], - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - beta: ChallengeBeta, - gamma: ChallengeGamma, - x: ChallengeX, - ) -> impl Iterator + 'a { - let chunk_len = vk.cs_degree - 2; - iter::empty() - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - .chain( - self.sets - .first() - .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), - ) - // Enforce only for the last set. 
- // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - .chain(self.sets.last().map(|last_set| { - (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) - * &l_last - })) - // Except for the first set, enforce. - // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - .chain( - self.sets - .iter() - .skip(1) - .zip(self.sets.iter()) - .map(|(set, last_set)| { - ( - set.permutation_product_eval, - last_set.permutation_product_last_eval.unwrap(), - ) - }) - .map(move |(set, prev_last)| (set - &prev_last) * &l_0), - ) - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - .chain( - self.sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(common.permutation_evals.chunks(chunk_len)) - .enumerate() - .map(move |(chunk_index, ((set, columns), permutation_evals))| { - let mut left = set.permutation_product_next_eval; - for (eval, permutation_eval) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals - [vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) - .zip(permutation_evals.iter()) - { - left *= &(eval + &(*beta * permutation_eval) + &*gamma); - } - - let mut right = set.permutation_product_eval; - let mut current_delta = (*beta * &*x) - * &(::DELTA - .pow_vartime([(chunk_index * chunk_len) as u64])); - for eval in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) { - right *= &(eval + 
¤t_delta + &*gamma); - current_delta *= &C::Scalar::DELTA; - } - - (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) - }), - ) - } - - // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn expressions<'a>( &'a self, @@ -328,45 +201,6 @@ impl Evaluated { ) } - // NOTE: Copy of queries with VerifyingKeyV2 - pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( - &'r self, - vk: &'r plonk::VerifyingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = vk.cs.blinding_factors(); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega^{-1} x - // Open permutation product commitments at x and \omega x - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - *x, - set.permutation_product_eval, - ))) - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_next, - set.permutation_product_next_eval, - ))) - })) - // Open it at \omega^{last} x for all but the last set - .chain(self.sets.iter().rev().skip(1).flat_map(move |set| { - Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_last, - set.permutation_product_last_eval.unwrap(), - )) - })) - } - - // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r plonk::VerifyingKey, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 2c17f83e49..95cbdef5dc 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -12,7 +12,7 @@ use super::{ Instance, Selector, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, ProvingKeyV2, + ChallengeX, ChallengeY, Error, ProvingKey, }; use 
crate::{ @@ -57,7 +57,7 @@ pub struct ProverV2< > { // Circuit and setup fields params: &'params Scheme::ParamsProver, - pk: &'a ProvingKeyV2, + pk: &'a ProvingKey, // advice_queries: Vec<(Column, Rotation)>, // instance_queries: Vec<(Column, Rotation)>, // fixed_queries: Vec<(Column, Rotation)>, @@ -85,7 +85,7 @@ impl< /// Create a new prover object pub fn new( params: &'params Scheme::ParamsProver, - pk: &'a ProvingKeyV2, + pk: &'a ProvingKey, // TODO: If this was a vector the usage would be simpler instances: &[&[&[Scheme::Scalar]]], rng: R, @@ -384,7 +384,7 @@ impl< meta.lookups .iter() .map(|lookup| { - lookup.commit_permuted_v2( + lookup.commit_permuted( pk, params, &domain, @@ -419,7 +419,7 @@ impl< .iter() .zip(advice.iter()) .map(|(instance, advice)| { - meta.permutation.commit_v2( + meta.permutation.commit( params, pk, &pk.permutation, @@ -441,7 +441,7 @@ impl< lookups .into_iter() .map(|lookup| { - lookup.commit_product_v2(pk, params, beta, gamma, &mut rng, &mut transcript) + lookup.commit_product(pk, params, beta, gamma, &mut rng, &mut transcript) }) .collect::, _>>() }) @@ -455,7 +455,7 @@ impl< meta.shuffles .iter() .map(|shuffle| { - shuffle.commit_product_v2( + shuffle.commit_product( pk, params, domain, @@ -499,7 +499,7 @@ impl< .collect(); // Evaluate the h(X) polynomial - let h_poly = pk.ev.evaluate_h_v2( + let h_poly = pk.ev.evaluate_h( pk, &advice .iter() @@ -591,7 +591,7 @@ impl< let permutations: Vec> = permutations .into_iter() .map(|permutation| -> Result<_, _> { - permutation.construct().evaluate_v2(pk, x, &mut transcript) + permutation.construct().evaluate(pk, x, &mut transcript) }) .collect::, _>>()?; @@ -601,7 +601,7 @@ impl< .map(|lookups| -> Result, _> { lookups .into_iter() - .map(|p| p.evaluate_v2(pk, x, &mut transcript)) + .map(|p| p.evaluate(pk, x, &mut transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -612,7 +612,7 @@ impl< .map(|shuffles| -> Result, _> { shuffles .into_iter() - .map(|p| p.evaluate_v2(pk, x, &mut 
transcript)) + .map(|p| p.evaluate(pk, x, &mut transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -646,9 +646,9 @@ impl< blind: advice.advice_blinds[column.index()], }), ) - .chain(permutation.open_v2(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open_v2(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open_v2(pk, x))) + .chain(permutation.open(pk, x)) + .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) + .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) }) .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { point: domain.rotate_omega(*x, at), diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs/src/plonk/shuffle/prover.rs index 59ccef29f3..fd30436a47 100644 --- a/halo2_proofs/src/plonk/shuffle/prover.rs +++ b/halo2_proofs/src/plonk/shuffle/prover.rs @@ -1,6 +1,5 @@ use super::super::{ circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, - ProvingKeyV2, }; use super::Argument; use crate::plonk::evaluation::evaluate; @@ -41,61 +40,6 @@ impl> Argument { /// [S_0, S_1, ..., S_{m-1}], this method /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... 
+ \theta S_{m-2} + S_{m-1}, - // NOTE: Copy of compress with ProvingKeyV2 - #[allow(clippy::too_many_arguments)] - fn compress_v2<'a, 'params: 'a, C, P: Params<'params, C>>( - &self, - pk: &ProvingKeyV2, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - ) -> Compressed - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the shuffle and compress them - let input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the shuffle and compress them - let shuffle_expression = compress_expressions(&self.shuffle_expressions); - - Compressed { - input_expression, - shuffle_expression, - } - } - - /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - // TODO: Remove #[allow(clippy::too_many_arguments)] fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( &self, @@ -149,118 +93,6 @@ impl> Argument { /// constructs the grand product polynomial over the shuffle. /// The grand product polynomial is used to populate the Product struct. 
/// The Product struct is added to the Shuffle and finally returned by the method. - // NOTE: Copy of commit_product with ProvingKeyV2 - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_product_v2< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKeyV2, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - let compressed = self.compress_v2( - pk, - params, - domain, - theta, - advice_values, - fixed_values, - instance_values, - challenges, - ); - - let blinding_factors = pk.vk.cs.blinding_factors(); - - let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; - parallelize(&mut shuffle_product, |shuffle_product, start| { - for (shuffle_product, shuffle_value) in shuffle_product - .iter_mut() - .zip(compressed.shuffle_expression[start..].iter()) - { - *shuffle_product = *gamma + shuffle_value; - } - }); - - shuffle_product.iter_mut().batch_invert(); - - parallelize(&mut shuffle_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - *product *= &(*gamma + compressed.input_expression[i]); - } - }); - - // Compute the evaluations of the shuffle product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(shuffle_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - assert_eq!(z[0], C::Scalar::ONE); - for i in 0..u { - let mut left = z[i + 1]; - let input_value = &compressed.input_expression[i]; - let shuffle_value = &compressed.shuffle_expression[i]; - left *= &(*gamma + shuffle_value); - let mut right = z[i]; - right *= &(*gamma + input_value); - assert_eq!(left, right); - } - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - product_poly: z, - product_blind, - }) - } - - /// Given a Shuffle with input expressions and table expressions this method - /// constructs the grand product polynomial over the shuffle. - /// The grand product polynomial is used to populate the Product struct. - /// The Product struct is added to the Shuffle and finally returned by the method. 
- // TODO: Remove #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit_product< 'a, @@ -369,31 +201,6 @@ impl> Argument { } impl Committed { - // NOTE: Copy of evaluate with ProvingKeyV2 - pub(in crate::plonk) fn evaluate_v2, T: TranscriptWrite>( - self, - pk: &ProvingKeyV2, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } - - // TODO: Remove pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( self, pk: &ProvingKey, @@ -419,30 +226,6 @@ impl Committed { } impl Evaluated { - // NOTE: Copy of open with ProvingKeyV2 - pub(in crate::plonk) fn open_v2<'a>( - &'a self, - pk: &'a ProvingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - // Open shuffle product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } - - // TODO: Remove pub(in crate::plonk) fn open<'a>( &'a self, pk: &'a ProvingKey, diff --git a/halo2_proofs/src/plonk/shuffle/verifier.rs b/halo2_proofs/src/plonk/shuffle/verifier.rs index 2f77b52d1d..379cc5c8a1 100644 --- a/halo2_proofs/src/plonk/shuffle/verifier.rs +++ b/halo2_proofs/src/plonk/shuffle/verifier.rs @@ -4,7 +4,7 @@ use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, Challeng use super::Argument; use crate::{ 
arithmetic::CurveAffine, - plonk::{Error, VerifyingKey, VerifyingKeyV2}, + plonk::{Error, VerifyingKey}, poly::{commitment::MSM, Rotation, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; @@ -114,30 +114,6 @@ impl Evaluated { ) } - // NOTE: Copy of queries with VerifyingKeyV2 - pub(in crate::plonk) fn queries_v2<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKeyV2, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open shuffle product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } - - // TODO: Remove pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( &'r self, vk: &'r VerifyingKey, diff --git a/halo2_proofs/src/plonk/vanishing/verifier.rs b/halo2_proofs/src/plonk/vanishing/verifier.rs index a179336e0d..0881dfb2c0 100644 --- a/halo2_proofs/src/plonk/vanishing/verifier.rs +++ b/halo2_proofs/src/plonk/vanishing/verifier.rs @@ -4,7 +4,7 @@ use ff::Field; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey, VerifyingKeyV2}, + plonk::{Error, VerifyingKey}, poly::{ commitment::{Params, MSM}, VerifierQuery, @@ -53,22 +53,6 @@ impl Argument { } impl Committed { - pub(in crate::plonk) fn read_commitments_after_y_v2< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - vk: &VerifyingKeyV2, - transcript: &mut T, - ) -> Result, Error> { - // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 - let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; - - Ok(Constructed { - h_commitments, - random_poly_commitment: self.random_poly_commitment, - }) - } pub(in crate::plonk) fn read_commitments_after_y< E: EncodedChallenge, T: TranscriptRead, diff 
--git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index ace4ff31d2..531c43355a 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -4,7 +4,7 @@ use std::iter; use super::{ vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, VerifyingKeyV2, + VerifyingKey, }; use crate::arithmetic::compute_inner_product; use crate::poly::commitment::{CommitmentScheme, Verifier}; @@ -20,435 +20,6 @@ mod batch; #[cfg(feature = "batch")] pub use batch::BatchVerifier; -/// Returns a boolean indicating whether or not the proof is valid -pub fn verify_proof_v2< - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptRead, - Strategy: VerificationStrategy<'params, Scheme, V>, ->( - params: &'params Scheme::ParamsVerifier, - vk: &VerifyingKeyV2, - strategy: Strategy, - instances: &[&[&[Scheme::Scalar]]], - transcript: &mut T, -) -> Result -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - // println!("DBG verify vk.cs.advice_queriess {:?}", vk.cs.advice_queries); - // Check that instances matches the expected number of instance columns - for instances in instances.iter() { - if instances.len() != vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - let instance_commitments = if V::QUERY_INSTANCE { - instances - .iter() - .map(|instance| { - instance - .iter() - .map(|instance| { - if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { - return Err(Error::InstanceTooLarge); - } - let mut poly = instance.to_vec(); - poly.resize(params.n() as usize, Scheme::Scalar::ZERO); - let poly = vk.domain.lagrange_from_vec(poly); - - Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) - }) - .collect::, _>>() - }) - .collect::, _>>()? 
- } else { - vec![vec![]; instances.len()] - }; - - let num_proofs = instance_commitments.len(); - - // Hash verification key into transcript - vk.hash_into(transcript)?; - - if V::QUERY_INSTANCE { - for instance_commitments in instance_commitments.iter() { - // Hash the instance (external) commitments into the transcript - for commitment in instance_commitments { - // dbg!(2, commitment); - transcript.common_point(*commitment)? - } - } - } else { - for instance in instances.iter() { - for instance in instance.iter() { - for value in instance.iter() { - // dbg!(1, value); - transcript.common_scalar(*value)?; - } - } - } - } - - // Hash the prover's advice commitments into the transcript and squeeze challenges - let (advice_commitments, challenges) = { - let mut advice_commitments = - vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; - let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; - - for current_phase in vk.cs.phases() { - for advice_commitments in advice_commitments.iter_mut() { - for (phase, commitment) in vk - .cs - .advice_column_phase - .iter() - .zip(advice_commitments.iter_mut()) - { - if current_phase == *phase { - *commitment = transcript.read_point()?; - } - } - } - for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { - if current_phase == *phase { - *challenge = *transcript.squeeze_challenge_scalar::<()>(); - } - } - } - - (advice_commitments, challenges) - }; - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - - let lookups_permuted = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each lookup permuted commitment - vk.cs - .lookups - .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); - - // Sample gamma 
challenge - let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); - - let permutations_committed = (0..num_proofs) - .map(|_| { - // Hash each permutation product commitment - vk.cs - .permutation - .read_product_commitments_v2(vk, transcript) - }) - .collect::, _>>()?; - - let lookups_committed = lookups_permuted - .into_iter() - .map(|lookups| { - // Hash each lookup product commitment - lookups - .into_iter() - .map(|lookup| lookup.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_committed = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each shuffle product commitment - vk.cs - .shuffles - .iter() - .map(|argument| argument.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; - - // Sample y challenge, which keeps the gates linearly independent. - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - let vanishing = vanishing.read_commitments_after_y_v2(vk, transcript)?; - - // Sample x challenge, which is used to ensure the circuit is - // satisfied with high probability. - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); - let instance_evals = if V::QUERY_INSTANCE { - (0..num_proofs) - .map(|_| -> Result, _> { - read_n_scalars(transcript, vk.cs.instance_queries.len()) - }) - .collect::, _>>()? 
- } else { - let xn = x.pow([params.n()]); - let (min_rotation, max_rotation) = - vk.cs - .instance_queries - .iter() - .fold((0, 0), |(min, max), (_, rotation)| { - if rotation.0 < min { - (rotation.0, max) - } else if rotation.0 > max { - (min, rotation.0) - } else { - (min, max) - } - }); - let max_instance_len = instances - .iter() - .flat_map(|instance| instance.iter().map(|instance| instance.len())) - .max_by(Ord::cmp) - .unwrap_or_default(); - let l_i_s = &vk.domain.l_i_range( - *x, - xn, - -max_rotation..max_instance_len as i32 + min_rotation.abs(), - ); - instances - .iter() - .map(|instances| { - vk.cs - .instance_queries - .iter() - .map(|(column, rotation)| { - let instances = instances[column.index()]; - let offset = (max_rotation - rotation.0) as usize; - compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) - }) - .collect::>() - }) - .collect::>() - }; - - let advice_evals = (0..num_proofs) - .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) - .collect::, _>>()?; - // dbg!(&advice_evals); - - let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; - - let vanishing = vanishing.evaluate_after_x(transcript)?; - - let permutations_common = vk.permutation.evaluate(transcript)?; - - let permutations_evaluated = permutations_committed - .into_iter() - .map(|permutation| permutation.evaluate(transcript)) - .collect::, _>>()?; - - let lookups_evaluated = lookups_committed - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|lookup| lookup.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_evaluated = shuffles_committed - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|shuffle| shuffle.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // This check ensures the circuit is satisfied so long as the polynomial - // commitments open to the correct values. 
- let vanishing = { - // x^n - let xn = x.pow([params.n()]); - - let blinding_factors = vk.cs.blinding_factors(); - let l_evals = vk - .domain - .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); - assert_eq!(l_evals.len(), 2 + blinding_factors); - let l_last = l_evals[0]; - let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] - .iter() - .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); - let l_0 = l_evals[1 + blinding_factors]; - - // Compute the expected value of h(x) - let expressions = advice_evals - .iter() - .zip(instance_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { - let challenges = &challenges; - let fixed_evals = &fixed_evals; - std::iter::empty() - // Evaluate the circuit using the custom gates provided - .chain(vk.cs.gates.iter().flat_map(move |gate| { - gate.polynomials().iter().map(move |poly| { - poly.evaluate( - &|scalar| scalar, - &|_| { - panic!("virtual selectors are removed during optimization") - }, - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - })) - .chain(permutation.expressions_v2( - vk, - &vk.cs.permutation, - &permutations_common, - advice_evals, - fixed_evals, - instance_evals, - l_0, - l_last, - l_blind, - beta, - gamma, - x, - )) - .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - beta, - gamma, - advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - gamma, 
- advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - }, - ); - - vanishing.verify(params, expressions, y, xn) - }; - - let queries = instance_commitments - .iter() - .zip(instance_evals.iter()) - .zip(advice_commitments.iter()) - .zip(advice_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |( - ( - ( - ( - ((instance_commitments, instance_evals), advice_commitments), - advice_evals, - ), - permutation, - ), - lookups, - ), - shuffles, - )| { - iter::empty() - .chain( - V::QUERY_INSTANCE - .then_some(vk.cs.instance_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &instance_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - instance_evals[query_index], - ) - }, - )) - .into_iter() - .flatten(), - ) - .chain(vk.cs.advice_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &advice_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - advice_evals[query_index], - ) - }, - )) - .chain(permutation.queries_v2(vk, x)) - .chain(lookups.iter().flat_map(move |p| p.queries_v2(vk, x))) - .chain(shuffles.iter().flat_map(move |p| p.queries_v2(vk, x))) - }, - ) - .chain( - vk.cs - .fixed_queries - .iter() - .enumerate() - .map(|(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &vk.fixed_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - fixed_evals[query_index], - ) - }), - ) - .chain(permutations_common.queries(&vk.permutation, x)) - .chain(vanishing.queries(x)); - - // We are now convinced the circuit is satisfied so long as the - // polynomial commitments open to the correct values. 
- - let verifier = V::new(params); - Ok(strategy - .process(|msm| { - println!("ONE"); - verifier - .verify_proof(transcript, queries, msm) - .map_err(|_| Error::Opening) - }) - .expect("todo")) -} - -// TODO: Remove /// Returns a boolean indicating whether or not the proof is valid pub fn verify_proof< 'params, diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index b46b16001f..dc9c12efae 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -8,9 +8,9 @@ use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPla use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, - verify_proof_v2, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, - ProverV2, ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, + Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, + ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProverV2, + ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -577,7 +577,7 @@ fn test_mycircuit_full_split() { Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); let strategy = SingleStrategy::new(&verifier_params); - verify_proof_v2::, VerifierSHPLONK<'_, Bn256>, _, _, _>( + verify_proof::, VerifierSHPLONK<'_, Bn256>, _, _, _>( ¶ms, &vk, strategy, From bfc5d86b0de7d9c10cb0bbd1c0f45abbcd6c38c2 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Wed, 3 Jan 2024 12:20:34 +0100 Subject: [PATCH 17/79] Clean up --- halo2_proofs/src/plonk/evaluation.rs | 92 ---------------------------- 1 file changed, 92 deletions(-) diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index e89359fa68..431c487c7e 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -205,99 +205,7 @@ pub struct CalculationInfo { } impl Evaluator { - /* - /// Creates a new evaluation structure - pub fn new_v2(cs: &ConstraintSystemV2Backend) -> Self { - let mut ev = Evaluator::default(); - - // Custom gates - let mut parts = Vec::new(); - for gate in cs.gates.iter() { - parts.extend( - gate.polynomials() - .iter() - .map(|poly| ev.custom_gates.add_expression(poly)), - ); - } - ev.custom_gates.add_calculation(Calculation::Horner( - ValueSource::PreviousValue(), - parts, - ValueSource::Y(), - )); - - // Lookups - for lookup in cs.lookups.iter() { - let mut graph = GraphEvaluator::default(); - - let mut evaluate_lc = |expressions: &Vec>| { - let parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - // Input coset - let compressed_input_coset = evaluate_lc(&lookup.input_expressions); - // table coset - let compressed_table_coset = evaluate_lc(&lookup.table_expressions); - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - let right_gamma = graph.add_calculation(Calculation::Add( - compressed_table_coset, - ValueSource::Gamma(), - )); - let lc = graph.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Beta(), - )); - graph.add_calculation(Calculation::Mul(lc, right_gamma)); - - ev.lookups.push(graph); - } - - // Shuffles - for shuffle in cs.shuffles.iter() { - let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { - let parts = expressions - .iter() - .map(|expr| 
graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - let mut graph_input = GraphEvaluator::default(); - let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); - let _ = graph_input.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Gamma(), - )); - - let mut graph_shuffle = GraphEvaluator::default(); - let compressed_shuffle_coset = - evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); - let _ = graph_shuffle.add_calculation(Calculation::Add( - compressed_shuffle_coset, - ValueSource::Gamma(), - )); - - ev.shuffles.push(graph_input); - ev.shuffles.push(graph_shuffle); - } - - ev - } - */ - /// Creates a new evaluation structure - // TODO: Remove pub fn new(cs: &ConstraintSystem) -> Self { let mut ev = Evaluator::default(); From 84954577c98cf2822c24ce44f64d2962f3579f4a Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Wed, 3 Jan 2024 15:29:51 +0100 Subject: [PATCH 18/79] Benchmark --- halo2_proofs/Cargo.toml | 2 ++ halo2_proofs/src/plonk/circuit.rs | 2 +- halo2_proofs/tests/frontend_backend_split.rs | 36 +++++++++++++++++--- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index cf62f69ce0..cb9123fda3 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -72,6 +72,7 @@ criterion = "0.3" gumdrop = "0.8" proptest = "1" rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } @@ -91,6 +92,7 @@ thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] +dhat-heap = [] [lib] bench = false diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 67b463bcda..d7ddbf6a31 100644 --- 
a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1936,7 +1936,7 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret 0 => FirstPhase.to_sealed(), 1 => SecondPhase.to_sealed(), 2 => ThirdPhase.to_sealed(), - _ => unreachable!("only phase [0..2] supported"), + _ => unreachable!("only phase [0,2] supported"), }; let mut witness = WitnessCollection { k: self.k, diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index dc9c12efae..36ce018f88 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -1,6 +1,10 @@ #![allow(clippy::many_single_char_names)] #![allow(clippy::op_ref)] +#[cfg(feature = "dhat-heap")] +#[global_allocator] +static ALLOC: dhat::Alloc = dhat::Alloc; + use assert_matches::assert_matches; use ff::{FromUniformBytes, WithSmallOrderMulGroup}; use halo2_proofs::arithmetic::Field; @@ -469,18 +473,27 @@ fn test_mycircuit_mock() { prover.assert_satisfied(); } +use std::time::Instant; + +const K: u32 = 16; +const WIDTH_FACTOR: usize = 4; + #[test] fn test_mycircuit_full_legacy() { - let k = 6; - const WIDTH_FACTOR: usize = 1; + #[cfg(feature = "dhat-heap")] + let _profiler = dhat::Profiler::new_heap(); + + let k = K; let circuit: MyCircuit = MyCircuit::new(k, 42); // Setup let mut rng = BlockRng::new(OneNg {}); let params = ParamsKZG::::setup(k, &mut rng); let verifier_params = params.verifier_params(); + let start = Instant::now(); let vk = keygen_vk(¶ms, &circuit).expect("keygen_vk should not fail"); let pk = keygen_pk(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); + println!("Keygen: {:?}", start.elapsed()); // Proving let instances = circuit.instances(); @@ -489,6 +502,7 @@ fn test_mycircuit_full_legacy() { .map(|instance| instance.as_slice()) .collect::>()); + let start = Instant::now(); let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); 
create_proof::, ProverSHPLONK<'_, Bn256>, _, _, _, _>( ¶ms, @@ -504,8 +518,10 @@ fn test_mycircuit_full_legacy() { // for word in proof.chunks(32) { // println!(" {:02x?}", word); // } + println!("Prove: {:?}", start.elapsed()); // Verify + let start = Instant::now(); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); let strategy = SingleStrategy::new(&verifier_params); @@ -518,12 +534,15 @@ fn test_mycircuit_full_legacy() { &mut verifier_transcript, ) .expect("verify succeeds"); + println!("Verify: {:?}", start.elapsed()); } #[test] fn test_mycircuit_full_split() { - let k = 6; - const WIDTH_FACTOR: usize = 1; + #[cfg(feature = "dhat-heap")] + let _profiler = dhat::Profiler::new_heap(); + + let k = K; let circuit: MyCircuit = MyCircuit::new(k, 42); let (compiled_circuit, config, cs) = compile_circuit(k, &circuit, false).unwrap(); @@ -531,10 +550,13 @@ fn test_mycircuit_full_split() { let mut rng = BlockRng::new(OneNg {}); let params = ParamsKZG::::setup(k, &mut rng); let verifier_params = params.verifier_params(); + let start = Instant::now(); let vk = keygen_vk_v2(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); // println!("vk: {:#?}", vk); let pk = keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); + println!("Keygen: {:?}", start.elapsed()); + drop(compiled_circuit); // Proving println!("DBG Proving..."); @@ -543,8 +565,9 @@ fn test_mycircuit_full_split() { .iter() .map(|instance| instance.as_slice()) .collect::>()); - let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, instances_slice); + let start = Instant::now(); + let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, instances_slice); let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); let mut prover = ProverV2::, ProverSHPLONK<'_, Bn256>, _, _, _>::new( @@ -570,8 +593,10 @@ fn test_mycircuit_full_split() { // for word in proof.chunks(32) { // 
println!(" {:02x?}", word); // } + println!("Prove: {:?}", start.elapsed()); // Verify + let start = Instant::now(); println!("DBG Verifying..."); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); @@ -585,4 +610,5 @@ fn test_mycircuit_full_split() { &mut verifier_transcript, ) .expect("verify succeeds"); + println!("Verify: {:?}", start.elapsed()); } From 568b0e5570565c34e48ee2cef0b59451814cfe12 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Thu, 4 Jan 2024 19:52:05 +0100 Subject: [PATCH 19/79] WIP --- halo2_proofs/src/plonk/keygen.rs | 55 +++++- halo2_proofs/src/plonk/prover.rs | 179 +++++++++++++++---- halo2_proofs/src/plonk/verifier.rs | 22 +++ halo2_proofs/src/poly/commitment.rs | 2 +- halo2_proofs/tests/frontend_backend_split.rs | 30 ++-- 5 files changed, 238 insertions(+), 50 deletions(-) diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 6329d83b08..6014ac55d8 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -7,8 +7,8 @@ use group::Curve; use super::{ circuit::{ - Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, ConstraintSystem, Fixed, - FloorPlanner, Instance, Selector, + compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, + ConstraintSystem, Fixed, FloorPlanner, Instance, Selector, }, evaluation::Evaluator, permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, @@ -245,6 +245,40 @@ where )) } +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// By default, selector compression is turned **off**. +pub fn keygen_vk_legacy<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + keygen_vk_custom_legacy(params, circuit, true) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. 
+/// +/// The selector compression optimization is turned on only if `compress_selectors` is `true`. +pub fn keygen_vk_custom_legacy<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + keygen_vk_v2(params, &compiled_circuit) +} + +// TODO: Remove /// Generate a `VerifyingKey` from an instance of `Circuit`. /// By default, selector compression is turned **off**. pub fn keygen_vk<'params, C, P, ConcreteCircuit>( @@ -260,6 +294,7 @@ where keygen_vk_custom(params, circuit, true) } +// TODO: Remove /// Generate a `VerifyingKey` from an instance of `Circuit`. /// /// The selector compression optimization is turned on only if `compress_selectors` is `true`. @@ -418,6 +453,22 @@ where }) } +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. +pub fn keygen_pk_legacy<'params, C, P, ConcreteCircuit>( + params: &P, + vk: VerifyingKey, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; + keygen_pk_v2(params, vk, &compiled_circuit) +} + +// TODO: Remove /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
pub fn keygen_pk<'params, C, P, ConcreteCircuit>( params: &P, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 95cbdef5dc..e694e4427f 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -7,9 +7,10 @@ use std::{collections::HashMap, iter}; use super::{ circuit::{ + compile_circuit, sealed::{self}, Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, - Instance, Selector, + Instance, Selector, WitnessCalculator, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, @@ -42,9 +43,76 @@ struct AdviceSingle { pub advice_blinds: Vec>, } -// TODO: Rewrite as multi-instance prover, and make a wraper for signle-instance case. /// The prover object used to create proofs interactively by passing the witnesses to commit at -/// each phase. +/// each phase. This works for a single proof. This is a wrapper over ProverV2. +#[derive(Debug)] +pub struct ProverV2Single< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>(ProverV2<'a, 'params, Scheme, P, E, R, T>); + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2Single<'a, 'params, Scheme, P, E, R, T> +{ + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + // TODO: If this was a vector the usage would be simpler + instance: &[&[Scheme::Scalar]], + rng: R, + transcript: &'a mut T, + ) -> Result + // TODO: Can I move this `where` to the struct definition? + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + Ok(Self(ProverV2::new( + params, + pk, + &[instance], + rng, + transcript, + )?)) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. 
+ pub fn commit_phase( + &mut self, + phase: u8, + // TODO: Turn this into Vec>>. Requires batch_invert_assigned to work with + // Vec + witness: Vec, LagrangeCoeff>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.commit_phase(phase, vec![witness]) + } + + /// Finalizes the proof creation. + pub fn create_proof(self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.create_proof() + } +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This supports batch proving. #[derive(Debug)] pub struct ProverV2< 'a, @@ -58,9 +126,6 @@ pub struct ProverV2< // Circuit and setup fields params: &'params Scheme::ParamsProver, pk: &'a ProvingKey, - // advice_queries: Vec<(Column, Rotation)>, - // instance_queries: Vec<(Column, Rotation)>, - // fixed_queries: Vec<(Column, Rotation)>, phases: Vec, // State instance: Vec>, @@ -68,7 +133,7 @@ pub struct ProverV2< challenges: HashMap, next_phase_index: usize, rng: R, - transcript: T, + transcript: &'a mut T, // TODO: maybe &mut T? _marker: std::marker::PhantomData<(P, E)>, } @@ -89,7 +154,7 @@ impl< // TODO: If this was a vector the usage would be simpler instances: &[&[&[Scheme::Scalar]]], rng: R, - mut transcript: T, + transcript: &'a mut T, ) -> Result // TODO: Can I move this `where` to the struct definition? 
where @@ -103,7 +168,7 @@ impl< } // Hash verification key into transcript - pk.vk.hash_into(&mut transcript)?; + pk.vk.hash_into(transcript)?; let meta = &pk.vk.cs; // let queries = &pk.vk.queries; @@ -219,7 +284,6 @@ impl< let meta = &self.pk.vk.cs; // let queries = &self.pk.vk.queries; - let transcript = &mut self.transcript; let mut rng = &mut self.rng; let advice = &mut self.advice; @@ -324,7 +388,7 @@ impl< drop(advice_commitments_projective); for commitment in &advice_commitments { - transcript.write_point(*commitment)?; + self.transcript.write_point(*commitment)?; } for ((column_index, advice_values), blind) in column_indices.iter().zip(advice_values).zip(blinds) @@ -342,7 +406,7 @@ impl< for (index, phase) in meta.challenge_phase.iter().enumerate() { if current_phase == phase { let existing = - challenges.insert(index, *transcript.squeeze_challenge_scalar::<()>()); + challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); assert!(existing.is_none()); } } @@ -352,7 +416,7 @@ impl< } /// Finalizes the proof creation. 
- pub fn create_proof(mut self) -> Result + pub fn create_proof(mut self) -> Result<(), Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { @@ -362,7 +426,6 @@ impl< let pk = self.pk; let domain = &self.pk.vk.domain; - let mut transcript = self.transcript; let mut rng = self.rng; let instance = std::mem::replace(&mut self.instance, Vec::new()); @@ -375,7 +438,7 @@ impl< .collect::>(); // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); let mut lookups_fn = |instance: &InstanceSingle, @@ -394,7 +457,7 @@ impl< &instance.instance_values, &challenges, &mut rng, - &mut transcript, + self.transcript, ) }) .collect::, _>>() @@ -409,10 +472,10 @@ impl< .collect::, _>>()?; // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); + let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); // Sample gamma challenge - let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); + let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); // Commit to permutation. 
let permutations: Vec> = instance @@ -429,7 +492,7 @@ impl< beta, gamma, &mut rng, - &mut transcript, + self.transcript, ) }) .collect::, _>>()?; @@ -441,7 +504,7 @@ impl< lookups .into_iter() .map(|lookup| { - lookup.commit_product(pk, params, beta, gamma, &mut rng, &mut transcript) + lookup.commit_product(pk, params, beta, gamma, &mut rng, self.transcript) }) .collect::, _>>() }) @@ -466,7 +529,7 @@ impl< &instance.instance_values, &challenges, &mut rng, - &mut transcript, + self.transcript, ) }) .collect::, _>>() @@ -474,10 +537,10 @@ impl< .collect::, _>>()?; // Commit to the vanishing argument's random polynomial for blinding h(x_3) - let vanishing = vanishing::Argument::commit(params, domain, &mut rng, &mut transcript)?; + let vanishing = vanishing::Argument::commit(params, domain, &mut rng, self.transcript)?; // Obtain challenge for keeping all separate gates linearly independent - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); // Calculate the advice polys let advice: Vec> = advice @@ -520,9 +583,9 @@ impl< ); // Construct the vanishing argument's h(X) commitments - let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, &mut transcript)?; + let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, self.transcript)?; - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); let xn = x.pow([params.n()]); if P::QUERY_INSTANCE { @@ -542,7 +605,7 @@ impl< // Hash each instance column evaluation for eval in instance_evals.iter() { - transcript.write_scalar(*eval)?; + self.transcript.write_scalar(*eval)?; } } } @@ -564,7 +627,7 @@ impl< // Hash each advice column evaluation for eval in advice_evals.iter() { - transcript.write_scalar(*eval)?; + self.transcript.write_scalar(*eval)?; } } @@ -579,19 +642,19 @@ impl< // Hash each fixed column evaluation for eval in fixed_evals.iter() { 
- transcript.write_scalar(*eval)?; + self.transcript.write_scalar(*eval)?; } - let vanishing = vanishing.evaluate(x, xn, domain, &mut transcript)?; + let vanishing = vanishing.evaluate(x, xn, domain, self.transcript)?; // Evaluate common permutation data - pk.permutation.evaluate(x, &mut transcript)?; + pk.permutation.evaluate(x, self.transcript)?; // Evaluate the permutations, if any, at omega^i x. let permutations: Vec> = permutations .into_iter() .map(|permutation| -> Result<_, _> { - permutation.construct().evaluate(pk, x, &mut transcript) + permutation.construct().evaluate(pk, x, self.transcript) }) .collect::, _>>()?; @@ -601,7 +664,7 @@ impl< .map(|lookups| -> Result, _> { lookups .into_iter() - .map(|p| p.evaluate(pk, x, &mut transcript)) + .map(|p| p.evaluate(pk, x, self.transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -612,7 +675,7 @@ impl< .map(|shuffles| -> Result, _> { shuffles .into_iter() - .map(|p| p.evaluate(pk, x, &mut transcript)) + .map(|p| p.evaluate(pk, x, self.transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -662,10 +725,10 @@ impl< let prover = P::new(params); println!("DBG create_proof"); prover - .create_proof(rng, &mut transcript, instances) + .create_proof(rng, self.transcript, instances) .map_err(|_| Error::ConstraintSystemFailure)?; - Ok(transcript) + Ok(()) } } @@ -808,6 +871,52 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { } } +/// This creates a proof for the provided `circuit` when given the public +/// parameters `params` and the proving key [`ProvingKey`] that was +/// generated previously for the same circuit. The provided `instances` +/// are zero-padded internally. 
+pub fn create_proof_legacy< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + ConcreteCircuit: Circuit, +>( + params: &'params Scheme::ParamsProver, + pk: &ProvingKey, + circuits: &[ConcreteCircuit], + instances: &[&[&[Scheme::Scalar]]], + mut rng: R, + transcript: &mut T, +) -> Result<(), Error> +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; + let mut witness_calcs: Vec<_> = circuits + .iter() + .enumerate() + .map(|(i, circuit)| WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i])) + .collect(); + let mut prover = ProverV2::::new(params, pk, instances, rng, transcript)?; + let mut challenges = HashMap::new(); + let phases = prover.phases.clone(); + for phase in &phases { + // for phase in [0] { + println!("DBG phase {}", phase.0); + let mut witnesses = Vec::with_capacity(circuits.len()); + for witness_calc in witness_calcs.iter_mut() { + witnesses.push(witness_calc.calc(phase.0, &challenges)?); + } + // println!("DBG witness: {:?}", witness); + challenges = prover.commit_phase(phase.0, witnesses).unwrap(); + // println!("DBG challenges {:?}", challenges); + } + prover.create_proof() +} + /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index 531c43355a..fe7b9fb5a4 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -20,6 +20,28 @@ mod batch; #[cfg(feature = "batch")] pub use batch::BatchVerifier; +/// Returns a boolean indicating whether or not the proof is valid. Verifies a single proof (not +/// batched). 
+pub fn verify_proof_single< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instance: &[&[Scheme::Scalar]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + verify_proof(params, vk, strategy, &[instance], transcript) +} + /// Returns a boolean indicating whether or not the proof is valid pub fn verify_proof< 'params, diff --git a/halo2_proofs/src/poly/commitment.rs b/halo2_proofs/src/poly/commitment.rs index ebc26fe9c3..feae085655 100644 --- a/halo2_proofs/src/poly/commitment.rs +++ b/halo2_proofs/src/poly/commitment.rs @@ -40,7 +40,7 @@ pub trait CommitmentScheme { } /// Parameters for circuit sysnthesis and prover parameters. -pub trait Params<'params, C: CurveAffine>: Sized + Clone { +pub trait Params<'params, C: CurveAffine>: Sized + Clone + Debug { /// Multi scalar multiplication engine type MSM: MSM + 'params; diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 36ce018f88..7f6558bf49 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -12,9 +12,10 @@ use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPla use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, - Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, - ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProverV2, - ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, + verify_proof_single, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, + ConstraintSystem, ConstraintSystemV2Backend, 
Error, Expression, FirstPhase, Fixed, Instance, + ProverV2Single, ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, + WitnessCalculator, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -475,14 +476,19 @@ fn test_mycircuit_mock() { use std::time::Instant; -const K: u32 = 16; -const WIDTH_FACTOR: usize = 4; +const K: u32 = 8; +const WIDTH_FACTOR: usize = 1; #[test] fn test_mycircuit_full_legacy() { #[cfg(feature = "dhat-heap")] let _profiler = dhat::Profiler::new_heap(); + use halo2_proofs::plonk::{ + create_proof_legacy as create_proof, keygen_pk_legacy as keygen_pk, + keygen_vk_legacy as keygen_vk, + }; + let k = K; let circuit: MyCircuit = MyCircuit::new(k, 42); @@ -570,12 +576,12 @@ fn test_mycircuit_full_split() { let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, instances_slice); let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); let mut prover = - ProverV2::, ProverSHPLONK<'_, Bn256>, _, _, _>::new( + ProverV2Single::, ProverSHPLONK<'_, Bn256>, _, _, _>::new( ¶ms, &pk, - &[instances_slice], + instances_slice, &mut rng, - transcript, + &mut transcript, ) .unwrap(); let mut challenges = HashMap::new(); @@ -584,10 +590,10 @@ fn test_mycircuit_full_split() { println!("DBG phase {}", phase); let witness = witness_calc.calc(phase, &challenges).unwrap(); // println!("DBG witness: {:?}", witness); - challenges = prover.commit_phase(phase, vec![witness]).unwrap(); + challenges = prover.commit_phase(phase, witness).unwrap(); // println!("DBG challenges {:?}", challenges); } - let mut transcript = prover.create_proof().unwrap(); + prover.create_proof().unwrap(); let proof = transcript.finalize(); // println!("DBG proof.len={} ", proof.len()); // for word in proof.chunks(32) { @@ -602,11 +608,11 @@ fn test_mycircuit_full_split() { Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); let strategy = 
SingleStrategy::new(&verifier_params); - verify_proof::, VerifierSHPLONK<'_, Bn256>, _, _, _>( + verify_proof_single::, VerifierSHPLONK<'_, Bn256>, _, _, _>( ¶ms, &vk, strategy, - &[instances_slice], + instances_slice, &mut verifier_transcript, ) .expect("verify succeeds"); From ca9d0526e64d58c66e0caa046cca687f1d2b307d Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Fri, 5 Jan 2024 00:01:14 +0100 Subject: [PATCH 20/79] Make legacy wrappers over v2 --- halo2_proofs/src/plonk.rs | 3 +- halo2_proofs/src/plonk/keygen.rs | 4 +- halo2_proofs/src/plonk/prover.rs | 4 +- halo2_proofs/tests/frontend_backend_split.rs | 14 +- rust-toolchain | 2 +- scratch | 137 +++++++++++++++++++ 6 files changed, 154 insertions(+), 10 deletions(-) create mode 100644 scratch diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 85a544d585..9c1bc6eaa6 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -108,7 +108,8 @@ pub struct VerifyingKey { domain: EvaluationDomain, fixed_commitments: Vec, permutation: permutation::VerifyingKey, - cs: ConstraintSystem, + /// TODO: Remove pub + pub cs: ConstraintSystem, /// Cached maximum degree of `cs` (which doesn't change after construction). cs_degree: usize, /// The representative of this `VerifyingKey` in transcripts. 
diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 6014ac55d8..eb47d26887 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -275,7 +275,9 @@ where C::Scalar: FromUniformBytes<64>, { let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; - keygen_vk_v2(params, &compiled_circuit) + let mut vk = keygen_vk_v2(params, &compiled_circuit)?; + vk.compress_selectors = compress_selectors; + Ok(vk) } // TODO: Remove diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index e694e4427f..62f45e85c3 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -283,6 +283,8 @@ impl< let params = self.params; let meta = &self.pk.vk.cs; // let queries = &self.pk.vk.queries; + // println!("DBG commit_phase gate {:?}", meta.gates()[0]); + // println!("DBG commit_phase queries {:?}", meta.advice_queries()); let mut rng = &mut self.rng; @@ -888,7 +890,7 @@ pub fn create_proof_legacy< pk: &ProvingKey, circuits: &[ConcreteCircuit], instances: &[&[&[Scheme::Scalar]]], - mut rng: R, + rng: R, transcript: &mut T, ) -> Result<(), Error> where diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 7f6558bf49..34a71ff59b 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -11,11 +11,10 @@ use halo2_proofs::arithmetic::Field; use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ - compile_circuit, create_proof, keygen_pk, keygen_pk_v2, keygen_vk, keygen_vk_v2, verify_proof, - verify_proof_single, Advice, Assigned, Challenge, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, - ProverV2Single, ProvingKey, SecondPhase, Selector, TableColumn, 
VerifyingKey, - WitnessCalculator, + compile_circuit, keygen_pk_v2, keygen_vk_v2, verify_proof, verify_proof_single, Advice, + Assigned, Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, + ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProverV2Single, + ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -161,9 +160,9 @@ impl, const WIDTH_FACTOR: usize> MyCircuit meta.create_gate("gate_a", |meta| { let s_gate = meta.query_selector(s_gate); + let b = meta.query_advice(b, Rotation::cur()); let a1 = meta.query_advice(a, Rotation::next()); let a0 = meta.query_advice(a, Rotation::cur()); - let b = meta.query_advice(b, Rotation::cur()); let c = meta.query_advice(c, Rotation::cur()); let d = meta.query_fixed(d, Rotation::cur()); @@ -488,6 +487,7 @@ fn test_mycircuit_full_legacy() { create_proof_legacy as create_proof, keygen_pk_legacy as keygen_pk, keygen_vk_legacy as keygen_vk, }; + // use halo2_proofs::plonk::{create_proof, keygen_pk, keygen_vk}; let k = K; let circuit: MyCircuit = MyCircuit::new(k, 42); @@ -498,6 +498,8 @@ fn test_mycircuit_full_legacy() { let verifier_params = params.verifier_params(); let start = Instant::now(); let vk = keygen_vk(¶ms, &circuit).expect("keygen_vk should not fail"); + // println!("DBG gate {:?}", vk.cs.gates()[0]); + // println!("DBG queries {:?}", vk.cs.advice_queries()); let pk = keygen_pk(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); println!("Keygen: {:?}", start.elapsed()); diff --git a/rust-toolchain b/rust-toolchain index b6148bc0a7..7c7053aa23 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.66.0 +1.75.0 diff --git a/scratch b/scratch new file mode 100644 index 0000000000..ab22c18135 --- /dev/null +++ b/scratch @@ -0,0 +1,137 @@ +DBG collected queries +Queries { + advice: [ + ( + Column { + index: 
0, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ( + Column { + index: 0, + column_type: Advice, + }, + Rotation( + 1, + ), + ), + ( + Column { + index: 1, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ( + Column { + index: 2, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ], + instance: [], + fixed: [ + ( + Column { + index: 0, + column_type: Fixed, + }, + Rotation( + 0, + ), + ), + ( + Column { + index: 5, + column_type: Fixed, + }, + Rotation( + 0, + ), + ), + ], + num_advice_queries: [ + 2, + 1, + 1, + 0, + ], +} +DBG collected queries +Queries { + advice: [ + ( + Column { + index: 0, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ( + Column { + index: 0, + column_type: Advice, + }, + Rotation( + 1, + ), + ), + ( + Column { + index: 1, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ( + Column { + index: 2, + column_type: Advice, + }, + Rotation( + 0, + ), + ), + ], + instance: [], + fixed: [ + ( + Column { + index: 0, + column_type: Fixed, + }, + Rotation( + 0, + ), + ), + ( # EXTRA + Column { + index: 5, + column_type: Fixed, + }, + Rotation( + 0, + ), + ), + ], + num_advice_queries: [ + 2, + 1, + 1, + 0, + ], +} + From 8813ef710e60b9426af8bea15586d05fd458dc67 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 5 Jan 2024 11:17:59 +0100 Subject: [PATCH 21/79] Remove old API in favour of legacy wrappers --- halo2_proofs/src/plonk/circuit.rs | 68 --- halo2_proofs/src/plonk/keygen.rs | 227 +------ halo2_proofs/src/plonk/prover.rs | 594 ++----------------- halo2_proofs/src/poly/kzg/strategy.rs | 3 +- halo2_proofs/tests/frontend_backend_split.rs | 29 +- scratch | 137 ----- 6 files changed, 51 insertions(+), 1007 deletions(-) delete mode 100644 scratch diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index d7ddbf6a31..54c4d16ec5 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -12,7 +12,6 @@ use ff::Field; use sealed::SealedPhase; use std::collections::BTreeSet; use std::collections::HashMap; -use std::collections::HashSet; use std::fmt::Debug; use std::iter::{Product, Sum}; use std::{ @@ -1784,59 +1783,6 @@ impl QueriesMap { } } -/* -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystemV2BackendQueries { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - // pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - /// This is a cached vector that maps virtual selectors to the concrete - /// fixed column that they were compressed into. This is just used by dev - /// tooling right now. 
- // pub(crate) selector_map: Vec>, - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. - pub(crate) num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. - // pub(crate) constants: Vec>, - - // pub(crate) minimum_degree: Option, -} -*/ - /// This is a description of the circuit environment, such as the gate, column and /// permutation arrangements. #[derive(Debug, Clone)] @@ -1844,7 +1790,6 @@ pub struct ConstraintSystemV2Backend { pub(crate) num_fixed_columns: usize, pub(crate) num_advice_columns: usize, pub(crate) num_instance_columns: usize, - // pub(crate) num_selectors: usize, pub(crate) num_challenges: usize, /// Contains the index of each advice column that is left unblinded. @@ -1855,14 +1800,7 @@ pub struct ConstraintSystemV2Backend { /// Contains the phase for each challenge. Should have same length as num_challenges. 
pub(crate) challenge_phase: Vec, - /// This is a cached vector that maps virtual selectors to the concrete - /// fixed column that they were compressed into. This is just used by dev - /// tooling right now. - // pub(crate) selector_map: Vec>, pub(crate) gates: Vec>, - // pub(crate) advice_queries: Vec<(Column, Rotation)>, - // pub(crate) instance_queries: Vec<(Column, Rotation)>, - // pub(crate) fixed_queries: Vec<(Column, Rotation)>, // Permutation argument for performing equality constraints pub(crate) permutation: permutation::Argument, @@ -1877,11 +1815,6 @@ pub struct ConstraintSystemV2Backend { // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. pub(crate) general_column_annotations: HashMap, - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. - // pub(crate) constants: Vec>, - - // pub(crate) minimum_degree: Option, } /// Witness calculator. Frontend function @@ -1942,7 +1875,6 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret k: self.k, current_phase, advice: vec![Polynomial::new_empty(self.n, F::ZERO.into()); self.cs.num_advice_columns], - unblinded_advice: HashSet::from_iter(self.cs.unblinded_advice_columns.clone()), instances: self.instances, challenges, // The prover will not be allowed to assign values to advice diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index eb47d26887..bed61f09a0 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -8,7 +8,7 @@ use group::Curve; use super::{ circuit::{ compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, Fixed, FloorPlanner, Instance, Selector, + ConstraintSystem, Fixed, Instance, Selector, }, evaluation::Evaluator, permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, @@ -17,7 +17,6 @@ use crate::{ 
arithmetic::{parallelize, CurveAffine}, circuit::Value, poly::{ - batch_invert_assigned, commitment::{Blind, Params}, EvaluationDomain, }, @@ -247,7 +246,7 @@ where /// Generate a `VerifyingKey` from an instance of `Circuit`. /// By default, selector compression is turned **off**. -pub fn keygen_vk_legacy<'params, C, P, ConcreteCircuit>( +pub fn keygen_vk<'params, C, P, ConcreteCircuit>( params: &P, circuit: &ConcreteCircuit, ) -> Result, Error> @@ -257,13 +256,13 @@ where ConcreteCircuit: Circuit, C::Scalar: FromUniformBytes<64>, { - keygen_vk_custom_legacy(params, circuit, true) + keygen_vk_custom(params, circuit, true) } /// Generate a `VerifyingKey` from an instance of `Circuit`. /// /// The selector compression optimization is turned on only if `compress_selectors` is `true`. -pub fn keygen_vk_custom_legacy<'params, C, P, ConcreteCircuit>( +pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( params: &P, circuit: &ConcreteCircuit, compress_selectors: bool, @@ -280,97 +279,6 @@ where Ok(vk) } -// TODO: Remove -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// By default, selector compression is turned **off**. -pub fn keygen_vk<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - keygen_vk_custom(params, circuit, true) -} - -// TODO: Remove -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// -/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
-pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - let (domain, cs, config) = create_domain::( - params.k(), - #[cfg(feature = "circuit-params")] - circuit.params(), - ); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let mut assembly: Assembly = Assembly { - k: params.k(), - fixed: vec![domain.empty_lagrange_assigned(); cs.num_fixed_columns], - permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), - selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], - usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config, - cs.constants.clone(), - )?; - - let mut fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if compress_selectors { - cs.compress_selectors(assembly.selectors.clone()) - } else { - // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
- let selectors = std::mem::take(&mut assembly.selectors); - cs.directly_convert_selectors_to_fixed(selectors) - }; - fixed.extend( - selector_polys - .into_iter() - .map(|poly| domain.lagrange_from_vec(poly)), - ); - - let permutation_vk = assembly - .permutation - .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = fixed - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default()).to_affine()) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - assembly.selectors, - compress_selectors, - )) -} - /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. pub fn keygen_pk_v2<'params, C, P>( params: &P, @@ -456,7 +364,7 @@ where } /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. -pub fn keygen_pk_legacy<'params, C, P, ConcreteCircuit>( +pub fn keygen_pk<'params, C, P, ConcreteCircuit>( params: &P, vk: VerifyingKey, circuit: &ConcreteCircuit, @@ -469,128 +377,3 @@ where let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; keygen_pk_v2(params, vk, &compiled_circuit) } - -// TODO: Remove -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
-pub fn keygen_pk<'params, C, P, ConcreteCircuit>( - params: &P, - vk: VerifyingKey, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, -{ - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - - let cs = cs; - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let mut assembly: Assembly = Assembly { - k: params.k(), - fixed: vec![vk.domain.empty_lagrange_assigned(); cs.num_fixed_columns], - permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), - selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], - usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config, - cs.constants.clone(), - )?; - - let mut fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if vk.compress_selectors { - cs.compress_selectors(assembly.selectors) - } else { - cs.directly_convert_selectors_to_fixed(assembly.selectors) - }; - // println!( - // "DBG configure queries:\n{:#?}", - // ( - // &cs.advice_queries, - // &cs.instance_queries, - // &cs.fixed_queries, - // &cs.num_advice_queries - // ) - // ); - fixed.extend( - selector_polys - .into_iter() - .map(|poly| vk.domain.lagrange_from_vec(poly)), - ); - - let fixed_polys: Vec<_> = fixed - .iter() - .map(|poly| vk.domain.lagrange_to_coeff(poly.clone())) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = assembly - .permutation - .build_pk(params, &vk.domain, 
&cs.permutation); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. - let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); - - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: fixed, - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) -} diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 62f45e85c3..f29e09e820 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -9,8 +9,8 @@ use super::{ circuit::{ compile_circuit, sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, - Instance, Selector, WitnessCalculator, + Advice, Any, Assignment, Challenge, Circuit, Column, 
Fixed, Instance, Selector, + WitnessCalculator, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, @@ -329,14 +329,36 @@ impl< } } - // Check that all current_phase advice columns are Some - for (column_index, advice_column) in witness.iter().enumerate() { - if column_indices.contains(&column_index) { - // TODO: Check that column_index in witness is Some - // TODO: Check that the column length is `params.n()` - } else { - // TODO: Check that column_index in witness is None - }; + // Check that all current_phase advice columns are Some, and their length is correct + for witness in &witness { + for (column_index, advice_column) in witness.iter().enumerate() { + if column_indices.contains(&column_index) { + match advice_column { + None => { + return Err(Error::Other(format!( + "expected advice column with index {} at phase {}", + column_index, current_phase.0 + ))) + } + Some(advice_column) => { + if advice_column.len() != params.n() as usize { + return Err(Error::Other(format!( + "expected advice column with index {} to have length {}", + column_index, + params.n(), + ))); + } + } + } + } else { + if advice_column.is_some() { + return Err(Error::Other(format!( + "expected no advice column with index {} at phase {}", + column_index, current_phase.0 + ))); + } + }; + } } let mut commit_phase_fn = |advice: &mut AdviceSingle, @@ -738,7 +760,7 @@ pub(crate) struct WitnessCollection<'a, F: Field> { pub(crate) k: u32, pub(crate) current_phase: sealed::Phase, pub(crate) advice: Vec, LagrangeCoeff>>, - pub(crate) unblinded_advice: HashSet, + // pub(crate) unblinded_advice: HashSet, pub(crate) challenges: &'a HashMap, pub(crate) instances: &'a [&'a [F]], pub(crate) usable_rows: RangeTo, @@ -877,7 +899,7 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. 
The provided `instances` /// are zero-padded internally. -pub fn create_proof_legacy< +pub fn create_proof< 'params, Scheme: CommitmentScheme, P: Prover<'params, Scheme>, @@ -919,557 +941,11 @@ where prover.create_proof() } -/// This creates a proof for the provided `circuit` when given the public -/// parameters `params` and the proving key [`ProvingKey`] that was -/// generated previously for the same circuit. The provided `instances` -/// are zero-padded internally. -// TODO: Remove -pub fn create_proof< - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - ConcreteCircuit: Circuit, ->( - params: &'params Scheme::ParamsProver, - pk: &ProvingKey, - circuits: &[ConcreteCircuit], - instances: &[&[&[Scheme::Scalar]]], - mut rng: R, - transcript: &mut T, -) -> Result<(), Error> -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - if circuits.len() != instances.len() { - return Err(Error::InvalidInstances); - } - - for instance in instances.iter() { - if instance.len() != pk.vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - // Hash verification key into transcript - pk.vk.hash_into(transcript)?; - - let domain = &pk.vk.domain; - let mut meta = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut meta, circuits[0].params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut meta); - - // Selector optimizations cannot be applied here; use the ConstraintSystem - // from the verification key. 
- let meta = &pk.vk.cs; - - struct InstanceSingle { - pub instance_values: Vec>, - pub instance_polys: Vec>, - } - - let instance: Vec> = instances - .iter() - .map(|instance| -> Result, Error> { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - transcript.common_scalar(*value)?; - } - *poly = *value; - } - Ok(poly) - }) - .collect::, _>>()?; - - if P::QUERY_INSTANCE { - let instance_commitments_projective: Vec<_> = instance_values - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) - .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - - for commitment in &instance_commitments { - transcript.common_point(*commitment)?; - } - } - - let instance_polys: Vec<_> = instance_values - .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) - }) - .collect(); - - Ok(InstanceSingle { - instance_values, - instance_polys, - }) - }) - .collect::, _>>()?; - - #[derive(Clone)] - struct AdviceSingle { - pub advice_polys: Vec>, - pub advice_blinds: Vec>, - } - - let (advice, challenges) = { - let mut advice = vec![ - AdviceSingle:: { - advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], - advice_blinds: vec![Blind::default(); meta.num_advice_columns], - }; - instances.len() - ]; - let mut challenges = HashMap::::with_capacity(meta.num_challenges); - - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 
1); - for current_phase in pk.vk.cs.phases() { - println!("DBG phase {:?}", current_phase); - let column_indices = meta - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == *phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - for ((circuit, advice), instances) in - circuits.iter().zip(advice.iter_mut()).zip(instances) - { - let mut witness = WitnessCollection { - k: params.k(), - current_phase, - advice: vec![domain.empty_lagrange_assigned(); meta.num_advice_columns], - unblinded_advice: HashSet::from_iter(meta.unblinded_advice_columns.clone()), - instances, - challenges: &challenges, - // The prover will not be allowed to assign values to advice - // cells that exist within inactive rows, which include some - // number of blinding factors and an extra row for use in the - // permutation argument. - usable_rows: ..unusable_rows_start, - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain the witness and other information. - ConcreteCircuit::FloorPlanner::synthesize( - &mut witness, - circuit, - config.clone(), - meta.constants.clone(), - )?; - - let mut advice_values = batch_invert_assigned::( - witness - .advice - .into_iter() - .enumerate() - .filter_map(|(column_index, advice)| { - if column_indices.contains(&column_index) { - Some(advice) - } else { - None - } - }) - .collect(), - ); - - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !witness.unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] 
{ - assert_eq!(*cell, Scheme::Scalar::ZERO); - } - } - } - - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if witness.unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - // println!("DBG blinds: {:?}", blinds); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - // println!( - // "DBG advice_commitments_projective: {:?}", - // advice_commitments_projective - // ); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - // println!("DBG advice_commitments: {:?}", advice_commitments); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); - - for commitment in &advice_commitments { - transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] = advice_values; - advice.advice_blinds[*column_index] = blind; - } - } - - for (index, phase) in meta.challenge_phase.iter().enumerate() { - if current_phase == *phase { - let existing = - challenges.insert(index, *transcript.squeeze_challenge_scalar::<()>()); - assert!(existing.is_none()); - } - } - } - - assert_eq!(challenges.len(), meta.num_challenges); - let challenges = (0..meta.num_challenges) - .map(|index| challenges.remove(&index).unwrap()) - .collect::>(); - - (advice, challenges) - }; - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - - let lookups: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, Error> { - // Construct and 
commit to permuted values for each lookup - pk.vk - .cs - .lookups - .iter() - .map(|lookup| { - lookup.commit_permuted( - pk, - params, - domain, - theta, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - transcript, - ) - }) - .collect() - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); - - // Commit to permutations. - let permutations: Vec> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| { - pk.vk.cs.permutation.commit( - params, - pk, - &pk.permutation, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - beta, - gamma, - &mut rng, - transcript, - ) - }) - .collect::, _>>()?; - - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - // Construct and commit to products for each lookup - lookups - .into_iter() - .map(|lookup| lookup.commit_product(pk, params, beta, gamma, &mut rng, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, _> { - // Compress expressions for each shuffle - pk.vk - .cs - .shuffles - .iter() - .map(|shuffle| { - shuffle.commit_product( - pk, - params, - domain, - theta, - gamma, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - transcript, - ) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Commit to the vanishing argument's random polynomial for blinding h(x_3) - let vanishing = vanishing::Argument::commit(params, domain, &mut rng, transcript)?; - - // Obtain challenge for keeping all separate gates linearly independent - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - // Calculate the advice polys - let advice: Vec> = advice - .into_iter() - .map( - 
|AdviceSingle { - advice_polys, - advice_blinds, - }| { - AdviceSingle { - advice_polys: advice_polys - .into_iter() - .map(|poly| domain.lagrange_to_coeff(poly)) - .collect::>(), - advice_blinds, - } - }, - ) - .collect(); - - // Evaluate the h(X) polynomial - let h_poly = pk.ev.evaluate_h( - pk, - &advice - .iter() - .map(|a| a.advice_polys.as_slice()) - .collect::>(), - &instance - .iter() - .map(|i| i.instance_polys.as_slice()) - .collect::>(), - &challenges, - *y, - *beta, - *gamma, - *theta, - &lookups, - &shuffles, - &permutations, - ); - - // Construct the vanishing argument's h(X) commitments - let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, transcript)?; - - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); - let xn = x.pow([params.n()]); - - if P::QUERY_INSTANCE { - // Compute and hash instance evals for each circuit instance - for instance in instance.iter() { - // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = meta - .instance_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &instance.instance_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each instance column evaluation - for eval in instance_evals.iter() { - transcript.write_scalar(*eval)?; - } - } - } - - // Compute and hash advice evals for each circuit instance - for advice in advice.iter() { - // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = meta - .advice_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &advice.advice_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each advice column evaluation - for eval in advice_evals.iter() { - transcript.write_scalar(*eval)?; - } - } - - // Compute and hash fixed evals (shared across all circuit instances) - let fixed_evals: Vec<_> = meta - .fixed_queries - .iter() - .map(|&(column, at)| { - eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) - }) - 
.collect(); - - // Hash each fixed column evaluation - for eval in fixed_evals.iter() { - transcript.write_scalar(*eval)?; - } - - let vanishing = vanishing.evaluate(x, xn, domain, transcript)?; - - // Evaluate common permutation data - pk.permutation.evaluate(x, transcript)?; - - // Evaluate the permutations, if any, at omega^i x. - let permutations: Vec> = permutations - .into_iter() - .map(|permutation| -> Result<_, _> { permutation.construct().evaluate(pk, x, transcript) }) - .collect::, _>>()?; - - // Evaluate the lookups, if any, at omega^i x. - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|p| p.evaluate(pk, x, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Evaluate the shuffles, if any, at omega^i x. - let shuffles: Vec>> = shuffles - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|p| p.evaluate(pk, x, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let instances = instance - .iter() - .zip(advice.iter()) - .zip(permutations.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { - iter::empty() - .chain( - P::QUERY_INSTANCE - .then_some(pk.vk.cs.instance_queries.iter().map(move |&(column, at)| { - ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &instance.instance_polys[column.index()], - blind: Blind::default(), - } - })) - .into_iter() - .flatten(), - ) - .chain( - pk.vk - .cs - .advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - }), - ) - .chain(permutation.open(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) - }) - .chain( - pk.vk - .cs - .fixed_queries - .iter() - .map(|&(column, at)| ProverQuery { - point: 
domain.rotate_omega(*x, at), - poly: &pk.fixed_polys[column.index()], - blind: Blind::default(), - }), - ) - .chain(pk.permutation.open(x)) - // We query the h(X) polynomial at x - .chain(vanishing.open(x)); - - let prover = P::new(params); - prover - .create_proof(rng, transcript, instances) - .map_err(|_| Error::ConstraintSystemFailure) -} - #[test] fn test_create_proof() { use crate::{ circuit::SimpleFloorPlanner, - plonk::{keygen_pk, keygen_vk}, + plonk::{keygen_pk, keygen_vk, ConstraintSystem}, poly::kzg::{ commitment::{KZGCommitmentScheme, ParamsKZG}, multiopen::ProverSHPLONK, diff --git a/halo2_proofs/src/poly/kzg/strategy.rs b/halo2_proofs/src/poly/kzg/strategy.rs index 5e1a9cfa8e..14b6565b80 100644 --- a/halo2_proofs/src/poly/kzg/strategy.rs +++ b/halo2_proofs/src/poly/kzg/strategy.rs @@ -102,7 +102,7 @@ where self.msm_accumulator.scale(E::Scalar::random(OsRng)); // Guard is updated with new msm contributions - let guard = f(self.msm_accumulator).expect("todo"); + let guard = f(self.msm_accumulator)?; Ok(Self { msm_accumulator: guard.msm_accumulator, }) @@ -144,7 +144,6 @@ where if msm.check() { Ok(()) } else { - println!("OH NO"); Err(Error::ConstraintSystemFailure) } } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 34a71ff59b..8144ae8c18 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -5,27 +5,20 @@ #[global_allocator] static ALLOC: dhat::Alloc = dhat::Alloc; -use assert_matches::assert_matches; -use ff::{FromUniformBytes, WithSmallOrderMulGroup}; use halo2_proofs::arithmetic::Field; -use halo2_proofs::circuit::{AssignedCell, Cell, Layouter, Region, SimpleFloorPlanner, Value}; +use halo2_proofs::circuit::{AssignedCell, Layouter, Region, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ compile_circuit, keygen_pk_v2, keygen_vk_v2, verify_proof, verify_proof_single, Advice, - Assigned, 
Challenge, Circuit, Column, CompiledCircuitV2, ConstraintSystem, - ConstraintSystemV2Backend, Error, Expression, FirstPhase, Fixed, Instance, ProverV2Single, - ProvingKey, SecondPhase, Selector, TableColumn, VerifyingKey, WitnessCalculator, + Challenge, Circuit, Column, ConstraintSystem, Error, Expression, FirstPhase, Fixed, Instance, + ProverV2Single, SecondPhase, Selector, WitnessCalculator, }; -use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; +use halo2_proofs::poly::commitment::ParamsProver; use halo2_proofs::poly::Rotation; -use halo2_proofs::poly::VerificationStrategy; use halo2_proofs::transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, - TranscriptWriterBuffer, + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, }; -use rand_core::{OsRng, RngCore}; use std::collections::HashMap; -use std::marker::PhantomData; #[derive(Clone)] struct MyCircuitConfig { @@ -428,7 +421,7 @@ impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit, const WIDTH_FACTOR: usize> Circuit for MyCircuit = MyCircuit::new(k, 42); diff --git a/scratch b/scratch deleted file mode 100644 index ab22c18135..0000000000 --- a/scratch +++ /dev/null @@ -1,137 +0,0 @@ -DBG collected queries -Queries { - advice: [ - ( - Column { - index: 0, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 0, - column_type: Advice, - }, - Rotation( - 1, - ), - ), - ( - Column { - index: 1, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 2, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ], - instance: [], - fixed: [ - ( - Column { - index: 0, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 5, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ], - num_advice_queries: [ - 2, - 1, - 1, - 0, - ], -} -DBG collected queries -Queries { - advice: [ - ( - Column { - index: 0, - column_type: Advice, - }, 
- Rotation( - 0, - ), - ), - ( - Column { - index: 0, - column_type: Advice, - }, - Rotation( - 1, - ), - ), - ( - Column { - index: 1, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 2, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ], - instance: [], - fixed: [ - ( - Column { - index: 0, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( # EXTRA - Column { - index: 5, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ], - num_advice_queries: [ - 2, - 1, - 1, - 0, - ], -} - From 86ef75fb4908d292a1e756c6851eb58ec6a671c0 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Fri, 5 Jan 2024 15:07:19 +0100 Subject: [PATCH 22/79] Fix test --- halo2_proofs/src/plonk/prover.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index f29e09e820..a568d4d410 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -918,6 +918,9 @@ pub fn create_proof< where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { + if circuits.len() != instances.len() { + return Err(Error::InvalidInstances); + } let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; let mut witness_calcs: Vec<_> = circuits .iter() From 58c95675cc6b3ee4f81536e08a7e7c09888890c1 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 5 Jan 2024 15:13:40 +0100 Subject: [PATCH 23/79] Address some clippy warnings --- halo2_proofs/src/plonk/circuit.rs | 36 +++++++++----------- halo2_proofs/src/plonk/prover.rs | 36 +++++++++----------- halo2_proofs/tests/frontend_backend_split.rs | 8 ++--- 3 files changed, 36 insertions(+), 44 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 54c4d16ec5..bfc25bb1b1 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1734,7 +1734,7 @@ impl QueriesMap { } impl QueriesMap { - fn to_expression(&mut self, expr: &ExpressionMid) -> Expression { + fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { match expr { ExpressionMid::Constant(c) => Expression::Constant(*c), ExpressionMid::Fixed(query) => { @@ -1769,16 +1769,16 @@ impl QueriesMap { }) } ExpressionMid::Challenge(c) => Expression::Challenge(*c), - ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.to_expression(e))), + ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), ExpressionMid::Sum(lhs, rhs) => Expression::Sum( - Box::new(self.to_expression(lhs)), - Box::new(self.to_expression(rhs)), + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), ), ExpressionMid::Product(lhs, rhs) => Expression::Product( - Box::new(self.to_expression(lhs)), - Box::new(self.to_expression(rhs)), + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), ), - ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.to_expression(e)), *c), + ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), } } } @@ -1953,8 +1953,8 @@ pub fn compile_circuit>( k, fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], permutation: permutation::keygen::Assembly::new(n, &cs.permutation), - selectors: vec![vec![false; n as usize]; cs.num_selectors], - usable_rows: 0..n as usize - (cs.blinding_factors() + 
1), + selectors: vec![vec![false; n]; cs.num_selectors], + usable_rows: 0..n - (cs.blinding_factors() + 1), _marker: std::marker::PhantomData, }; @@ -2081,7 +2081,7 @@ impl ConstraintSystemV2Backend { polys: gate .polynomials() .iter() - .map(|e| queries.to_expression(e)) + .map(|e| queries.as_expression(e)) .collect(), queried_selectors: Vec::new(), // Unused? queried_cells: Vec::new(), // Unused? @@ -2095,12 +2095,12 @@ impl ConstraintSystemV2Backend { input_expressions: lookup .input_expressions .iter() - .map(|e| queries.to_expression(e)) + .map(|e| queries.as_expression(e)) .collect(), table_expressions: lookup .table_expressions .iter() - .map(|e| queries.to_expression(e)) + .map(|e| queries.as_expression(e)) .collect(), }) .collect(); @@ -2112,12 +2112,12 @@ impl ConstraintSystemV2Backend { input_expressions: shuffle .input_expressions .iter() - .map(|e| queries.to_expression(e)) + .map(|e| queries.as_expression(e)) .collect(), shuffle_expressions: shuffle .shuffle_expressions .iter() - .map(|e| queries.to_expression(e)) + .map(|e| queries.as_expression(e)) .collect(), }) .collect(); @@ -2218,13 +2218,9 @@ impl From> for ConstraintSystem { advice_column_phase: cs2 .advice_column_phase .into_iter() - .map(|p| sealed::Phase(p)) - .collect(), - challenge_phase: cs2 - .challenge_phase - .into_iter() - .map(|p| sealed::Phase(p)) + .map(sealed::Phase) .collect(), + challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), selector_map: Vec::new(), gates, advice_queries: queries.advice, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index a568d4d410..64711e25d3 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -306,7 +306,7 @@ impl< // TODO: Check that witness.len() is the expected number of advice columns. 
if witness.len() != advice.len() { - return Err(Error::Other(format!("witness.len() != advice.len()"))); + return Err(Error::Other("witness.len() != advice.len()".to_string())); } for witness_circuit in witness.iter() { if witness_circuit.len() != meta.num_advice_columns { @@ -316,15 +316,13 @@ impl< meta.num_advice_columns, ))); } - for witness_column in witness_circuit { - if let Some(witness_column) = witness_column { - if witness_column.len() != self.params.n() as usize { - return Err(Error::Other(format!( - "unexpected length in witness_column. Got {}, expected {}", - witness_column.len(), - self.params.n() - ))); - } + for witness_column in witness_circuit.iter().flatten() { + if witness_column.len() != self.params.n() as usize { + return Err(Error::Other(format!( + "unexpected length in witness_column. Got {}, expected {}", + witness_column.len(), + self.params.n() + ))); } } } @@ -350,13 +348,11 @@ impl< } } } - } else { - if advice_column.is_some() { - return Err(Error::Other(format!( - "expected no advice column with index {} at phase {}", - column_index, current_phase.0 - ))); - } + } else if advice_column.is_some() { + return Err(Error::Other(format!( + "expected no advice column with index {} at phase {}", + column_index, current_phase.0 + ))); }; } } @@ -452,8 +448,8 @@ impl< let mut rng = self.rng; - let instance = std::mem::replace(&mut self.instance, Vec::new()); - let advice = std::mem::replace(&mut self.advice, Vec::new()); + let instance = std::mem::take(&mut self.instance); + let advice = std::mem::take(&mut self.advice); let mut challenges = self.challenges; assert_eq!(challenges.len(), meta.num_challenges); @@ -474,7 +470,7 @@ impl< lookup.commit_permuted( pk, params, - &domain, + domain, theta, &advice.advice_polys, &pk.fixed_values, diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 8144ae8c18..1dd7b6f194 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ 
b/halo2_proofs/tests/frontend_backend_split.rs @@ -171,7 +171,7 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let d = meta.query_fixed(d, Rotation::cur()); let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); - lhs.into_iter().zip(rhs.into_iter()).collect() + lhs.into_iter().zip(rhs).collect() }); meta.shuffle("shuffle", |meta| { @@ -181,7 +181,7 @@ impl, const WIDTH_FACTOR: usize> MyCircuit let b = meta.query_advice(b, Rotation::cur()); let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); - lhs.into_iter().zip(rhs.into_iter()).collect() + lhs.into_iter().zip(rhs).collect() }); meta.create_gate("gate_rlc", |meta| { @@ -523,7 +523,7 @@ fn test_mycircuit_full_legacy() { let start = Instant::now(); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); - let strategy = SingleStrategy::new(&verifier_params); + let strategy = SingleStrategy::new(verifier_params); verify_proof::, VerifierSHPLONK<'_, Bn256>, _, _, _>( ¶ms, @@ -599,7 +599,7 @@ fn test_mycircuit_full_split() { println!("DBG Verifying..."); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); - let strategy = SingleStrategy::new(&verifier_params); + let strategy = SingleStrategy::new(verifier_params); verify_proof_single::, VerifierSHPLONK<'_, Bn256>, _, _, _>( ¶ms, From 1bc84caf30e37d913b8818b6d0f6c6bfb9ed0216 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Fri, 5 Jan 2024 17:03:13 +0100 Subject: [PATCH 24/79] Fix pinned vk test --- halo2_proofs/tests/plonk_api.rs | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index 307dbdfef0..274daffa91 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -649,7 +649,7 @@ fn plonk_api() { ), }, Fixed { - query_index: 2, + query_index: 0, column_index: 2, rotation: Rotation( 0, @@ -665,7 +665,7 @@ fn plonk_api() { ), }, Fixed { - query_index: 3, + query_index: 1, column_index: 3, rotation: Rotation( 0, @@ -691,7 +691,7 @@ fn plonk_api() { }, ), Fixed { - query_index: 5, + query_index: 2, column_index: 1, rotation: Rotation( 0, @@ -709,7 +709,7 @@ fn plonk_api() { ), }, Fixed { - query_index: 4, + query_index: 3, column_index: 4, rotation: Rotation( 0, @@ -720,7 +720,7 @@ fn plonk_api() { ), Product( Fixed { - query_index: 1, + query_index: 4, column_index: 0, rotation: Rotation( 0, @@ -746,7 +746,7 @@ fn plonk_api() { ), Product( Fixed { - query_index: 6, + query_index: 5, column_index: 5, rotation: Rotation( 0, @@ -851,7 +851,7 @@ fn plonk_api() { fixed_queries: [ ( Column { - index: 6, + index: 2, column_type: Fixed, }, Rotation( @@ -860,7 +860,7 @@ fn plonk_api() { ), ( Column { - index: 0, + index: 3, column_type: Fixed, }, Rotation( @@ -869,7 +869,7 @@ fn plonk_api() { ), ( Column { - index: 2, + index: 1, column_type: Fixed, }, Rotation( @@ -878,7 +878,7 @@ fn plonk_api() { ), ( Column { - index: 3, + index: 4, column_type: Fixed, }, Rotation( @@ -887,7 +887,7 @@ fn plonk_api() { ), ( Column { - index: 4, + index: 0, column_type: Fixed, }, Rotation( @@ -896,7 +896,7 @@ fn plonk_api() { ), ( Column { - index: 1, + index: 5, column_type: Fixed, }, Rotation( @@ -905,7 +905,7 @@ fn plonk_api() { ), ( Column { - index: 5, + index: 6, column_type: Fixed, }, Rotation( @@ -978,7 +978,7 @@ fn plonk_api() { ], table_expressions: [ 
Fixed { - query_index: 0, + query_index: 6, column_index: 6, rotation: Rotation( 0, From 582d6d5dee76401a672ae22423d19e0090d3db11 Mon Sep 17 00:00:00 2001 From: "Eduard S." Date: Tue, 9 Jan 2024 11:58:51 +0100 Subject: [PATCH 25/79] Remove Polynomial from backend interface --- halo2_proofs/src/plonk/circuit.rs | 15 +++++++------ halo2_proofs/src/plonk/keygen.rs | 22 ++++++++++++++++--- halo2_proofs/src/plonk/prover.rs | 35 +++++++++++++------------------ 3 files changed, 41 insertions(+), 31 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index bfc25bb1b1..daed13dd2b 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1692,7 +1692,8 @@ pub struct PreprocessingV2 { pub(crate) permutation: permutation::keygen::Assembly, // TODO(Edu): Replace this by Vec>. Requires some methods of Polynomial to take Vec // instead - pub(crate) fixed: Vec>, + // pub(crate) fixed: Vec>, + pub(crate) fixed: Vec>, } /// This is a description of a low level Plonkish compiled circuit. 
Contains the Constraint System @@ -1858,7 +1859,8 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret &mut self, phase: u8, challenges: &HashMap, - ) -> Result, LagrangeCoeff>>>, Error> { + // ) -> Result, LagrangeCoeff>>>, Error> { + ) -> Result>>>, Error> { if phase != self.next_phase { return Err(Error::Other(format!( "Expected phase {}, got {}", @@ -1874,7 +1876,7 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret let mut witness = WitnessCollection { k: self.k, current_phase, - advice: vec![Polynomial::new_empty(self.n, F::ZERO.into()); self.cs.num_advice_columns], + advice: vec![vec![Assigned::Zero; self.n]; self.cs.num_advice_columns], instances: self.instances, challenges, // The prover will not be allowed to assign values to advice @@ -1974,11 +1976,8 @@ pub fn compile_circuit>( let selectors = std::mem::take(&mut assembly.selectors); cs.directly_convert_selectors_to_fixed(selectors) }; - fixed.extend( - selector_polys - .into_iter() - .map(|poly| Polynomial::new_lagrange_from_vec(poly)), - ); + let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); + fixed.extend(selector_polys.into_iter()); let cs2 = ConstraintSystemV2Backend { num_fixed_columns: cs.num_fixed_columns, diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index bed61f09a0..9ed887b4a5 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -231,7 +231,14 @@ where .preprocessing .fixed .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default()).to_affine()) + .map(|poly| { + params + .commit_lagrange( + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + .to_affine() + }) .collect(); Ok(VerifyingKey::from_parts( @@ -299,7 +306,10 @@ where .preprocessing .fixed .iter() - .map(|poly| vk.domain.lagrange_to_coeff(poly.clone())) + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) 
.collect(); let fixed_cosets = fixed_polys @@ -355,7 +365,13 @@ where l0, l_last, l_active_row, - fixed_values: circuit.preprocessing.fixed.clone(), + fixed_values: circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(), fixed_polys, fixed_cosets, permutation: permutation_pk, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 64711e25d3..ee1ad112c0 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -94,7 +94,8 @@ impl< phase: u8, // TODO: Turn this into Vec>>. Requires batch_invert_assigned to work with // Vec - witness: Vec, LagrangeCoeff>>>, + // witness: Vec, LagrangeCoeff>>>, + witness: Vec>>>, ) -> Result, Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, @@ -265,7 +266,8 @@ impl< phase: u8, // TODO: Turn this into Vec>>. Requires batch_invert_assigned to work with // Vec - witness: Vec, LagrangeCoeff>>>>, + // witness: Vec, LagrangeCoeff>>>>, + witness: Vec>>>>, ) -> Result, Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, @@ -304,11 +306,10 @@ impl< }) .collect::>(); - // TODO: Check that witness.len() is the expected number of advice columns. if witness.len() != advice.len() { return Err(Error::Other("witness.len() != advice.len()".to_string())); } - for witness_circuit in witness.iter() { + for witness_circuit in &witness { if witness_circuit.len() != meta.num_advice_columns { return Err(Error::Other(format!( "unexpected length in witness_circuitk. Got {}, expected {}", @@ -316,20 +317,8 @@ impl< meta.num_advice_columns, ))); } - for witness_column in witness_circuit.iter().flatten() { - if witness_column.len() != self.params.n() as usize { - return Err(Error::Other(format!( - "unexpected length in witness_column. 
Got {}, expected {}", - witness_column.len(), - self.params.n() - ))); - } - } - } - - // Check that all current_phase advice columns are Some, and their length is correct - for witness in &witness { - for (column_index, advice_column) in witness.iter().enumerate() { + // Check that all current_phase advice columns are Some, and their length is correct + for (column_index, advice_column) in witness_circuit.iter().enumerate() { if column_indices.contains(&column_index) { match advice_column { None => { @@ -420,7 +409,13 @@ impl< }; for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { - commit_phase_fn(advice, witness)?; + commit_phase_fn( + advice, + witness + .into_iter() + .map(|v| v.map(Polynomial::new_lagrange_from_vec)) + .collect(), + )?; } for (index, phase) in meta.challenge_phase.iter().enumerate() { @@ -755,7 +750,7 @@ impl< pub(crate) struct WitnessCollection<'a, F: Field> { pub(crate) k: u32, pub(crate) current_phase: sealed::Phase, - pub(crate) advice: Vec, LagrangeCoeff>>, + pub(crate) advice: Vec>>, // pub(crate) unblinded_advice: HashSet, pub(crate) challenges: &'a HashMap, pub(crate) instances: &'a [&'a [F]], From a3ecfbaba3fa44ec5162755351cc64c21da72992 Mon Sep 17 00:00:00 2001 From: "Eduard S." 
Date: Wed, 10 Jan 2024 12:57:00 +0100 Subject: [PATCH 26/79] Clean up --- halo2_proofs/src/plonk/circuit.rs | 3 ++- halo2_proofs/src/plonk/keygen.rs | 1 - halo2_proofs/src/plonk/prover.rs | 2 +- halo2_proofs/tests/frontend_backend_split.rs | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index daed13dd2b..8dbb3db483 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -2929,7 +2929,8 @@ impl ConstraintSystem { }); } - pub(crate) fn phases(&self) -> impl Iterator { + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { let max_phase = self .advice_column_phase .iter() diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 9ed887b4a5..23f5385628 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -213,7 +213,6 @@ where { let cs2 = &circuit.cs; let cs: ConstraintSystem = cs2.clone().into(); - // let queries = cs.collect_queries(); let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); if (params.n() as usize) < cs.minimum_rows() { diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index ee1ad112c0..cc520fa74d 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -134,7 +134,7 @@ pub struct ProverV2< challenges: HashMap, next_phase_index: usize, rng: R, - transcript: &'a mut T, // TODO: maybe &mut T? 
+ transcript: &'a mut T, _marker: std::marker::PhantomData<(P, E)>, } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 1dd7b6f194..7716ca30bb 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -578,12 +578,12 @@ fn test_mycircuit_full_split() { ) .unwrap(); let mut challenges = HashMap::new(); - for phase in [0, 1] { + for phase in 0..cs.phases().count() { // for phase in [0] { println!("DBG phase {}", phase); - let witness = witness_calc.calc(phase, &challenges).unwrap(); + let witness = witness_calc.calc(phase as u8, &challenges).unwrap(); // println!("DBG witness: {:?}", witness); - challenges = prover.commit_phase(phase, witness).unwrap(); + challenges = prover.commit_phase(phase as u8, witness).unwrap(); // println!("DBG challenges {:?}", challenges); } prover.create_proof().unwrap(); From d350e727f11daf7c19dfe5ae6bddcc7b073fdf5d Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 15 Jan 2024 13:14:22 +0000 Subject: [PATCH 27/79] Address some of the review comments from @CPerezz --- Cargo.toml | 1 - halo2_proofs/Cargo.toml | 2 +- halo2_proofs/src/plonk.rs | 3 +- halo2_proofs/src/plonk/circuit.rs | 12 +++---- halo2_proofs/src/plonk/prover.rs | 20 +++--------- halo2_proofs/src/plonk/verifier.rs | 2 -- halo2_proofs/src/transcript.rs | 2 -- halo2_proofs/tests/frontend_backend_split.rs | 34 ++++++-------------- 8 files changed, 21 insertions(+), 55 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index bcc042a341..b44700ec43 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,4 +3,3 @@ members = [ "halo2", "halo2_proofs", ] -resolver = "2" diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index e15c425c5f..9ee1ed931f 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -95,7 +95,7 @@ thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] -dhat-heap = [] +heap-profiling = [] 
cost-estimator = ["serde", "serde_derive"] derive_serde = ["halo2curves/derive_serde"] diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 3edb21c826..eade0e5a74 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -108,8 +108,7 @@ pub struct VerifyingKey { domain: EvaluationDomain, fixed_commitments: Vec, permutation: permutation::VerifyingKey, - /// TODO: Remove pub - pub cs: ConstraintSystem, + cs: ConstraintSystem, /// Cached maximum degree of `cs` (which doesn't change after construction). cs_degree: usize, /// The representative of this `VerifyingKey` in transcripts. diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 7a77ef80c5..b6de88d7e0 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -4,7 +4,7 @@ use crate::dev::metadata; use crate::plonk::WitnessCollection; use crate::{ circuit::{Layouter, Region, Value}, - poly::{batch_invert_assigned, LagrangeCoeff, Polynomial, Rotation}, + poly::{batch_invert_assigned, Polynomial, Rotation}, }; use core::cmp::max; use core::ops::{Add, Mul}; @@ -1690,13 +1690,10 @@ impl Gate { pub struct PreprocessingV2 { // TODO(Edu): Can we replace this by a simpler structure? pub(crate) permutation: permutation::keygen::Assembly, - // TODO(Edu): Replace this by Vec>. Requires some methods of Polynomial to take Vec - // instead - // pub(crate) fixed: Vec>, pub(crate) fixed: Vec>, } -/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System /// as well as the fixed columns and copy constraints information. 
#[derive(Debug, Clone)] pub struct CompiledCircuitV2 { @@ -1859,7 +1856,6 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret &mut self, phase: u8, challenges: &HashMap, - // ) -> Result, LagrangeCoeff>>>, Error> { ) -> Result>>>, Error> { if phase != self.next_phase { return Err(Error::Other(format!( @@ -1873,6 +1869,7 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret 2 => ThirdPhase.to_sealed(), _ => unreachable!("only phase [0,2] supported"), }; + let mut witness = WitnessCollection { k: self.k, current_phase, @@ -1968,7 +1965,7 @@ pub fn compile_circuit>( cs.constants.clone(), )?; - let mut fixed = batch_invert_assigned(assembly.fixed); + let fixed = batch_invert_assigned(assembly.fixed); let (cs, selector_polys) = if compress_selectors { cs.compress_selectors(assembly.selectors.clone()) } else { @@ -2146,7 +2143,6 @@ impl ConstraintSystemV2Backend { fixed: queries.fixed, num_advice_queries, }; - // println!("DBG collected queries\n{:#?}", queries); (queries, gates, lookups, shuffles) } } diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index cc520fa74d..37ec57be57 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -92,9 +92,6 @@ impl< pub fn commit_phase( &mut self, phase: u8, - // TODO: Turn this into Vec>>. 
Requires batch_invert_assigned to work with - // Vec - // witness: Vec, LagrangeCoeff>>>, witness: Vec>>>, ) -> Result, Error> where @@ -161,7 +158,6 @@ impl< where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { - // println!("DBG prove vk.queries.advices {:?}", pk.vk.queries.advice); for instance in instances.iter() { if instance.len() != pk.vk.cs.num_instance_columns { return Err(Error::InvalidInstances); @@ -172,7 +168,6 @@ impl< pk.vk.hash_into(transcript)?; let meta = &pk.vk.cs; - // let queries = &pk.vk.queries; let phases = meta.phases().collect(); let domain = &pk.vk.domain; @@ -239,7 +234,11 @@ impl< let advice = vec![ AdviceSingle:: { - advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], + // Create vectors with empty polynomials to free space while they are not being used + advice_polys: vec![ + Polynomial::new_empty(0, Scheme::Scalar::ZERO); + meta.num_advice_columns + ], advice_blinds: vec![Blind::default(); meta.num_advice_columns], }; instances.len() @@ -264,9 +263,6 @@ impl< pub fn commit_phase( &mut self, phase: u8, - // TODO: Turn this into Vec>>. 
Requires batch_invert_assigned to work with - // Vec - // witness: Vec, LagrangeCoeff>>>>, witness: Vec>>>>, ) -> Result, Error> where @@ -284,9 +280,6 @@ impl< let params = self.params; let meta = &self.pk.vk.cs; - // let queries = &self.pk.vk.queries; - // println!("DBG commit_phase gate {:?}", meta.gates()[0]); - // println!("DBG commit_phase queries {:?}", meta.advice_queries()); let mut rng = &mut self.rng; @@ -922,15 +915,12 @@ where let mut challenges = HashMap::new(); let phases = prover.phases.clone(); for phase in &phases { - // for phase in [0] { println!("DBG phase {}", phase.0); let mut witnesses = Vec::with_capacity(circuits.len()); for witness_calc in witness_calcs.iter_mut() { witnesses.push(witness_calc.calc(phase.0, &challenges)?); } - // println!("DBG witness: {:?}", witness); challenges = prover.commit_phase(phase.0, witnesses).unwrap(); - // println!("DBG challenges {:?}", challenges); } prover.create_proof() } diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index fe7b9fb5a4..62c18c609a 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -383,8 +383,6 @@ where vanishing.verify(params, expressions, y, xn) }; - // println!("DBG verify fixed_queries:\n{:#?}", vk.cs.fixed_queries); - let queries = instance_commitments .iter() .zip(instance_evals.iter()) diff --git a/halo2_proofs/src/transcript.rs b/halo2_proofs/src/transcript.rs index ae2c39d5f6..6e4f812bdf 100644 --- a/halo2_proofs/src/transcript.rs +++ b/halo2_proofs/src/transcript.rs @@ -358,13 +358,11 @@ where fn write_point(&mut self, point: C) -> io::Result<()> { self.common_point(point)?; let compressed = point.to_bytes(); - // println!("DBG write_point\n{:02x?}", compressed.as_ref()); self.writer.write_all(compressed.as_ref()) } fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { self.common_scalar(scalar)?; let data = scalar.to_repr(); - // println!("DBG write_scalar\n{:02x?}", data.as_ref()); 
self.writer.write_all(data.as_ref()) } } diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 7716ca30bb..ef96971a10 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -1,7 +1,7 @@ #![allow(clippy::many_single_char_names)] #![allow(clippy::op_ref)] -#[cfg(feature = "dhat-heap")] +#[cfg(feature = "heap-profiling")] #[global_allocator] static ALLOC: dhat::Alloc = dhat::Alloc; @@ -468,14 +468,14 @@ fn test_mycircuit_mock() { use std::time::Instant; -const K: u32 = 8; -const WIDTH_FACTOR: usize = 1; -// const K: u32 = 16; -// const WIDTH_FACTOR: usize = 4; +// const K: u32 = 8; +// const WIDTH_FACTOR: usize = 1; +const K: u32 = 16; +const WIDTH_FACTOR: usize = 4; #[test] fn test_mycircuit_full_legacy() { - #[cfg(feature = "dhat-heap")] + #[cfg(feature = "heap-profiling")] let _profiler = dhat::Profiler::new_heap(); use halo2_proofs::plonk::{create_proof, keygen_pk, keygen_vk}; @@ -489,8 +489,6 @@ fn test_mycircuit_full_legacy() { let verifier_params = params.verifier_params(); let start = Instant::now(); let vk = keygen_vk(¶ms, &circuit).expect("keygen_vk should not fail"); - // println!("DBG gate {:?}", vk.cs.gates()[0]); - // println!("DBG queries {:?}", vk.cs.advice_queries()); let pk = keygen_pk(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); println!("Keygen: {:?}", start.elapsed()); @@ -513,10 +511,6 @@ fn test_mycircuit_full_legacy() { ) .expect("proof generation should not fail"); let proof = transcript.finalize(); - // println!("DBG proof.len={} ", proof.len()); - // for word in proof.chunks(32) { - // println!(" {:02x?}", word); - // } println!("Prove: {:?}", start.elapsed()); // Verify @@ -538,7 +532,7 @@ fn test_mycircuit_full_legacy() { #[test] fn test_mycircuit_full_split() { - #[cfg(feature = "dhat-heap")] + #[cfg(feature = "heap-profiling")] let _profiler = dhat::Profiler::new_heap(); let k = K; @@ -551,14 +545,13 @@ fn 
test_mycircuit_full_split() { let verifier_params = params.verifier_params(); let start = Instant::now(); let vk = keygen_vk_v2(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); - // println!("vk: {:#?}", vk); let pk = keygen_pk_v2(¶ms, vk.clone(), &compiled_circuit).expect("keygen_pk should not fail"); println!("Keygen: {:?}", start.elapsed()); drop(compiled_circuit); // Proving - println!("DBG Proving..."); + println!("Proving..."); let instances = circuit.instances(); let instances_slice: &[&[Fr]] = &(instances .iter() @@ -579,24 +572,17 @@ fn test_mycircuit_full_split() { .unwrap(); let mut challenges = HashMap::new(); for phase in 0..cs.phases().count() { - // for phase in [0] { - println!("DBG phase {}", phase); + println!("phase {}", phase); let witness = witness_calc.calc(phase as u8, &challenges).unwrap(); - // println!("DBG witness: {:?}", witness); challenges = prover.commit_phase(phase as u8, witness).unwrap(); - // println!("DBG challenges {:?}", challenges); } prover.create_proof().unwrap(); let proof = transcript.finalize(); - // println!("DBG proof.len={} ", proof.len()); - // for word in proof.chunks(32) { - // println!(" {:02x?}", word); - // } println!("Prove: {:?}", start.elapsed()); // Verify let start = Instant::now(); - println!("DBG Verifying..."); + println!("Verifying..."); let mut verifier_transcript = Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); let strategy = SingleStrategy::new(verifier_params); From 2c50b39812243d4eaf80315d2ed3ea9d0abe40ca Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 16 Jan 2024 13:51:24 +0000 Subject: [PATCH 28/79] Simplify Backend gate type --- halo2_proofs/src/plonk/circuit.rs | 40 +++++++++++++++---------------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index b6de88d7e0..7e57e2f6cb 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1623,12 
+1623,11 @@ impl>, Iter: IntoIterator> IntoIterato } } -/// GateV2Backend +/// A Gate contains a single polynomial identity with a name as metadata. #[derive(Clone, Debug)] pub struct GateV2Backend { name: String, - constraint_names: Vec, - polys: Vec>, + poly: ExpressionMid, } impl GateV2Backend { @@ -1637,14 +1636,9 @@ impl GateV2Backend { self.name.as_str() } - /// Returns the name of the constraint at index `constraint_index`. - pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[ExpressionMid] { - &self.polys + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &ExpressionMid { + &self.poly } } @@ -1988,11 +1982,19 @@ pub fn compile_circuit>( gates: cs .gates .iter() - .map(|g| GateV2Backend { - name: g.name.clone(), - constraint_names: g.constraint_names.clone(), - polys: g.polys.clone().into_iter().map(|e| e.into()).collect(), + .map(|g| { + g.polys.clone().into_iter().enumerate().map(|(i, e)| { + let name = match g.constraint_name(i) { + "" => g.name.clone(), + constraint_name => format!("{}:{}", g.name, constraint_name), + }; + GateV2Backend { + name, + poly: e.into(), + } + }) }) + .flatten() .collect(), permutation: cs.permutation.clone(), lookups: cs @@ -2073,12 +2075,8 @@ impl ConstraintSystemV2Backend { .iter() .map(|gate| Gate { name: gate.name.clone(), - constraint_names: gate.constraint_names.clone(), - polys: gate - .polynomials() - .iter() - .map(|e| queries.as_expression(e)) - .collect(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], queried_selectors: Vec::new(), // Unused? queried_cells: Vec::new(), // Unused? 
}) From c71fbfad9443154a41a86683cd698321c82debd3 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 16 Jan 2024 14:03:24 +0000 Subject: [PATCH 29/79] Document some types --- halo2_proofs/src/plonk/circuit.rs | 6 +++++- halo2_proofs/src/plonk/lookup.rs | 2 ++ halo2_proofs/src/plonk/prover.rs | 2 ++ halo2_proofs/src/plonk/shuffle.rs | 2 ++ 4 files changed, 11 insertions(+), 1 deletion(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 7e57e2f6cb..61636f05cd 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1917,7 +1917,11 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret } } -/// TODO: Document. Frontend function +/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the +/// circuit into its columns and the column configuration; as well as doing the fixed column and +/// copy constraints assignments. The output of this function can then be used for the key +/// generation, and proof generation. +/// If `compress_selectors` is true, multiple selector columns may be multiplexed. pub fn compile_circuit>( k: u32, circuit: &ConcreteCircuit, diff --git a/halo2_proofs/src/plonk/lookup.rs b/halo2_proofs/src/plonk/lookup.rs index 375404bad5..97be4b36e0 100644 --- a/halo2_proofs/src/plonk/lookup.rs +++ b/halo2_proofs/src/plonk/lookup.rs @@ -5,6 +5,7 @@ use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; +/// Expressions involved in a lookup argument, with a name as metadata. #[derive(Clone, Debug)] pub struct ArgumentV2 { pub(crate) name: String, @@ -12,6 +13,7 @@ pub struct ArgumentV2 { pub(crate) table_expressions: Vec>, } +/// Expressions involved in a lookup argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { pub(crate) name: String, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 37ec57be57..1168b1d519 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -31,12 +31,14 @@ use crate::{ }; use group::prime::PrimeCurveAffine; +/// Collection of instance data used during proving for a single circuit proof. #[derive(Debug)] struct InstanceSingle { pub instance_values: Vec>, pub instance_polys: Vec>, } +/// Collection of advice data used during proving for a single circuit proof. #[derive(Debug, Clone)] struct AdviceSingle { pub advice_polys: Vec>, diff --git a/halo2_proofs/src/plonk/shuffle.rs b/halo2_proofs/src/plonk/shuffle.rs index c2136b5d30..0779c2b451 100644 --- a/halo2_proofs/src/plonk/shuffle.rs +++ b/halo2_proofs/src/plonk/shuffle.rs @@ -5,6 +5,7 @@ use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; +/// Expressions involved in a shuffle argument, with a name as metadata. #[derive(Clone, Debug)] pub struct ArgumentV2 { pub(crate) name: String, @@ -12,6 +13,7 @@ pub struct ArgumentV2 { pub(crate) shuffle_expressions: Vec>, } +/// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { pub(crate) name: String, From 20c16ddf4272f1e92decc514a55d48aafbf0cea0 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 16 Jan 2024 14:20:40 +0000 Subject: [PATCH 30/79] Split collect_queries function --- halo2_proofs/src/plonk/circuit.rs | 211 +++++++++++++++++------------- 1 file changed, 117 insertions(+), 94 deletions(-) diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 61636f05cd..ad70c2ea8c 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1809,6 +1809,77 @@ pub struct ConstraintSystemV2Backend { pub(crate) general_column_annotations: HashMap, } +impl Into> for ConstraintSystem { + fn into(self) -> ConstraintSystemV2Backend { + ConstraintSystemV2Backend { + num_fixed_columns: self.num_fixed_columns, + num_advice_columns: self.num_advice_columns, + num_instance_columns: self.num_instance_columns, + num_challenges: self.num_challenges, + unblinded_advice_columns: self.unblinded_advice_columns.clone(), + advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), + gates: self + .gates + .iter() + .map(|g| { + g.polys.clone().into_iter().enumerate().map(|(i, e)| { + let name = match g.constraint_name(i) { + "" => g.name.clone(), + constraint_name => format!("{}:{}", g.name, constraint_name), + }; + GateV2Backend { + name, + poly: e.into(), + } + }) + }) + .flatten() + .collect(), + permutation: self.permutation.clone(), + lookups: self + .lookups + .iter() + .map(|l| lookup::ArgumentV2 { + name: l.name.clone(), + input_expressions: l + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + table_expressions: l + .table_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + shuffles: self + .shuffles + .iter() + .map(|s| shuffle::ArgumentV2 { + name: s.name.clone(), + input_expressions: s + 
.input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + shuffle_expressions: s + .shuffle_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + general_column_annotations: self.general_column_annotations.clone(), + } + } +} + /// Witness calculator. Frontend function #[derive(Debug)] pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { @@ -1974,73 +2045,6 @@ pub fn compile_circuit>( let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); fixed.extend(selector_polys.into_iter()); - let cs2 = ConstraintSystemV2Backend { - num_fixed_columns: cs.num_fixed_columns, - num_advice_columns: cs.num_advice_columns, - num_instance_columns: cs.num_instance_columns, - num_challenges: cs.num_challenges, - unblinded_advice_columns: cs.unblinded_advice_columns.clone(), - advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), - challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), - // TODO: Clean up all the Expression -> Expression conversions - gates: cs - .gates - .iter() - .map(|g| { - g.polys.clone().into_iter().enumerate().map(|(i, e)| { - let name = match g.constraint_name(i) { - "" => g.name.clone(), - constraint_name => format!("{}:{}", g.name, constraint_name), - }; - GateV2Backend { - name, - poly: e.into(), - } - }) - }) - .flatten() - .collect(), - permutation: cs.permutation.clone(), - lookups: cs - .lookups - .iter() - .map(|l| lookup::ArgumentV2 { - name: l.name.clone(), - input_expressions: l - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - table_expressions: l - .table_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - shuffles: cs - .shuffles - .iter() - .map(|s| shuffle::ArgumentV2 { - name: s.name.clone(), - input_expressions: s - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - shuffle_expressions: s - .shuffle_expressions 
- .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - general_column_annotations: cs.general_column_annotations.clone(), - }; let preprocessing = PreprocessingV2 { permutation: assembly.permutation, fixed, @@ -2048,7 +2052,7 @@ pub fn compile_circuit>( Ok(( CompiledCircuitV2 { - cs: cs2, + cs: cs.clone().into(), preprocessing, }, config, @@ -2057,25 +2061,10 @@ pub fn compile_circuit>( } impl ConstraintSystemV2Backend { - pub(crate) fn collect_queries( - &self, - ) -> ( - Queries, - Vec>, - Vec>, - Vec>, - ) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; - - let gates: Vec<_> = self - .gates + /// Collect queries used in gates while mapping those gates to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { + self.gates .iter() .map(|gate| Gate { name: gate.name.clone(), @@ -2084,9 +2073,13 @@ impl ConstraintSystemV2Backend { queried_selectors: Vec::new(), // Unused? queried_cells: Vec::new(), // Unused? }) - .collect(); - let lookups: Vec<_> = self - .lookups + .collect() + } + + /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { + self.lookups .iter() .map(|lookup| lookup::Argument { name: lookup.name.clone(), @@ -2101,9 +2094,13 @@ impl ConstraintSystemV2Backend { .map(|e| queries.as_expression(e)) .collect(), }) - .collect(); - let shuffles: Vec<_> = self - .shuffles + .collect() + } + + /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. 
+ fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { + self.shuffles .iter() .map(|shuffle| shuffle::Argument { name: shuffle.name.clone(), @@ -2118,8 +2115,34 @@ impl ConstraintSystemV2Backend { .map(|e| queries.as_expression(e)) .collect(), }) - .collect(); + .collect() + } + + /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the + /// expressions of gates, lookups and shuffles into equivalent ones with indexed query + /// references. + pub(crate) fn collect_queries( + &self, + ) -> ( + Queries, + Vec>, + Vec>, + Vec>, + ) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = self.collect_queries_gates(&mut queries); + let lookups = self.collect_queries_lookups(&mut queries); + let shuffles = self.collect_queries_shuffles(&mut queries); + // Each column used in a copy constraint involves a query at rotation current. 
for column in self.permutation.get_columns() { match column.column_type { Any::Instance => { From 371e69e67493b77e59395daab9220d3bab52c507 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 16 Jan 2024 15:42:31 +0000 Subject: [PATCH 31/79] Checkpoint --- Cargo.toml | 4 + halo2_backend/Cargo.toml | 80 + halo2_backend/src/arithmetic.rs | 554 +++++ halo2_backend/src/circuit/value.rs | 703 ++++++ halo2_backend/src/dev.rs | 3 + halo2_backend/src/dev/metadata.rs | 44 + halo2_backend/src/helpers.rs | 154 ++ halo2_backend/src/lib.rs | 20 + halo2_backend/src/multicore.rs | 38 + halo2_backend/src/plonk.rs | 560 +++++ halo2_backend/src/plonk/assigned.rs | 665 ++++++ halo2_backend/src/plonk/circuit.rs | 2015 +++++++++++++++++ halo2_backend/src/plonk/error.rs | 93 + halo2_backend/src/plonk/evaluation.rs | 869 +++++++ halo2_backend/src/plonk/keygen.rs | 159 ++ halo2_backend/src/plonk/lookup.rs | 108 + halo2_backend/src/plonk/lookup/prover.rs | 475 ++++ halo2_backend/src/plonk/lookup/verifier.rs | 210 ++ halo2_backend/src/plonk/permutation.rs | 166 ++ halo2_backend/src/plonk/permutation/keygen.rs | 460 ++++ halo2_backend/src/plonk/permutation/prover.rs | 329 +++ .../src/plonk/permutation/verifier.rs | 254 +++ halo2_backend/src/plonk/prover.rs | 736 ++++++ halo2_backend/src/plonk/shuffle.rs | 76 + halo2_backend/src/plonk/shuffle/prover.rs | 250 ++ halo2_backend/src/plonk/shuffle/verifier.rs | 137 ++ halo2_backend/src/plonk/vanishing.rs | 11 + halo2_backend/src/plonk/vanishing/prover.rs | 199 ++ halo2_backend/src/plonk/vanishing/verifier.rs | 138 ++ halo2_backend/src/plonk/verifier.rs | 459 ++++ halo2_backend/src/plonk/verifier/batch.rs | 135 ++ halo2_backend/src/poly.rs | 345 +++ halo2_backend/src/poly/commitment.rs | 245 ++ halo2_backend/src/poly/domain.rs | 557 +++++ halo2_backend/src/poly/ipa/commitment.rs | 370 +++ .../src/poly/ipa/commitment/prover.rs | 167 ++ .../src/poly/ipa/commitment/verifier.rs | 100 + halo2_backend/src/poly/ipa/mod.rs | 7 + 
halo2_backend/src/poly/ipa/msm.rs | 271 +++ halo2_backend/src/poly/ipa/multiopen.rs | 172 ++ .../src/poly/ipa/multiopen/prover.rs | 122 + .../src/poly/ipa/multiopen/verifier.rs | 148 ++ halo2_backend/src/poly/ipa/strategy.rs | 171 ++ halo2_backend/src/poly/kzg/commitment.rs | 417 ++++ halo2_backend/src/poly/kzg/mod.rs | 8 + halo2_backend/src/poly/kzg/msm.rs | 203 ++ halo2_backend/src/poly/kzg/multiopen.rs | 5 + halo2_backend/src/poly/kzg/multiopen/gwc.rs | 50 + .../src/poly/kzg/multiopen/gwc/prover.rs | 89 + .../src/poly/kzg/multiopen/gwc/verifier.rs | 124 + .../src/poly/kzg/multiopen/shplonk.rs | 247 ++ .../src/poly/kzg/multiopen/shplonk/prover.rs | 298 +++ .../poly/kzg/multiopen/shplonk/verifier.rs | 140 ++ halo2_backend/src/poly/kzg/strategy.rs | 181 ++ halo2_backend/src/poly/multiopen_test.rs | 298 +++ halo2_backend/src/poly/query.rs | 160 ++ halo2_backend/src/poly/strategy.rs | 31 + halo2_backend/src/transcript.rs | 554 +++++ halo2_common/Cargo.toml | 80 + halo2_common/src/lib.rs | 0 halo2_frontend/Cargo.toml | 80 + halo2_frontend/src/lib.rs | 0 halo2_middleware/Cargo.toml | 80 + halo2_middleware/src/lib.rs | 0 halo2_proofs/Cargo.toml | 1 + 65 files changed, 15825 insertions(+) create mode 100644 halo2_backend/Cargo.toml create mode 100644 halo2_backend/src/arithmetic.rs create mode 100644 halo2_backend/src/circuit/value.rs create mode 100644 halo2_backend/src/dev.rs create mode 100644 halo2_backend/src/dev/metadata.rs create mode 100644 halo2_backend/src/helpers.rs create mode 100644 halo2_backend/src/lib.rs create mode 100644 halo2_backend/src/multicore.rs create mode 100644 halo2_backend/src/plonk.rs create mode 100644 halo2_backend/src/plonk/assigned.rs create mode 100644 halo2_backend/src/plonk/circuit.rs create mode 100644 halo2_backend/src/plonk/error.rs create mode 100644 halo2_backend/src/plonk/evaluation.rs create mode 100644 halo2_backend/src/plonk/keygen.rs create mode 100644 halo2_backend/src/plonk/lookup.rs create mode 100644 
halo2_backend/src/plonk/lookup/prover.rs create mode 100644 halo2_backend/src/plonk/lookup/verifier.rs create mode 100644 halo2_backend/src/plonk/permutation.rs create mode 100644 halo2_backend/src/plonk/permutation/keygen.rs create mode 100644 halo2_backend/src/plonk/permutation/prover.rs create mode 100644 halo2_backend/src/plonk/permutation/verifier.rs create mode 100644 halo2_backend/src/plonk/prover.rs create mode 100644 halo2_backend/src/plonk/shuffle.rs create mode 100644 halo2_backend/src/plonk/shuffle/prover.rs create mode 100644 halo2_backend/src/plonk/shuffle/verifier.rs create mode 100644 halo2_backend/src/plonk/vanishing.rs create mode 100644 halo2_backend/src/plonk/vanishing/prover.rs create mode 100644 halo2_backend/src/plonk/vanishing/verifier.rs create mode 100644 halo2_backend/src/plonk/verifier.rs create mode 100644 halo2_backend/src/plonk/verifier/batch.rs create mode 100644 halo2_backend/src/poly.rs create mode 100644 halo2_backend/src/poly/commitment.rs create mode 100644 halo2_backend/src/poly/domain.rs create mode 100644 halo2_backend/src/poly/ipa/commitment.rs create mode 100644 halo2_backend/src/poly/ipa/commitment/prover.rs create mode 100644 halo2_backend/src/poly/ipa/commitment/verifier.rs create mode 100644 halo2_backend/src/poly/ipa/mod.rs create mode 100644 halo2_backend/src/poly/ipa/msm.rs create mode 100644 halo2_backend/src/poly/ipa/multiopen.rs create mode 100644 halo2_backend/src/poly/ipa/multiopen/prover.rs create mode 100644 halo2_backend/src/poly/ipa/multiopen/verifier.rs create mode 100644 halo2_backend/src/poly/ipa/strategy.rs create mode 100644 halo2_backend/src/poly/kzg/commitment.rs create mode 100644 halo2_backend/src/poly/kzg/mod.rs create mode 100644 halo2_backend/src/poly/kzg/msm.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen/gwc.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs create mode 100644 
halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen/shplonk.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs create mode 100644 halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs create mode 100644 halo2_backend/src/poly/kzg/strategy.rs create mode 100644 halo2_backend/src/poly/multiopen_test.rs create mode 100644 halo2_backend/src/poly/query.rs create mode 100644 halo2_backend/src/poly/strategy.rs create mode 100644 halo2_backend/src/transcript.rs create mode 100644 halo2_common/Cargo.toml create mode 100644 halo2_common/src/lib.rs create mode 100644 halo2_frontend/Cargo.toml create mode 100644 halo2_frontend/src/lib.rs create mode 100644 halo2_middleware/Cargo.toml create mode 100644 halo2_middleware/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index b44700ec43..458f57fbe9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,4 +2,8 @@ members = [ "halo2", "halo2_proofs", + "halo2_frontend", + "halo2_middleware", + "halo2_backend", + "halo2_common", ] diff --git a/halo2_backend/Cargo.toml b/halo2_backend/Cargo.toml new file mode 100644 index 0000000000..79b609d9b7 --- /dev/null +++ b/halo2_backend/Cargo.toml @@ -0,0 +1,80 @@ +[package] +name = "halo2_backend" +version = "0.3.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.66.0" +description = """ +TODO +""" +license = "MIT OR Apache-2.0" +repository = "TODO" +documentation = "TODO" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", 
default-features = false } +tracing = "0.1" +blake2b_simd = "1" # MSRV 1.66.0 +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +# Legacy circuit compatibility +halo2_legacy_pdqsort = { version = "0.1.0", optional = true } + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] +bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] +thread-safe-region = [] +sanity-checks = [] +batch = ["rand_core/getrandom"] +circuit-params = [] +heap-profiling = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false diff --git a/halo2_backend/src/arithmetic.rs b/halo2_backend/src/arithmetic.rs new file mode 100644 index 0000000000..0163e355eb --- /dev/null +++ b/halo2_backend/src/arithmetic.rs @@ -0,0 +1,554 @@ +//! This module provides common utilities, traits and structures for group, +//! field and polynomial arithmetic. + +use super::multicore; +pub use ff::Field; +use group::{ + ff::{BatchInvert, PrimeField}, + Curve, Group, GroupOpsOwned, ScalarMulOwned, +}; + +pub use halo2curves::{CurveAffine, CurveExt}; + +/// This represents an element of a group with basic operations that can be +/// performed. 
This allows an FFT implementation (for example) to operate +/// generically over either a field or elliptic curve group. +pub trait FftGroup: + Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned +{ +} + +impl FftGroup for T +where + Scalar: Field, + T: Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned, +{ +} + +fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) { + let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); + + let c = if bases.len() < 4 { + 1 + } else if bases.len() < 32 { + 3 + } else { + (f64::from(bases.len() as u32)).ln().ceil() as usize + }; + + fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize { + let skip_bits = segment * c; + let skip_bytes = skip_bits / 8; + + if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 { + return 0; + } + + let mut v = [0; 8]; + for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) { + *v = *o; + } + + let mut tmp = u64::from_le_bytes(v); + tmp >>= skip_bits - (skip_bytes * 8); + tmp %= 1 << c; + + tmp as usize + } + + let segments = (C::Scalar::NUM_BITS as usize / c) + 1; + + for current_segment in (0..segments).rev() { + for _ in 0..c { + *acc = acc.double(); + } + + #[derive(Clone, Copy)] + enum Bucket { + None, + Affine(C), + Projective(C::Curve), + } + + impl Bucket { + fn add_assign(&mut self, other: &C) { + *self = match *self { + Bucket::None => Bucket::Affine(*other), + Bucket::Affine(a) => Bucket::Projective(a + *other), + Bucket::Projective(mut a) => { + a += *other; + Bucket::Projective(a) + } + } + } + + fn add(self, mut other: C::Curve) -> C::Curve { + match self { + Bucket::None => other, + Bucket::Affine(a) => { + other += a; + other + } + Bucket::Projective(a) => other + &a, + } + } + } + + let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1]; + + for (coeff, base) in coeffs.iter().zip(bases.iter()) { + let coeff = get_at::(current_segment, c, coeff); + if coeff != 0 { + buckets[coeff - 1].add_assign(base); + } + } + + 
// Summation by parts + // e.g. 3a + 2b + 1c = a + + // (a) + b + + // ((a) + b) + c + let mut running_sum = C::Curve::identity(); + for exp in buckets.into_iter().rev() { + running_sum = exp.add(running_sum); + *acc += &running_sum; + } + } +} + +/// Performs a small multi-exponentiation operation. +/// Uses the double-and-add algorithm with doublings shared across points. +pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { + let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); + let mut acc = C::Curve::identity(); + + // for byte idx + for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() { + // for bit idx + for bit_idx in (0..8).rev() { + acc = acc.double(); + // for each coeff + for coeff_idx in 0..coeffs.len() { + let byte = coeffs[coeff_idx].as_ref()[byte_idx]; + if ((byte >> bit_idx) & 1) != 0 { + acc += bases[coeff_idx]; + } + } + } + } + + acc +} + +/// Performs a multi-exponentiation operation. +/// +/// This function will panic if coeffs and bases have a different length. +/// +/// This will use multithreading if beneficial. 
+pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { + assert_eq!(coeffs.len(), bases.len()); + + let num_threads = multicore::current_num_threads(); + if coeffs.len() > num_threads { + let chunk = coeffs.len() / num_threads; + let num_chunks = coeffs.chunks(chunk).len(); + let mut results = vec![C::Curve::identity(); num_chunks]; + multicore::scope(|scope| { + let chunk = coeffs.len() / num_threads; + + for ((coeffs, bases), acc) in coeffs + .chunks(chunk) + .zip(bases.chunks(chunk)) + .zip(results.iter_mut()) + { + scope.spawn(move |_| { + multiexp_serial(coeffs, bases, acc); + }); + } + }); + results.iter().fold(C::Curve::identity(), |a, b| a + b) + } else { + let mut acc = C::Curve::identity(); + multiexp_serial(coeffs, bases, &mut acc); + acc + } +} + +/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size +/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative +/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when +/// interpreted as the coefficients of a polynomial of degree $n - 1$, is +/// transformed into the evaluations of this polynomial at each of the $n$ +/// distinct powers of $\omega$. This transformation is invertible by providing +/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element +/// by $n$. +/// +/// This will use multithreading if beneficial. 
+pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) { + fn bitreverse(mut n: usize, l: usize) -> usize { + let mut r = 0; + for _ in 0..l { + r = (r << 1) | (n & 1); + n >>= 1; + } + r + } + + let threads = multicore::current_num_threads(); + let log_threads = log2_floor(threads); + let n = a.len(); + assert_eq!(n, 1 << log_n); + + for k in 0..n { + let rk = bitreverse(k, log_n as usize); + if k < rk { + a.swap(rk, k); + } + } + + // precompute twiddle factors + let twiddles: Vec<_> = (0..(n / 2)) + .scan(Scalar::ONE, |w, _| { + let tw = *w; + *w *= ω + Some(tw) + }) + .collect(); + + if log_n <= log_threads { + let mut chunk = 2_usize; + let mut twiddle_chunk = n / 2; + for _ in 0..log_n { + a.chunks_mut(chunk).for_each(|coeffs| { + let (left, right) = coeffs.split_at_mut(chunk / 2); + + // case when twiddle factor is one + let (a, left) = left.split_at_mut(1); + let (b, right) = right.split_at_mut(1); + let t = b[0]; + b[0] = a[0]; + a[0] += &t; + b[0] -= &t; + + left.iter_mut() + .zip(right.iter_mut()) + .enumerate() + .for_each(|(i, (a, b))| { + let mut t = *b; + t *= &twiddles[(i + 1) * twiddle_chunk]; + *b = *a; + *a += &t; + *b -= &t; + }); + }); + chunk *= 2; + twiddle_chunk /= 2; + } + } else { + recursive_butterfly_arithmetic(a, n, 1, &twiddles) + } +} + +/// This perform recursive butterfly arithmetic +pub fn recursive_butterfly_arithmetic>( + a: &mut [G], + n: usize, + twiddle_chunk: usize, + twiddles: &[Scalar], +) { + if n == 2 { + let t = a[1]; + a[1] = a[0]; + a[0] += &t; + a[1] -= &t; + } else { + let (left, right) = a.split_at_mut(n / 2); + multicore::join( + || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles), + || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles), + ); + + // case when twiddle factor is one + let (a, left) = left.split_at_mut(1); + let (b, right) = right.split_at_mut(1); + let t = b[0]; + b[0] = a[0]; + a[0] += &t; + b[0] -= &t; + + left.iter_mut() + .zip(right.iter_mut()) + 
.enumerate() + .for_each(|(i, (a, b))| { + let mut t = *b; + t *= &twiddles[(i + 1) * twiddle_chunk]; + *b = *a; + *a += &t; + *b -= &t; + }); + } +} + +/// Convert coefficient bases group elements to lagrange basis by inverse FFT. +pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { + let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]); + let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV; + for _ in k..C::Scalar::S { + omega_inv = omega_inv.square(); + } + + let mut g_lagrange_projective = g_projective; + best_fft(&mut g_lagrange_projective, omega_inv, k); + parallelize(&mut g_lagrange_projective, |g, _| { + for g in g.iter_mut() { + *g *= n_inv; + } + }); + + let mut g_lagrange = vec![C::identity(); 1 << k]; + parallelize(&mut g_lagrange, |g_lagrange, starts| { + C::Curve::batch_normalize( + &g_lagrange_projective[starts..(starts + g_lagrange.len())], + g_lagrange, + ); + }); + + g_lagrange +} + +/// This evaluates a provided polynomial (in coefficient form) at `point`. +pub fn eval_polynomial(poly: &[F], point: F) -> F { + fn evaluate(poly: &[F], point: F) -> F { + poly.iter() + .rev() + .fold(F::ZERO, |acc, coeff| acc * point + coeff) + } + let n = poly.len(); + let num_threads = multicore::current_num_threads(); + if n * 2 < num_threads { + evaluate(poly, point) + } else { + let chunk_size = (n + num_threads - 1) / num_threads; + let mut parts = vec![F::ZERO; num_threads]; + multicore::scope(|scope| { + for (chunk_idx, (out, poly)) in + parts.chunks_mut(1).zip(poly.chunks(chunk_size)).enumerate() + { + scope.spawn(move |_| { + let start = chunk_idx * chunk_size; + out[0] = evaluate(poly, point) * point.pow_vartime([start as u64, 0, 0, 0]); + }); + } + }); + parts.iter().fold(F::ZERO, |acc, coeff| acc + coeff) + } +} + +/// This computes the inner product of two vectors `a` and `b`. +/// +/// This function will panic if the two vectors are not the same size. +pub fn compute_inner_product(a: &[F], b: &[F]) -> F { + // TODO: parallelize? 
+ assert_eq!(a.len(), b.len()); + + let mut acc = F::ZERO; + for (a, b) in a.iter().zip(b.iter()) { + acc += (*a) * (*b); + } + + acc +} + +/// Divides polynomial `a` in `X` by `X - b` with +/// no remainder. +pub fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec +where + I::IntoIter: DoubleEndedIterator + ExactSizeIterator, +{ + b = -b; + let a = a.into_iter(); + + let mut q = vec![F::ZERO; a.len() - 1]; + + let mut tmp = F::ZERO; + for (q, r) in q.iter_mut().rev().zip(a.rev()) { + let mut lead_coeff = *r; + lead_coeff.sub_assign(&tmp); + *q = lead_coeff; + tmp = lead_coeff; + tmp.mul_assign(&b); + } + + q +} + +/// This utility function will parallelize an operation that is to be +/// performed over a mutable slice. +pub fn parallelize(v: &mut [T], f: F) { + // Algorithm rationale: + // + // Using the stdlib `chunks_mut` will lead to severe load imbalance. + // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 + // if the division is not exact, the last chunk will be the remainder. + // + // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, + // There will be a 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. + // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, + // a load imbalance of 2x. + // + // Instead we can divide work into chunks of size + // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 + // + // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut + // + // See also OpenMP spec (page 60) + // http://www.openmp.org/mp-documents/openmp-4.5.pdf + // "When no chunk_size is specified, the iteration space is divided into chunks + // that are approximately equal in size, and at most one chunk is distributed to + // each thread. The size of the chunks is unspecified in this case." 
+ // This implies chunks are the same size ±1 + + let f = &f; + let total_iters = v.len(); + let num_threads = multicore::current_num_threads(); + let base_chunk_size = total_iters / num_threads; + let cutoff_chunk_id = total_iters % num_threads; + let split_pos = cutoff_chunk_id * (base_chunk_size + 1); + let (v_hi, v_lo) = v.split_at_mut(split_pos); + + multicore::scope(|scope| { + // Skip special-case: number of iterations is cleanly divided by number of threads. + if cutoff_chunk_id != 0 { + for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { + let offset = chunk_id * (base_chunk_size + 1); + scope.spawn(move |_| f(chunk, offset)); + } + } + // Skip special-case: less iterations than number of threads. + if base_chunk_size != 0 { + for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { + let offset = split_pos + (chunk_id * base_chunk_size); + scope.spawn(move |_| f(chunk, offset)); + } + } + }); +} + +fn log2_floor(num: usize) -> u32 { + assert!(num > 0); + + let mut pow = 0; + + while (1 << (pow + 1)) <= num { + pow += 1; + } + + pow +} + +/// Returns coefficients of an n - 1 degree polynomial given a set of n points +/// and their evaluations. This function will panic if two values in `points` +/// are the same. 
+pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { + assert_eq!(points.len(), evals.len()); + if points.len() == 1 { + // Constant polynomial + vec![evals[0]] + } else { + let mut denoms = Vec::with_capacity(points.len()); + for (j, x_j) in points.iter().enumerate() { + let mut denom = Vec::with_capacity(points.len() - 1); + for x_k in points + .iter() + .enumerate() + .filter(|&(k, _)| k != j) + .map(|a| a.1) + { + denom.push(*x_j - x_k); + } + denoms.push(denom); + } + // Compute (x_j - x_k)^(-1) for each j != i + denoms.iter_mut().flat_map(|v| v.iter_mut()).batch_invert(); + + let mut final_poly = vec![F::ZERO; points.len()]; + for (j, (denoms, eval)) in denoms.into_iter().zip(evals.iter()).enumerate() { + let mut tmp: Vec = Vec::with_capacity(points.len()); + let mut product = Vec::with_capacity(points.len() - 1); + tmp.push(F::ONE); + for (x_k, denom) in points + .iter() + .enumerate() + .filter(|&(k, _)| k != j) + .map(|a| a.1) + .zip(denoms.into_iter()) + { + product.resize(tmp.len() + 1, F::ZERO); + for ((a, b), product) in tmp + .iter() + .chain(std::iter::once(&F::ZERO)) + .zip(std::iter::once(&F::ZERO).chain(tmp.iter())) + .zip(product.iter_mut()) + { + *product = *a * (-denom * x_k) + *b * denom; + } + std::mem::swap(&mut tmp, &mut product); + } + assert_eq!(tmp.len(), points.len()); + assert_eq!(product.len(), points.len() - 1); + for (final_coeff, interpolation_coeff) in final_poly.iter_mut().zip(tmp.into_iter()) { + *final_coeff += interpolation_coeff * eval; + } + } + final_poly + } +} + +pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { + fn evaluate(roots: &[F], z: F) -> F { + roots.iter().fold(F::ONE, |acc, point| (z - point) * acc) + } + let n = roots.len(); + let num_threads = multicore::current_num_threads(); + if n * 2 < num_threads { + evaluate(roots, z) + } else { + let chunk_size = (n + num_threads - 1) / num_threads; + let mut parts = vec![F::ONE; num_threads]; + multicore::scope(|scope| { + for (out, 
roots) in parts.chunks_mut(1).zip(roots.chunks(chunk_size)) { + scope.spawn(move |_| out[0] = evaluate(roots, z)); + } + }); + parts.iter().fold(F::ONE, |acc, part| acc * part) + } +} + +pub(crate) fn powers(base: F) -> impl Iterator { + std::iter::successors(Some(F::ONE), move |power| Some(base * power)) +} + +#[cfg(test)] +use rand_core::OsRng; + +#[cfg(test)] +use crate::halo2curves::pasta::Fp; + +#[test] +fn test_lagrange_interpolate() { + let rng = OsRng; + + let points = (0..5).map(|_| Fp::random(rng)).collect::>(); + let evals = (0..5).map(|_| Fp::random(rng)).collect::>(); + + for coeffs in 0..5 { + let points = &points[0..coeffs]; + let evals = &evals[0..coeffs]; + + let poly = lagrange_interpolate(points, evals); + assert_eq!(poly.len(), points.len()); + + for (point, eval) in points.iter().zip(evals) { + assert_eq!(eval_polynomial(&poly, *point), *eval); + } + } +} diff --git a/halo2_backend/src/circuit/value.rs b/halo2_backend/src/circuit/value.rs new file mode 100644 index 0000000000..db16a727c1 --- /dev/null +++ b/halo2_backend/src/circuit/value.rs @@ -0,0 +1,703 @@ +use std::borrow::Borrow; +use std::ops::{Add, Mul, Neg, Sub}; + +use group::ff::Field; + +use crate::plonk::{Assigned, Error}; + +/// A value that might exist within a circuit. +/// +/// This behaves like `Option` but differs in two key ways: +/// - It does not expose the enum cases, or provide an `Option::unwrap` equivalent. This +/// helps to ensure that unwitnessed values correctly propagate. +/// - It provides pass-through implementations of common traits such as `Add` and `Mul`, +/// for improved usability. +#[derive(Clone, Copy, Debug)] +pub struct Value { + inner: Option, +} + +impl Default for Value { + fn default() -> Self { + Self::unknown() + } +} + +impl Value { + /// Constructs an unwitnessed value. + pub const fn unknown() -> Self { + Self { inner: None } + } + + /// Constructs a known value. 
+ /// + /// # Examples + /// + /// ``` + /// use halo2_backend::circuit::Value; + /// + /// let v = Value::known(37); + /// ``` + pub const fn known(value: V) -> Self { + Self { inner: Some(value) } + } + + /// Obtains the inner value for assigning into the circuit. + /// + /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. + pub(crate) fn assign(self) -> Result { + self.inner.ok_or(Error::Synthesis) + } + + /// Converts from `&Value` to `Value<&V>`. + pub fn as_ref(&self) -> Value<&V> { + Value { + inner: self.inner.as_ref(), + } + } + + /// Converts from `&mut Value` to `Value<&mut V>`. + pub fn as_mut(&mut self) -> Value<&mut V> { + Value { + inner: self.inner.as_mut(), + } + } + + /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! + pub(crate) fn into_option(self) -> Option { + self.inner + } + + /// Enforces an assertion on the contained value, if known. + /// + /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce + /// circuit constraints with this method! + /// + /// # Panics + /// + /// Panics if `f` returns `false`. + pub fn assert_if_known bool>(&self, f: F) { + if let Some(value) = self.inner.as_ref() { + assert!(f(value)); + } + } + + /// Checks the contained value for an error condition, if known. + /// + /// The error check is ignored if `self` is [`Value::unknown()`]. Do not try to + /// enforce circuit constraints with this method! + pub fn error_if_known_and bool>(&self, f: F) -> Result<(), Error> { + match self.inner.as_ref() { + Some(value) if f(value) => Err(Error::Synthesis), + _ => Ok(()), + } + } + + /// Maps a `Value` to `Value` by applying a function to the contained value. + pub fn map W>(self, f: F) -> Value { + Value { + inner: self.inner.map(f), + } + } + + /// Returns [`Value::unknown()`] if the value is [`Value::unknown()`], otherwise calls + /// `f` with the wrapped value and returns the result. 
+ pub fn and_then Value>(self, f: F) -> Value { + match self.inner { + Some(v) => f(v), + None => Value::unknown(), + } + } + + /// Zips `self` with another `Value`. + /// + /// If `self` is `Value::known(s)` and `other` is `Value::known(o)`, this method + /// returns `Value::known((s, o))`. Otherwise, [`Value::unknown()`] is returned. + pub fn zip(self, other: Value) -> Value<(V, W)> { + Value { + inner: self.inner.zip(other.inner), + } + } +} + +impl Value<(V, W)> { + /// Unzips a value containing a tuple of two values. + /// + /// If `self` is `Value::known((a, b)), this method returns + /// `(Value::known(a), Value::known(b))`. Otherwise, + /// `(Value::unknown(), Value::unknown())` is returned. + pub fn unzip(self) -> (Value, Value) { + match self.inner { + Some((a, b)) => (Value::known(a), Value::known(b)), + None => (Value::unknown(), Value::unknown()), + } + } +} + +impl Value<&V> { + /// Maps a `Value<&V>` to a `Value` by copying the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn copied(self) -> Value + where + V: Copy, + { + Value { + inner: self.inner.copied(), + } + } + + /// Maps a `Value<&V>` to a `Value` by cloning the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn cloned(self) -> Value + where + V: Clone, + { + Value { + inner: self.inner.cloned(), + } + } +} + +impl Value<&mut V> { + /// Maps a `Value<&mut V>` to a `Value` by copying the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn copied(self) -> Value + where + V: Copy, + { + Value { + inner: self.inner.copied(), + } + } + + /// Maps a `Value<&mut V>` to a `Value` by cloning the contents of the value. 
+ #[must_use = "`self` will be dropped if the result is not used"] + pub fn cloned(self) -> Value + where + V: Clone, + { + Value { + inner: self.inner.cloned(), + } + } +} + +impl Value<[V; LEN]> { + /// Transposes a `Value<[V; LEN]>` into a `[Value; LEN]`. + /// + /// [`Value::unknown()`] will be mapped to `[Value::unknown(); LEN]`. + pub fn transpose_array(self) -> [Value; LEN] { + let mut ret = [Value::unknown(); LEN]; + if let Some(arr) = self.inner { + for (entry, value) in ret.iter_mut().zip(arr) { + *entry = Value::known(value); + } + } + ret + } +} + +impl Value +where + I: IntoIterator, + I::IntoIter: ExactSizeIterator, +{ + /// Transposes a `Value>` into a `Vec>`. + /// + /// [`Value::unknown()`] will be mapped to `vec![Value::unknown(); length]`. + /// + /// # Panics + /// + /// Panics if `self` is `Value::known(values)` and `values.len() != length`. + pub fn transpose_vec(self, length: usize) -> Vec> { + match self.inner { + Some(values) => { + let values = values.into_iter(); + assert_eq!(values.len(), length); + values.map(Value::known).collect() + } + None => (0..length).map(|_| Value::unknown()).collect(), + } + } +} + +// +// FromIterator +// + +impl> FromIterator> for Value { + /// Takes each element in the [`Iterator`]: if it is [`Value::unknown()`], no further + /// elements are taken, and the [`Value::unknown()`] is returned. Should no + /// [`Value::unknown()`] occur, a container of type `V` containing the values of each + /// [`Value`] is returned. 
+ fn from_iter>>(iter: I) -> Self { + Self { + inner: iter.into_iter().map(|v| v.inner).collect(), + } + } +} + +// +// Neg +// + +impl Neg for Value { + type Output = Value; + + fn neg(self) -> Self::Output { + Value { + inner: self.inner.map(|v| -v), + } + } +} + +// +// Add +// + +impl Add for Value +where + V: Add, +{ + type Output = Value; + + fn add(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for &Value +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a + b), + } + } +} + +impl Add> for Value +where + for<'v> V: Add<&'v V, Output = O>, +{ + type Output = Value; + + fn add(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add> for Value<&V> +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add<&Value> for Value +where + for<'v> V: Add<&'v V, Output = O>, +{ + type Output = Value; + + fn add(self, rhs: &Self) -> Self::Output { + self + rhs.as_ref() + } +} + +impl Add> for &Value +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Value) -> Self::Output { + self.as_ref() + rhs + } +} + +// +// Sub +// + +impl Sub for Value +where + V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for &Value +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a - b), + } + } +} + +impl Sub> for Value +where + for<'v> V: Sub<&'v V, Output = O>, +{ + type Output = Value; + + fn 
sub(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub> for Value<&V> +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub<&Value> for Value +where + for<'v> V: Sub<&'v V, Output = O>, +{ + type Output = Value; + + fn sub(self, rhs: &Self) -> Self::Output { + self - rhs.as_ref() + } +} + +impl Sub> for &Value +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Value) -> Self::Output { + self.as_ref() - rhs + } +} + +// +// Mul +// + +impl Mul for Value +where + V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for &Value +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a * b), + } + } +} + +impl Mul> for Value +where + for<'v> V: Mul<&'v V, Output = O>, +{ + type Output = Value; + + fn mul(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul> for Value<&V> +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul<&Value> for Value +where + for<'v> V: Mul<&'v V, Output = O>, +{ + type Output = Value; + + fn mul(self, rhs: &Self) -> Self::Output { + self * rhs.as_ref() + } +} + +impl Mul> for &Value +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Value) -> Self::Output { + self.as_ref() * rhs + } +} + +// +// Assigned +// + +impl From> for Value> { + fn from(value: Value) -> Self { + Self { + inner: value.inner.map(Assigned::from), + } + } +} 
+ +impl Add> for Value> { + type Output = Value>; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for Value> { + type Output = Value>; + + fn add(self, rhs: F) -> Self::Output { + self + Value::known(rhs) + } +} + +impl Add> for Value<&Assigned> { + type Output = Value>; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for Value<&Assigned> { + type Output = Value>; + + fn add(self, rhs: F) -> Self::Output { + self + Value::known(rhs) + } +} + +impl Sub> for Value> { + type Output = Value>; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for Value> { + type Output = Value>; + + fn sub(self, rhs: F) -> Self::Output { + self - Value::known(rhs) + } +} + +impl Sub> for Value<&Assigned> { + type Output = Value>; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for Value<&Assigned> { + type Output = Value>; + + fn sub(self, rhs: F) -> Self::Output { + self - Value::known(rhs) + } +} + +impl Mul> for Value> { + type Output = Value>; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for Value> { + type Output = Value>; + + fn mul(self, rhs: F) -> Self::Output { + self * Value::known(rhs) + } +} + +impl Mul> for Value<&Assigned> { + type Output = Value>; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for Value<&Assigned> { + type Output = Value>; + + fn mul(self, rhs: F) -> Self::Output { + self * Value::known(rhs) + } +} + +impl Value { + /// Returns the field element corresponding to this value. 
+ pub fn to_field(&self) -> Value> + where + for<'v> Assigned: From<&'v V>, + { + Value { + inner: self.inner.as_ref().map(|v| v.into()), + } + } + + /// Returns the field element corresponding to this value. + pub fn into_field(self) -> Value> + where + V: Into>, + { + Value { + inner: self.inner.map(|v| v.into()), + } + } + + /// Doubles this field element. + /// + /// # Examples + /// + /// If you have a `Value`, convert it to `Value>` first: + /// ``` + /// # use halo2curves::pasta::pallas::Base as F; + /// use halo2_backend::{circuit::Value, plonk::Assigned}; + /// + /// let v = Value::known(F::from(2)); + /// let v: Value> = v.into(); + /// v.double(); + /// ``` + pub fn double(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().double()), + } + } + + /// Squares this field element. + pub fn square(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().square()), + } + } + + /// Cubes this field element. + pub fn cube(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().cube()), + } + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().invert()), + } + } +} + +impl Value> { + /// Evaluates this value directly, performing an unbatched inversion if necessary. + /// + /// If the denominator is zero, the returned value is zero. + pub fn evaluate(self) -> Value { + Value { + inner: self.inner.map(|v| v.evaluate()), + } + } +} diff --git a/halo2_backend/src/dev.rs b/halo2_backend/src/dev.rs new file mode 100644 index 0000000000..d848651ca0 --- /dev/null +++ b/halo2_backend/src/dev.rs @@ -0,0 +1,3 @@ +//! Tools for developing circuits. 
+ +pub mod metadata; diff --git a/halo2_backend/src/dev/metadata.rs b/halo2_backend/src/dev/metadata.rs new file mode 100644 index 0000000000..9442acd2f1 --- /dev/null +++ b/halo2_backend/src/dev/metadata.rs @@ -0,0 +1,44 @@ +//! Metadata about circuits. + +use crate::plonk::{self, Any}; +use std::fmt::{self, Debug}; +/// Metadata about a column within a circuit. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Column { + /// The type of the column. + pub(super) column_type: Any, + /// The index of the column. + pub(super) index: usize, +} + +impl Column { + /// Return the column type. + pub fn column_type(&self) -> Any { + self.column_type + } + /// Return the column index. + pub fn index(&self) -> usize { + self.index + } +} + +impl fmt::Display for Column { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Column('{:?}', {})", self.column_type, self.index) + } +} + +impl From<(Any, usize)> for Column { + fn from((column_type, index): (Any, usize)) -> Self { + Column { column_type, index } + } +} + +impl From> for Column { + fn from(column: plonk::Column) -> Self { + Column { + column_type: *column.column_type(), + index: column.index(), + } + } +} diff --git a/halo2_backend/src/helpers.rs b/halo2_backend/src/helpers.rs new file mode 100644 index 0000000000..faf7351a3e --- /dev/null +++ b/halo2_backend/src/helpers.rs @@ -0,0 +1,154 @@ +use crate::poly::Polynomial; +use ff::PrimeField; +use halo2curves::{serde::SerdeObject, CurveAffine}; +use std::io; + +/// This enum specifies how various types are serialized and deserialized. +#[derive(Clone, Copy, Debug)] +pub enum SerdeFormat { + /// Curve elements are serialized in compressed form. + /// Field elements are serialized in standard form, with endianness specified by the + /// `PrimeField` implementation. + Processed, + /// Curve elements are serialized in uncompressed form. Field elements are serialized + /// in their internal Montgomery representation. 
+ /// When deserializing, checks are performed to ensure curve elements indeed lie on the curve and field elements + /// are less than modulus. + RawBytes, + /// Serialization is the same as `RawBytes`, but no checks are performed. + RawBytesUnchecked, +} + +// Keep this trait for compatibility with IPA serialization +pub(crate) trait CurveRead: CurveAffine { + /// Reads a compressed element from the buffer and attempts to parse it + /// using `from_bytes`. + fn read(reader: &mut R) -> io::Result { + let mut compressed = Self::Repr::default(); + reader.read_exact(compressed.as_mut())?; + Option::from(Self::from_bytes(&compressed)) + .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Invalid point encoding in proof")) + } +} +impl CurveRead for C {} + +pub trait SerdeCurveAffine: CurveAffine + SerdeObject { + /// Reads an element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompress it + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + match format { + SerdeFormat::Processed => ::read(reader), + SerdeFormat::RawBytes => ::read_raw(reader), + SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), + } + } + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + match format { + SerdeFormat::Processed => writer.write_all(self.to_bytes().as_ref()), + _ => self.write_raw(writer), + } + } + + /// Byte length of an affine curve element according to `format`. + fn byte_length(format: SerdeFormat) -> usize { + match format { + SerdeFormat::Processed => Self::default().to_bytes().as_ref().len(), + _ => Self::Repr::default().as_ref().len() * 2, + } + } +} +impl SerdeCurveAffine for C {} + +pub trait SerdePrimeField: PrimeField + SerdeObject { + /// Reads a field element as bytes from the buffer according to the `format`: + /// - `Processed`: Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations, + /// and checks that the element is less than the modulus. + /// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks. 
+ fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + match format { + SerdeFormat::Processed => { + let mut compressed = Self::Repr::default(); + reader.read_exact(compressed.as_mut())?; + Option::from(Self::from_repr(compressed)).ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "Invalid prime field point encoding") + }) + } + SerdeFormat::RawBytes => ::read_raw(reader), + SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), + } + } + + /// Writes a field element as bytes to the buffer according to the `format`: + /// - `Processed`: Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + match format { + SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()), + _ => self.write_raw(writer), + } + } +} +impl SerdePrimeField for F {} + +/// Convert a slice of `bool` into a `u8`. +/// +/// Panics if the slice has length greater than 8. +pub fn pack(bits: &[bool]) -> u8 { + let mut value = 0u8; + assert!(bits.len() <= 8); + for (bit_index, bit) in bits.iter().enumerate() { + value |= (*bit as u8) << bit_index; + } + value +} + +/// Writes the first `bits.len()` bits of a `u8` into `bits`. 
+pub fn unpack(byte: u8, bits: &mut [bool]) { + for (bit_index, bit) in bits.iter_mut().enumerate() { + *bit = (byte >> bit_index) & 1 == 1; + } +} + +/// Reads a vector of polynomials from buffer +pub(crate) fn read_polynomial_vec( + reader: &mut R, + format: SerdeFormat, +) -> io::Result>> { + let mut len = [0u8; 4]; + reader.read_exact(&mut len)?; + let len = u32::from_be_bytes(len); + + (0..len) + .map(|_| Polynomial::::read(reader, format)) + .collect::>>() +} + +/// Writes a slice of polynomials to buffer +pub(crate) fn write_polynomial_slice( + slice: &[Polynomial], + writer: &mut W, + format: SerdeFormat, +) -> io::Result<()> { + writer.write_all(&(slice.len() as u32).to_be_bytes())?; + for poly in slice.iter() { + poly.write(writer, format)?; + } + Ok(()) +} + +/// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length +pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { + let field_len = F::default().to_repr().as_ref().len(); + 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) +} diff --git a/halo2_backend/src/lib.rs b/halo2_backend/src/lib.rs new file mode 100644 index 0000000000..2d8e3e11bb --- /dev/null +++ b/halo2_backend/src/lib.rs @@ -0,0 +1,20 @@ +//! # halo2_backend + +#![cfg_attr(docsrs, feature(doc_cfg))] +// The actual lints we want to disable. 
+#![allow(clippy::op_ref, clippy::many_single_char_names)] +#![deny(rustdoc::broken_intra_doc_links)] +#![deny(missing_debug_implementations)] +#![deny(missing_docs)] +#![deny(unsafe_code)] + +pub mod arithmetic; +pub use halo2curves; +mod multicore; +pub mod plonk; +pub mod poly; +pub mod transcript; + +pub mod dev; +mod helpers; +pub use helpers::SerdeFormat; diff --git a/halo2_backend/src/multicore.rs b/halo2_backend/src/multicore.rs new file mode 100644 index 0000000000..4d30b91a8b --- /dev/null +++ b/halo2_backend/src/multicore.rs @@ -0,0 +1,38 @@ +pub use rayon::{ + current_num_threads, + iter::{IndexedParallelIterator, IntoParallelRefIterator}, + iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, + join, scope, + slice::ParallelSliceMut, + Scope, +}; + +pub trait TryFoldAndReduce { + /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, + /// falling back on `Iterator::try_fold` when the `multicore` feature flag is + /// disabled. + /// The `try_fold_and_reduce` function can only be called by a iter with + /// `Result` item type because the `fold_op` must meet the trait + /// bounds of both `try_fold` and `try_reduce` from rayon. + fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result; +} + +impl TryFoldAndReduce for I +where + T: Send + Sync, + E: Send + Sync, + I: rayon::iter::ParallelIterator>, +{ + fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result { + self.try_fold(&identity, &fold_op) + .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) + } +} diff --git a/halo2_backend/src/plonk.rs b/halo2_backend/src/plonk.rs new file mode 100644 index 0000000000..4bd8366c58 --- /dev/null +++ b/halo2_backend/src/plonk.rs @@ -0,0 +1,560 @@ +//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] +//! 
that is designed specifically for the polynomial commitment scheme described +//! in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 +//! [plonk]: https://eprint.iacr.org/2019/953 + +use blake2b_simd::Params as Blake2bParams; +use group::ff::{Field, FromUniformBytes, PrimeField}; + +use crate::arithmetic::CurveAffine; +use crate::helpers::{ + polynomial_slice_byte_length, write_polynomial_slice, SerdeCurveAffine, SerdePrimeField, +}; +use crate::poly::{ + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, + Polynomial, Rotation, +}; +use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use crate::SerdeFormat; + +mod assigned; +mod circuit; +mod error; +mod evaluation; +mod keygen; +mod lookup; +pub mod permutation; +mod shuffle; +mod vanishing; + +mod prover; +mod verifier; + +pub use assigned::*; +pub use circuit::*; +pub use error::*; +pub use keygen::*; +pub use prover::*; +pub use verifier::*; + +use evaluation::Evaluator; +use std::io; + +/// List of queries (columns and rotations) used by a circuit +#[derive(Debug, Clone)] +pub struct Queries { + /// List of unique advice queries + pub advice: Vec<(Column, Rotation)>, + /// List of unique instance queries + pub instance: Vec<(Column, Rotation)>, + /// List of unique fixed queries + pub fixed: Vec<(Column, Rotation)>, + /// Contains an integer for each advice column + /// identifying how many distinct queries it has + /// so far; should be same length as cs.num_advice_columns. + pub num_advice_queries: Vec, +} + +impl Queries { + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. 
+ pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) + + 1 // for at least one row + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } +} + +/// This is a verifying key which allows for the verification of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + /// Cached maximum degree of `cs` (which doesn't change after construction). 
+ cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, + selectors: Vec>, + /// Whether selector compression is turned on or not. + compress_selectors: bool, +} + +// Current version of the VK +const VERSION: u8 = 0x03; + +impl VerifyingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a verifying key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + // Version byte that will be checked on read. + writer.write_all(&[VERSION])?; + let k = &self.domain.k(); + assert!(*k <= C::Scalar::S); + // k value fits in 1 byte + writer.write_all(&[*k as u8])?; + writer.write_all(&[self.compress_selectors as u8])?; + writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; + for commitment in &self.fixed_commitments { + commitment.write(writer, format)?; + } + self.permutation.write(writer, format)?; + + if !self.compress_selectors { + assert!(self.selectors.is_empty()); + } + // write self.selectors + for selector in &self.selectors { + // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write + for bits in selector.chunks(8) { + writer.write_all(&[crate::helpers::pack(bits)])?; + } + } + Ok(()) + } + + // TODO + /* + /// Reads a verification key from a buffer. 
+ /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let mut version_byte = [0u8; 1]; + reader.read_exact(&mut version_byte)?; + if VERSION != version_byte[0] { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected version byte", + )); + } + + let mut k = [0u8; 1]; + reader.read_exact(&mut k)?; + let k = u8::from_le_bytes(k); + if k as u32 > C::Scalar::S { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!( + "circuit size value (k): {} exceeds maxium: {}", + k, + C::Scalar::S + ), + )); + } + let mut compress_selectors = [0u8; 1]; + reader.read_exact(&mut compress_selectors)?; + if compress_selectors[0] != 0 && compress_selectors[0] != 1 { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected compress_selectors not boolean", + )); + } + let compress_selectors = compress_selectors[0] == 1; + let (domain, cs, _) = keygen::create_domain::( + k as u32, + #[cfg(feature = "circuit-params")] + params, + ); + let mut num_fixed_columns = [0u8; 4]; + reader.read_exact(&mut num_fixed_columns)?; + let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); + + let fixed_commitments: Vec<_> = (0..num_fixed_columns) + .map(|_| C::read(reader, format)) + .collect::>()?; + 
+ let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; + + let (cs, selectors) = if compress_selectors { + // read selectors + let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] + .into_iter() + .map(|mut selector| { + let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; + reader.read_exact(&mut selector_bytes)?; + for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { + crate::helpers::unpack(byte, bits); + } + Ok(selector) + }) + .collect::>()?; + let (cs, _) = cs.compress_selectors(selectors.clone()); + (cs, selectors) + } else { + // we still need to replace selectors with fixed Expressions in `cs` + let fake_selectors = vec![vec![]; cs.num_selectors]; + let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); + (cs, vec![]) + }; + + Ok(Self::from_parts( + domain, + fixed_commitments, + permutation, + cs, + selectors, + compress_selectors, + )) + } + */ + + /// Writes a verifying key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + // TODO + /* + /// Reads a verification key from a slice of bytes using [`Self::read`]. 
+ pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } + */ +} + +impl VerifyingKey { + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + 10 + (self.fixed_commitments.len() * C::byte_length(format)) + + self.permutation.bytes_length(format) + + self.selectors.len() + * (self + .selectors + .get(0) + .map(|selector| (selector.len() + 7) / 8) + .unwrap_or(0)) + } + + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + selectors: Vec>, + compress_selectors: bool, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. + let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. + transcript_repr: C::Scalar::ZERO, + selectors, + compress_selectors, + }; + + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); + + let s = format!("{:?}", vk.pinned()); + + hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); + + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + + vk + } + + /// Hashes a verification key into a transcript. + pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; + + Ok(()) + } + + /// Obtains a pinned representation of this verification key that contains + /// the minimal information necessary to reconstruct the verification key. 
+ pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { + PinnedVerificationKey { + base_modulus: C::Base::MODULUS, + scalar_modulus: C::Scalar::MODULUS, + domain: self.domain.pinned(), + fixed_commitments: &self.fixed_commitments, + permutation: &self.permutation, + cs: self.cs.pinned(), + } + } + + /// Returns commitments of fixed polynomials + pub fn fixed_commitments(&self) -> &Vec { + &self.fixed_commitments + } + + /// Returns `VerifyingKey` of permutation + pub fn permutation(&self) -> &permutation::VerifyingKey { + &self.permutation + } + + /// Returns `ConstraintSystem` + pub fn cs(&self) -> &ConstraintSystem { + &self.cs + } + + /// Returns representative of this `VerifyingKey` in transcripts + pub fn transcript_repr(&self) -> C::Scalar { + self.transcript_repr + } +} + +/// Minimal representation of a verification key that can be used to identify +/// its active contents. +#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedVerificationKey<'a, C: CurveAffine> { + base_modulus: &'static str, + scalar_modulus: &'static str, + domain: PinnedEvaluationDomain<'a, C::Scalar>, + cs: PinnedConstraintSystem<'a, C::Scalar>, + fixed_commitments: &'a Vec, + permutation: &'a permutation::VerifyingKey, +} + +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct ProvingKey { + vk: VerifyingKey, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: permutation::ProvingKey, + ev: Evaluator, +} + +impl ProvingKey +where + C::Scalar: FromUniformBytes<64>, +{ + /// Get the underlying [`VerifyingKey`]. 
+ pub fn get_vk(&self) -> &VerifyingKey { + &self.vk + } + + /// Gets the total number of bytes in the serialization of `self` + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + let scalar_len = C::Scalar::default().to_repr().as_ref().len(); + self.vk.bytes_length(format) + + 12 + + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) + + polynomial_slice_byte_length(&self.fixed_values) + + polynomial_slice_byte_length(&self.fixed_polys) + + polynomial_slice_byte_length(&self.fixed_cosets) + + self.permutation.bytes_length() + } +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a proving key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + self.vk.write(writer, format)?; + self.l0.write(writer, format)?; + self.l_last.write(writer, format)?; + self.l_active_row.write(writer, format)?; + write_polynomial_slice(&self.fixed_values, writer, format)?; + write_polynomial_slice(&self.fixed_polys, writer, format)?; + write_polynomial_slice(&self.fixed_cosets, writer, format)?; + self.permutation.write(writer, format)?; + Ok(()) + } + + // TODO + /* + /// Reads a proving key from a buffer. 
+ /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let vk = VerifyingKey::::read::( + reader, + format, + #[cfg(feature = "circuit-params")] + params, + )?; + let l0 = Polynomial::read(reader, format)?; + let l_last = Polynomial::read(reader, format)?; + let l_active_row = Polynomial::read(reader, format)?; + let fixed_values = read_polynomial_vec(reader, format)?; + let fixed_polys = read_polynomial_vec(reader, format)?; + let fixed_cosets = read_polynomial_vec(reader, format)?; + let permutation = permutation::ProvingKey::read(reader, format)?; + let ev = Evaluator::new(vk.cs()); + Ok(Self { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation, + ev, + }) + } + */ + + /// Writes a proving key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + // TODO + /* + /// Reads a proving key from a slice of bytes using [`Self::read`]. 
+ pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } + */ +} + +impl VerifyingKey { + /// Get the underlying [`EvaluationDomain`]. + pub fn get_domain(&self) -> &EvaluationDomain { + &self.domain + } +} + +#[derive(Clone, Copy, Debug)] +struct Theta; +type ChallengeTheta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Beta; +type ChallengeBeta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Gamma; +type ChallengeGamma = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Y; +type ChallengeY = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X; +type ChallengeX = ChallengeScalar; diff --git a/halo2_backend/src/plonk/assigned.rs b/halo2_backend/src/plonk/assigned.rs new file mode 100644 index 0000000000..07de325678 --- /dev/null +++ b/halo2_backend/src/plonk/assigned.rs @@ -0,0 +1,665 @@ +use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; + +use group::ff::Field; + +/// A value assigned to a cell within a circuit. +/// +/// Stored as a fraction, so the backend can use batch inversion. +/// +/// A denominator of zero maps to an assigned value of zero. +#[derive(Clone, Copy, Debug)] +pub enum Assigned { + /// The field element zero. + Zero, + /// A value that does not require inversion to evaluate. + Trivial(F), + /// A value stored as a fraction to enable batch inversion. 
+ Rational(F, F), +} + +impl From<&Assigned> for Assigned { + fn from(val: &Assigned) -> Self { + *val + } +} + +impl From<&F> for Assigned { + fn from(numerator: &F) -> Self { + Assigned::Trivial(*numerator) + } +} + +impl From for Assigned { + fn from(numerator: F) -> Self { + Assigned::Trivial(numerator) + } +} + +impl From<(F, F)> for Assigned { + fn from((numerator, denominator): (F, F)) -> Self { + Assigned::Rational(numerator, denominator) + } +} + +impl PartialEq for Assigned { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + // At least one side is directly zero. + (Self::Zero, Self::Zero) => true, + (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + x.is_zero_vartime() + } + + // Okay, we need to do some actual math... + (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, + (Self::Trivial(x), Self::Rational(numerator, denominator)) + | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { + &(*x * denominator) == numerator + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, + } + } +} + +impl Eq for Assigned {} + +impl Neg for Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + match self { + Self::Zero => Self::Zero, + Self::Trivial(numerator) => Self::Trivial(-numerator), + Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), + } + } +} + +impl Neg for &Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + -*self + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + // One side is directly zero. 
+ (Self::Zero, _) => rhs, + (_, Self::Zero) => self, + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + other + } + + // Okay, we need to do some actual math... + (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator + denominator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + self + Self::Trivial(rhs) + } +} + +impl Add for &Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for Assigned { + type Output = Assigned; + fn add(self, rhs: &Self) -> Assigned { + self + *rhs + } +} + +impl Add> for &Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for &Assigned { + type Output = Assigned; + fn add(self, rhs: &Assigned) -> Assigned { + *self + *rhs + } +} + +impl AddAssign for Assigned { + fn add_assign(&mut self, rhs: Self) { + *self = *self + rhs; + } +} + +impl AddAssign<&Assigned> for Assigned { + fn add_assign(&mut self, rhs: &Self) { + *self = *self + rhs; + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + self + (-rhs) + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + self + (-rhs) + } +} + +impl Sub for &Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + *self - rhs + } +} + +impl 
Sub<&Assigned> for Assigned { + type Output = Assigned; + fn sub(self, rhs: &Self) -> Assigned { + self - *rhs + } +} + +impl Sub> for &Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for &Assigned { + type Output = Assigned; + fn sub(self, rhs: &Assigned) -> Assigned { + *self - *rhs + } +} + +impl SubAssign for Assigned { + fn sub_assign(&mut self, rhs: Self) { + *self = *self - rhs; + } +} + +impl SubAssign<&Assigned> for Assigned { + fn sub_assign(&mut self, rhs: &Self) { + *self = *self - rhs; + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + (Self::Zero, _) | (_, Self::Zero) => Self::Zero, + (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + self * Self::Trivial(rhs) + } +} + +impl Mul for &Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + *self * rhs + } +} + +impl Mul<&Assigned> for Assigned { + type Output = Assigned; + fn mul(self, rhs: &Assigned) -> Assigned { + self * *rhs + } +} + +impl MulAssign for Assigned { + fn mul_assign(&mut self, rhs: Self) { + *self = *self * rhs; + } +} + +impl MulAssign<&Assigned> for Assigned { + fn mul_assign(&mut self, rhs: &Self) { + *self = *self * rhs; + } +} + +impl Assigned { + /// Returns the numerator. 
+ pub fn numerator(&self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => *x, + Self::Rational(numerator, _) => *numerator, + } + } + + /// Returns the denominator, if non-trivial. + pub fn denominator(&self) -> Option { + match self { + Self::Zero => None, + Self::Trivial(_) => None, + Self::Rational(_, denominator) => Some(*denominator), + } + } + + /// Returns true iff this element is zero. + pub fn is_zero_vartime(&self) -> bool { + match self { + Self::Zero => true, + Self::Trivial(x) => x.is_zero_vartime(), + // Assigned maps x/0 -> 0. + Self::Rational(numerator, denominator) => { + numerator.is_zero_vartime() || denominator.is_zero_vartime() + } + } + } + + /// Doubles this element. + #[must_use] + pub fn double(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.double()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.double(), *denominator) + } + } + } + + /// Squares this element. + #[must_use] + pub fn square(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.square()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.square(), denominator.square()) + } + } + } + + /// Cubes this element. + #[must_use] + pub fn cube(&self) -> Self { + self.square() * self + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Rational(F::ONE, *x), + Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), + } + } + + /// Evaluates this assigned value directly, performing an unbatched inversion if + /// necessary. + /// + /// If the denominator is zero, this returns zero. 
+ pub fn evaluate(self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => x, + Self::Rational(numerator, denominator) => { + if denominator == F::ONE { + numerator + } else { + numerator * denominator.invert().unwrap_or(F::ZERO) + } + } + } + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::Assigned; + // We use (numerator, denominator) in the comments below to denote a rational. + #[test] + fn add_trivial_to_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // 2 + (1,0) = 2 + 0 = 2 + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn add_rational_to_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) + (1,0) = (1,2) + 0 = (1,2) + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn sub_trivial_from_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - 2 = 0 - 2 = -2 + // This fails if subtraction is implemented using normal rules for rationals. + assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // 2 - (1,0) = 2 - 0 = 2 + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn sub_rational_from_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - (1,2) = 0 - (1,2) = -(1,2) + // This fails if subtraction is implemented using normal rules for rationals. 
+ assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // (1,2) - (1,0) = (1,2) - 0 = (1,2) + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn mul_rational_by_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) * (1,0) = (1,2) * 0 = 0 + assert_eq!((a * b).evaluate(), Fp::zero()); + + // (1,0) * (1,2) = 0 * (1,2) = 0 + assert_eq!((b * a).evaluate(), Fp::zero()); + } +} + +#[cfg(test)] +mod proptests { + use std::{ + cmp, + ops::{Add, Mul, Neg, Sub}, + }; + + use group::ff::Field; + use halo2curves::pasta::Fp; + use proptest::{collection::vec, prelude::*, sample::select}; + + use super::Assigned; + + trait UnaryOperand: Neg { + fn double(&self) -> Self; + fn square(&self) -> Self; + fn cube(&self) -> Self; + fn inv0(&self) -> Self; + } + + impl UnaryOperand for F { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert().unwrap_or(F::ZERO) + } + } + + impl UnaryOperand for Assigned { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert() + } + } + + #[derive(Clone, Debug)] + enum UnaryOperator { + Neg, + Double, + Square, + Cube, + Inv0, + } + + const UNARY_OPERATORS: &[UnaryOperator] = &[ + UnaryOperator::Neg, + UnaryOperator::Double, + UnaryOperator::Square, + UnaryOperator::Cube, + UnaryOperator::Inv0, + ]; + + impl UnaryOperator { + fn apply(&self, a: F) -> F { + match self { + Self::Neg => -a, + Self::Double => a.double(), + Self::Square => a.square(), + Self::Cube => a.cube(), + Self::Inv0 => a.inv0(), + } + } + } + + trait BinaryOperand: Sized + Add + Sub + Mul {} + impl BinaryOperand for F {} + impl BinaryOperand for Assigned {} + + #[derive(Clone, Debug)] + 
enum BinaryOperator { + Add, + Sub, + Mul, + } + + const BINARY_OPERATORS: &[BinaryOperator] = &[ + BinaryOperator::Add, + BinaryOperator::Sub, + BinaryOperator::Mul, + ]; + + impl BinaryOperator { + fn apply(&self, a: F, b: F) -> F { + match self { + Self::Add => a + b, + Self::Sub => a - b, + Self::Mul => a * b, + } + } + } + + #[derive(Clone, Debug)] + enum Operator { + Unary(UnaryOperator), + Binary(BinaryOperator), + } + + prop_compose! { + /// Use narrow that can be easily reduced. + fn arb_element()(val in any::()) -> Fp { + Fp::from(val) + } + } + + prop_compose! { + fn arb_trivial()(element in arb_element()) -> Assigned { + Assigned::Trivial(element) + } + } + + prop_compose! { + /// Generates half of the denominators as zero to represent a deferred inversion. + fn arb_rational()( + numerator in arb_element(), + denominator in prop_oneof![ + 1 => Just(Fp::zero()), + 2 => arb_element(), + ], + ) -> Assigned { + Assigned::Rational(numerator, denominator) + } + } + + prop_compose! { + fn arb_operators(num_unary: usize, num_binary: usize)( + unary in vec(select(UNARY_OPERATORS), num_unary), + binary in vec(select(BINARY_OPERATORS), num_binary), + ) -> Vec { + unary.into_iter() + .map(Operator::Unary) + .chain(binary.into_iter().map(Operator::Binary)) + .collect() + } + } + + prop_compose! { + fn arb_testcase()( + num_unary in 0usize..5, + num_binary in 0usize..5, + )( + values in vec( + prop_oneof![ + 1 => Just(Assigned::Zero), + 2 => arb_trivial(), + 2 => arb_rational(), + ], + // Ensure that: + // - we have at least one value to apply unary operators to. + // - we can apply every binary operator pairwise sequentially. + cmp::max(usize::from(num_unary > 0), num_binary + 1)), + operations in arb_operators(num_unary, num_binary).prop_shuffle(), + ) -> (Vec>, Vec) { + (values, operations) + } + } + + proptest! { + #[test] + fn operation_commutativity((values, operations) in arb_testcase()) { + // Evaluate the values at the start. 
+ let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); + + // Apply the operations to both the deferred and evaluated values. + fn evaluate( + items: Vec, + operators: &[Operator], + ) -> F { + let mut ops = operators.iter(); + + // Process all binary operators. We are guaranteed to have exactly as many + // binary operators as we need calls to the reduction closure. + let mut res = items.into_iter().reduce(|mut a, b| loop { + match ops.next() { + Some(Operator::Unary(op)) => a = op.apply(a), + Some(Operator::Binary(op)) => break op.apply(a, b), + None => unreachable!(), + } + }).unwrap(); + + // Process any unary operators that weren't handled in the reduce() call + // above (either if we only had one item, or there were unary operators + // after the last binary operator). We are guaranteed to have no binary + // operators remaining at this point. + loop { + match ops.next() { + Some(Operator::Unary(op)) => res = op.apply(res), + Some(Operator::Binary(_)) => unreachable!(), + None => break res, + } + } + } + let deferred_result = evaluate(values, &operations); + let evaluated_result = evaluate(elements, &operations); + + // The two should be equal, i.e. deferred inversion should commute with the + // list of operations. 
+ assert_eq!(deferred_result.evaluate(), evaluated_result); + } + } +} diff --git a/halo2_backend/src/plonk/circuit.rs b/halo2_backend/src/plonk/circuit.rs new file mode 100644 index 0000000000..5357fc8016 --- /dev/null +++ b/halo2_backend/src/plonk/circuit.rs @@ -0,0 +1,2015 @@ +use super::{lookup, permutation, shuffle, Queries}; +use crate::dev::metadata; +use crate::poly::Rotation; +use core::cmp::max; +use core::ops::{Add, Mul}; +use ff::Field; +use sealed::SealedPhase; +use std::collections::HashMap; +use std::fmt::Debug; +use std::iter::{Product, Sum}; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +/// A column type +pub trait ColumnType: + 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into +{ + /// Return expression from cell + fn query_cell(&self, index: usize, at: Rotation) -> Expression; +} + +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Column { + index: usize, + column_type: C, +} + +impl Column { + pub(crate) fn new(index: usize, column_type: C) -> Self { + Column { index, column_type } + } + + /// Index of this column. + pub fn index(&self) -> usize { + self.index + } + + /// Type of this column. 
+ pub fn column_type(&self) -> &C { + &self.column_type + } + + /// Return expression from column at a relative position + pub fn query_cell(&self, at: Rotation) -> Expression { + self.column_type.query_cell(self.index, at) + } + + /// Return expression from column at the current row + pub fn cur(&self) -> Expression { + self.query_cell(Rotation::cur()) + } + + /// Return expression from column at the next row + pub fn next(&self) -> Expression { + self.query_cell(Rotation::next()) + } + + /// Return expression from column at the previous row + pub fn prev(&self) -> Expression { + self.query_cell(Rotation::prev()) + } + + /// Return expression from column at the specified rotation + pub fn rot(&self, rotation: i32) -> Expression { + self.query_cell(Rotation(rotation)) + } +} + +impl Ord for Column { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match self.column_type.into().cmp(&other.column_type.into()) { + // Indices are assigned within column types. + std::cmp::Ordering::Equal => self.index.cmp(&other.index), + order => order, + } + } +} + +impl PartialOrd for Column { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +pub(crate) mod sealed { + /// Phase of advice column + #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] + pub struct Phase(pub(crate) u8); + + impl Phase { + pub fn prev(&self) -> Option { + self.0.checked_sub(1).map(Phase) + } + } + + impl SealedPhase for Phase { + fn to_sealed(self) -> Phase { + self + } + } + + /// Sealed trait to help keep `Phase` private. 
+ pub trait SealedPhase { + fn to_sealed(self) -> Phase; + } +} + +/// Phase of advice column +pub trait Phase: SealedPhase {} + +impl Phase for P {} + +/// First phase +#[derive(Debug)] +pub struct FirstPhase; + +impl SealedPhase for super::FirstPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(0) + } +} + +/// Second phase +#[derive(Debug)] +pub struct SecondPhase; + +impl SealedPhase for super::SecondPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(1) + } +} + +/// Third phase +#[derive(Debug)] +pub struct ThirdPhase; + +impl SealedPhase for super::ThirdPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(2) + } +} + +/// An advice column +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub struct Advice { + pub(crate) phase: sealed::Phase, +} + +impl Default for Advice { + fn default() -> Advice { + Advice { + phase: FirstPhase.to_sealed(), + } + } +} + +impl Advice { + /// Returns `Advice` in given `Phase` + pub fn new(phase: P) -> Advice { + Advice { + phase: phase.to_sealed(), + } + } + + /// Phase of this column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +impl std::fmt::Debug for Advice { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. 
+ if self.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &self.phase); + } + debug_struct.finish() + } +} + +/// A fixed column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Fixed; + +/// An instance column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Instance; + +/// An enum over the Advice, Fixed, Instance structs +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub enum Any { + /// An Advice variant + Advice(Advice), + /// A Fixed variant + Fixed, + /// An Instance variant + Instance, +} + +impl Any { + /// Returns Advice variant in `FirstPhase` + pub fn advice() -> Any { + Any::Advice(Advice::default()) + } + + /// Returns Advice variant in given `Phase` + pub fn advice_in(phase: P) -> Any { + Any::Advice(Advice::new(phase)) + } +} + +impl std::fmt::Debug for Any { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Any::Advice(advice) => { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. + if advice.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &advice.phase); + } + debug_struct.finish() + } + Any::Fixed => f.debug_struct("Fixed").finish(), + Any::Instance => f.debug_struct("Instance").finish(), + } + } +} + +impl Ord for Any { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match (self, other) { + (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, + (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), + // Across column types, sort Instance < Advice < Fixed. 
+ (Any::Instance, Any::Advice(_)) + | (Any::Advice(_), Any::Fixed) + | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, + (Any::Fixed, Any::Instance) + | (Any::Fixed, Any::Advice(_)) + | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, + } + } +} + +impl PartialOrd for Any { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl ColumnType for Advice { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + phase: self.phase, + }) + } +} +impl ColumnType for Fixed { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Instance { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Any { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + match self { + Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + phase: *phase, + }), + Any::Fixed => Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }), + Any::Instance => Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }), + } + } +} + +impl From for Any { + fn from(advice: Advice) -> Any { + Any::Advice(advice) + } +} + +impl From for Any { + fn from(_: Fixed) -> Any { + Any::Fixed + } +} + +impl From for Any { + fn from(_: Instance) -> Any { + Any::Instance + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Advice(advice.column_type), + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + 
column_type: Any::Fixed, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Instance, + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Advice(advice) => Ok(Column { + index: any.index(), + column_type: *advice, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Fixed => Ok(Column { + index: any.index(), + column_type: Fixed, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Instance => Ok(Column { + index: any.index(), + column_type: Instance, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl FixedQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, + /// Phase of this advice column + pub phase: 
sealed::Phase, +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, + /// Phase of this advice column + pub(crate) phase: sealed::Phase, +} + +impl AdviceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } + + /// Phase of this advice column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl InstanceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Challenge { + index: usize, + pub(crate) phase: sealed::Phase, +} + +impl Challenge { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. 
+ pub fn phase(&self) -> u8 { + self.phase.0 + } + + /// Return Expression + pub fn expr(&self) -> Expression { + Expression::Challenge(*self) + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ExpressionMid { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQueryMid), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQueryMid), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQueryMid), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl ExpressionMid { + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + use ExpressionMid::*; + match self { + Constant(_) => 0, + Fixed(_) => 1, + Advice(_) => 1, + Instance(_) => 1, + Challenge(_) => 0, + Negated(poly) => poly.degree(), + Sum(a, b) => max(a.degree(), b.degree()), + Product(a, b) => a.degree() + b.degree(), + Scaled(poly, _) => poly.degree(), + } + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. 
+#[derive(Clone, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQuery), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQuery), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQuery), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl Into> for Expression { + fn into(self) -> ExpressionMid { + match self { + Expression::Constant(c) => ExpressionMid::Constant(c), + Expression::Fixed(FixedQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Fixed(FixedQueryMid { + column_index, + rotation, + }), + Expression::Advice(AdviceQuery { + column_index, + rotation, + phase, + .. + }) => ExpressionMid::Advice(AdviceQueryMid { + column_index, + rotation, + phase, + }), + Expression::Instance(InstanceQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Instance(InstanceQueryMid { + column_index, + rotation, + }), + Expression::Challenge(c) => ExpressionMid::Challenge(c), + Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), + Expression::Sum(lhs, rhs) => { + ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Product(lhs, rhs) => { + ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), + } + } +} + +impl Expression { + /// Evaluate the polynomial using the provided closures to perform the + /// operations. 
+ #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + product(a, b) + } + Expression::Scaled(a, f) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + scaled(a, *f) + } + } + } + + /// Evaluate the polynomial lazily using the provided closures to perform the + /// operations. 
+ #[allow(clippy::too_many_arguments)] + pub fn evaluate_lazy( + &self, + constant: &impl Fn(F) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + zero: &T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + let b = b.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let (a, b) = if a.complexity() <= b.complexity() { + (a, b) + } else { + (b, a) + }; + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + + if a == *zero { + a + } else { + let b = b.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + product(a, b) + } + } + Expression::Scaled(a, f) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + scaled(a, *f) + } + } + } + + fn 
write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Fixed(query) => { + write!( + writer, + "fixed[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Advice(query) => { + write!( + writer, + "advice[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Instance(query) => { + write!( + writer, + "instance[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Challenge(challenge) => { + write!(writer, "challenge[{}]", challenge.index()) + } + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Scaled(a, f) => { + a.write_identifier(writer)?; + write!(writer, "*{f:?}") + } + } + } + + /// Identifier for this expression. Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
+ pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.degree(), + Expression::Sum(a, b) => max(a.degree(), b.degree()), + Expression::Product(a, b) => a.degree() + b.degree(), + Expression::Scaled(poly, _) => poly.degree(), + } + } + + /// Approximate the computational complexity of this expression. + pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + Expression::Scaled(poly, _) => poly.complexity() + 30, + } + } + + /// Square this expression. + pub fn square(self) -> Self { + self.clone() * self + } +} + +impl std::fmt::Debug for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), + // Skip enum variant and print query struct directly to maintain backwards compatibility. 
+ Expression::Fixed(query) => { + let mut debug_struct = f.debug_struct("Fixed"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Advice(query) => { + let mut debug_struct = f.debug_struct("Advice"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation); + // Only show advice's phase if it's not in first phase. + if query.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &query.phase); + } + debug_struct.finish() + } + Expression::Instance(query) => { + let mut debug_struct = f.debug_struct("Instance"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Challenge(challenge) => { + f.debug_tuple("Challenge").field(challenge).finish() + } + Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), + Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), + Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), + Expression::Scaled(poly, scalar) => { + f.debug_tuple("Scaled").field(poly).field(scalar).finish() + } + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + 
fn sub(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Scaled(Box::new(self), rhs) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} + +/// Represents an index into a vector where each entry corresponds to a distinct +/// point that polynomials are queried at. +#[derive(Copy, Clone, Debug)] +pub(crate) struct PointIndex(pub usize); + +/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset +/// within a custom gate. +#[derive(Clone, Debug)] +pub struct VirtualCell { + pub(crate) column: Column, + pub(crate) rotation: Rotation, +} + +impl>> From<(Col, Rotation)> for VirtualCell { + fn from((column, rotation): (Col, Rotation)) -> Self { + VirtualCell { + column: column.into(), + rotation, + } + } +} + +/// An individual polynomial constraint. +/// +/// These are returned by the closures passed to `ConstraintSystem::create_gate`. 
+#[derive(Debug)] +pub struct Constraint { + name: String, + poly: Expression, +} + +impl From> for Constraint { + fn from(poly: Expression) -> Self { + Constraint { + name: "".to_string(), + poly, + } + } +} + +impl> From<(S, Expression)> for Constraint { + fn from((name, poly): (S, Expression)) -> Self { + Constraint { + name: name.as_ref().to_string(), + poly, + } + } +} + +impl From> for Vec> { + fn from(poly: Expression) -> Self { + vec![Constraint { + name: "".to_string(), + poly, + }] + } +} + +/// A set of polynomial constraints with a common selector. +/// +/// ``` +/// use halo2_backend::{plonk::{Constraints, Expression}, poly::Rotation}; +/// use halo2curves::pasta::Fp; +/// # use halo2_backend::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let next = meta.query_advice(a, Rotation::next()); +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s_ternary = meta.query_selector(s); +/// +/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); +/// +/// Constraints::with_selector( +/// s_ternary, +/// std::array::IntoIter::new([ +/// ("a is boolean", a.clone() * one_minus_a.clone()), +/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), +/// ]), +/// ) +/// }); +/// ``` +/// +/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to +/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, +/// you can pass an array directly. 
+#[derive(Debug)] +pub struct Constraints>, Iter: IntoIterator> { + selector: Expression, + constraints: Iter, +} + +impl>, Iter: IntoIterator> Constraints { + /// Constructs a set of constraints that are controlled by the given selector. + /// + /// Each constraint `c` in `iterator` will be converted into the constraint + /// `selector * c`. + pub fn with_selector(selector: Expression, constraints: Iter) -> Self { + Constraints { + selector, + constraints, + } + } +} + +fn apply_selector_to_constraint>>( + (selector, c): (Expression, C), +) -> Constraint { + let constraint: Constraint = c.into(); + Constraint { + name: constraint.name, + poly: selector * constraint.poly, + } +} + +type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; +type ConstraintsIterator = std::iter::Map< + std::iter::Zip>, I>, + ApplySelectorToConstraint, +>; + +impl>, Iter: IntoIterator> IntoIterator + for Constraints +{ + type Item = Constraint; + type IntoIter = ConstraintsIterator; + + fn into_iter(self) -> Self::IntoIter { + std::iter::repeat(self.selector) + .zip(self.constraints) + .map(apply_selector_to_constraint) + } +} + +/// A Gate contains a single polynomial identity with a name as metadata. +#[derive(Clone, Debug)] +pub struct GateV2Backend { + name: String, + poly: ExpressionMid, +} + +impl GateV2Backend { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &ExpressionMid { + &self.poly + } +} + +/// Gate +#[derive(Clone, Debug)] +pub struct Gate { + name: String, + constraint_names: Vec, + polys: Vec>, + /// We track queried selectors separately from other cells, so that we can use them to + /// trigger debug checks on gates. + queried_cells: Vec, +} + +impl Gate { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. 
+ pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } +} + +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct PreprocessingV2 { + // TODO(Edu): Can we replace this by a simpler structure? + pub(crate) permutation: permutation::keygen::Assembly, + pub(crate) fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. +#[derive(Debug, Clone)] +pub struct CompiledCircuitV2 { + pub(crate) preprocessing: PreprocessingV2, + pub(crate) cs: ConstraintSystemV2Backend, +} + +struct QueriesMap { + advice_map: HashMap<(Column, Rotation), usize>, + instance_map: HashMap<(Column, Rotation), usize>, + fixed_map: HashMap<(Column, Rotation), usize>, + advice: Vec<(Column, Rotation)>, + instance: Vec<(Column, Rotation)>, + fixed: Vec<(Column, Rotation)>, +} + +impl QueriesMap { + fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { + *self.advice_map.entry((col, rot)).or_insert_with(|| { + self.advice.push((col, rot)); + self.advice.len() - 1 + }) + } + fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { + *self.instance_map.entry((col, rot)).or_insert_with(|| { + self.instance.push((col, rot)); + self.instance.len() - 1 + }) + } + fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { + *self.fixed_map.entry((col, rot)).or_insert_with(|| { + self.fixed.push((col, rot)); + self.fixed.len() - 1 + }) + } +} + +impl QueriesMap { + fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { + match expr { + ExpressionMid::Constant(c) => Expression::Constant(*c), + ExpressionMid::Fixed(query) => { + let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); + let index = self.add_fixed(col, rot); + 
Expression::Fixed(FixedQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Advice(query) => { + let (col, rot) = ( + Column::new(query.column_index, Advice { phase: query.phase }), + query.rotation, + ); + let index = self.add_advice(col, rot); + Expression::Advice(AdviceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + phase: query.phase, + }) + } + ExpressionMid::Instance(query) => { + let (col, rot) = (Column::new(query.column_index, Instance), query.rotation); + let index = self.add_instance(col, rot); + Expression::Instance(InstanceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Challenge(c) => Expression::Challenge(*c), + ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), + ExpressionMid::Sum(lhs, rhs) => Expression::Sum( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Product(lhs, rhs) => Expression::Product( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), + } + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemV2Backend { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. 
+ pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, +} + +impl Into> for ConstraintSystem { + fn into(self) -> ConstraintSystemV2Backend { + ConstraintSystemV2Backend { + num_fixed_columns: self.num_fixed_columns, + num_advice_columns: self.num_advice_columns, + num_instance_columns: self.num_instance_columns, + num_challenges: self.num_challenges, + unblinded_advice_columns: self.unblinded_advice_columns.clone(), + advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), + gates: self + .gates + .iter() + .map(|g| { + g.polys.clone().into_iter().enumerate().map(|(i, e)| { + let name = match g.constraint_name(i) { + "" => g.name.clone(), + constraint_name => format!("{}:{}", g.name, constraint_name), + }; + GateV2Backend { + name, + poly: e.into(), + } + }) + }) + .flatten() + .collect(), + permutation: self.permutation.clone(), + lookups: self + .lookups + .iter() + .map(|l| lookup::ArgumentV2 { + name: l.name.clone(), + input_expressions: l + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + table_expressions: l + .table_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + shuffles: self + .shuffles + .iter() + .map(|s| shuffle::ArgumentV2 { + name: 
s.name.clone(), + input_expressions: s + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + shuffle_expressions: s + .shuffle_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + general_column_annotations: self.general_column_annotations.clone(), + } + } +} + +impl ConstraintSystemV2Backend { + /// Collect queries used in gates while mapping those gates to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { + self.gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], + queried_cells: Vec::new(), // Unused? + }) + .collect() + } + + /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { + self.lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() + } + + /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { + self.shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() + } + + /// Collect all queries used in the expressions of gates, lookups and shuffles. 
Map the + /// expressions of gates, lookups and shuffles into equivalent ones with indexed query + /// references. + pub(crate) fn collect_queries( + &self, + ) -> ( + Queries, + Vec>, + Vec>, + Vec>, + ) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = self.collect_queries_gates(&mut queries); + let lookups = self.collect_queries_lookups(&mut queries); + let shuffles = self.collect_queries_shuffles(&mut queries); + + // Each column used in a copy constraint involves a query at rotation current. + for column in self.permutation.get_columns() { + match column.column_type { + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => { + queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) + } + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } + }; + } + + let mut num_advice_queries = vec![0; self.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; + } + + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystem { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. 
+ pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. + pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(Column, Rotation)>, + pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, + + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. 
+ pub(crate) constants: Vec>, + + pub(crate) minimum_degree: Option, +} + +impl From> for ConstraintSystem { + fn from(cs2: ConstraintSystemV2Backend) -> Self { + let (queries, gates, lookups, shuffles) = cs2.collect_queries(); + ConstraintSystem { + num_fixed_columns: cs2.num_fixed_columns, + num_advice_columns: cs2.num_advice_columns, + num_instance_columns: cs2.num_instance_columns, + num_selectors: 0, + num_challenges: cs2.num_challenges, + unblinded_advice_columns: cs2.unblinded_advice_columns, + advice_column_phase: cs2 + .advice_column_phase + .into_iter() + .map(sealed::Phase) + .collect(), + challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), + gates, + advice_queries: queries.advice, + num_advice_queries: queries.num_advice_queries, + instance_queries: queries.instance, + fixed_queries: queries.fixed, + permutation: cs2.permutation, + lookups, + shuffles, + general_column_annotations: cs2.general_column_annotations, + constants: Vec::new(), + minimum_degree: None, + } + } +} + +/// Represents the minimal parameters that determine a `ConstraintSystem`. 
+#[allow(dead_code)] +pub struct PinnedConstraintSystem<'a, F: Field> { + num_fixed_columns: &'a usize, + num_advice_columns: &'a usize, + num_instance_columns: &'a usize, + num_selectors: &'a usize, + num_challenges: &'a usize, + advice_column_phase: &'a Vec, + challenge_phase: &'a Vec, + gates: PinnedGates<'a, F>, + advice_queries: &'a Vec<(Column, Rotation)>, + instance_queries: &'a Vec<(Column, Rotation)>, + fixed_queries: &'a Vec<(Column, Rotation)>, + permutation: &'a permutation::Argument, + lookups: &'a Vec>, + shuffles: &'a Vec>, + constants: &'a Vec>, + minimum_degree: &'a Option, +} + +impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); + debug_struct + .field("num_fixed_columns", self.num_fixed_columns) + .field("num_advice_columns", self.num_advice_columns) + .field("num_instance_columns", self.num_instance_columns) + .field("num_selectors", self.num_selectors); + // Only show multi-phase related fields if it's used. 
+ if *self.num_challenges > 0 { + debug_struct + .field("num_challenges", self.num_challenges) + .field("advice_column_phase", self.advice_column_phase) + .field("challenge_phase", self.challenge_phase); + } + debug_struct + .field("gates", &self.gates) + .field("advice_queries", self.advice_queries) + .field("instance_queries", self.instance_queries) + .field("fixed_queries", self.fixed_queries) + .field("permutation", self.permutation) + .field("lookups", self.lookups); + if !self.shuffles.is_empty() { + debug_struct.field("shuffles", self.shuffles); + } + debug_struct + .field("constants", self.constants) + .field("minimum_degree", self.minimum_degree); + debug_struct.finish() + } +} + +struct PinnedGates<'a, F: Field>(&'a Vec>); + +impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + f.debug_list() + .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) + .finish() + } +} + +impl Default for ConstraintSystem { + fn default() -> ConstraintSystem { + ConstraintSystem { + num_fixed_columns: 0, + num_advice_columns: 0, + num_instance_columns: 0, + num_selectors: 0, + num_challenges: 0, + unblinded_advice_columns: Vec::new(), + advice_column_phase: Vec::new(), + challenge_phase: Vec::new(), + gates: vec![], + fixed_queries: Vec::new(), + advice_queries: Vec::new(), + num_advice_queries: Vec::new(), + instance_queries: Vec::new(), + permutation: permutation::Argument::new(), + lookups: Vec::new(), + shuffles: Vec::new(), + general_column_annotations: HashMap::new(), + constants: vec![], + minimum_degree: None, + } + } +} + +impl ConstraintSystem { + /// Obtain a pinned version of this constraint system; a structure with the + /// minimal parameters needed to determine the rest of the constraint + /// system. 
+ pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { + PinnedConstraintSystem { + num_fixed_columns: &self.num_fixed_columns, + num_advice_columns: &self.num_advice_columns, + num_instance_columns: &self.num_instance_columns, + num_selectors: &self.num_selectors, + num_challenges: &self.num_challenges, + advice_column_phase: &self.advice_column_phase, + challenge_phase: &self.challenge_phase, + gates: PinnedGates(&self.gates), + fixed_queries: &self.fixed_queries, + advice_queries: &self.advice_queries, + instance_queries: &self.instance_queries, + permutation: &self.permutation, + lookups: &self.lookups, + shuffles: &self.shuffles, + constants: &self.constants, + minimum_degree: &self.minimum_degree, + } + } + + pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + panic!("get_advice_query_index called for non-existent query"); + } + + pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + panic!("get_fixed_query_index called for non-existent query"); + } + + pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + panic!("get_instance_query_index called for non-existent query"); + } + + pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice(_) => { + self.get_advice_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => { + self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Instance => { + 
self.get_instance_query_index(Column::::try_from(column).unwrap(), at) + } + } + } + + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { + let max_phase = self + .advice_column_phase + .iter() + .max() + .map(|phase| phase.0) + .unwrap_or_default(); + (0..=max_phase).map(sealed::Phase) + } + + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = self.permutation.required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. + degree = std::cmp::max( + degree, + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0), + ); + + std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. 
+ let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) 
+ + 1 // for at least one row + } + + /// Returns number of fixed columns + pub fn num_fixed_columns(&self) -> usize { + self.num_fixed_columns + } + + /// Returns number of advice columns + pub fn num_advice_columns(&self) -> usize { + self.num_advice_columns + } + + /// Returns number of instance columns + pub fn num_instance_columns(&self) -> usize { + self.num_instance_columns + } + + /// Returns number of selectors + pub fn num_selectors(&self) -> usize { + self.num_selectors + } + + /// Returns number of challenges + pub fn num_challenges(&self) -> usize { + self.num_challenges + } + + /// Returns phase of advice columns + pub fn advice_column_phase(&self) -> Vec { + self.advice_column_phase + .iter() + .map(|phase| phase.0) + .collect() + } + + /// Returns phase of challenges + pub fn challenge_phase(&self) -> Vec { + self.challenge_phase.iter().map(|phase| phase.0).collect() + } + + /// Returns gates + pub fn gates(&self) -> &Vec> { + &self.gates + } + + /// Returns general column annotations + pub fn general_column_annotations(&self) -> &HashMap { + &self.general_column_annotations + } + + /// Returns advice queries + pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { + &self.advice_queries + } + + /// Returns instance queries + pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { + &self.instance_queries + } + + /// Returns fixed queries + pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { + &self.fixed_queries + } + + /// Returns permutation argument + pub fn permutation(&self) -> &permutation::Argument { + &self.permutation + } + + /// Returns lookup arguments + pub fn lookups(&self) -> &Vec> { + &self.lookups + } + + /// Returns shuffle arguments + pub fn shuffles(&self) -> &Vec> { + &self.shuffles + } + + /// Returns constants + pub fn constants(&self) -> &Vec> { + &self.constants + } +} + +#[cfg(test)] +mod tests { + use super::Expression; + use halo2curves::bn256::Fr; + + #[test] + fn iter_sum() { + let exprs: Vec> = vec![ + 
Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().sum(); + let expected: Expression = Expression::Sum( + Box::new(Expression::Sum( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } + + #[test] + fn iter_product() { + let exprs: Vec> = vec![ + Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().product(); + let expected: Expression = Expression::Product( + Box::new(Expression::Product( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } +} diff --git a/halo2_backend/src/plonk/error.rs b/halo2_backend/src/plonk/error.rs new file mode 100644 index 0000000000..93f21bf40d --- /dev/null +++ b/halo2_backend/src/plonk/error.rs @@ -0,0 +1,93 @@ +use std::error; +use std::fmt; +use std::io; + +use super::{Any, Column}; + +/// This is an error that could occur during proving or circuit synthesis. +// TODO: these errors need to be cleaned up +#[derive(Debug)] +pub enum Error { + /// This is an error that can occur during synthesis of the circuit, for + /// example, when the witness is not present. + Synthesis, + /// The provided instances do not match the circuit parameters. + InvalidInstances, + /// The constraint system is not satisfied. + ConstraintSystemFailure, + /// Out of bounds index passed to a backend + BoundsFailure, + /// Opening error + Opening, + /// Transcript error + Transcript(io::Error), + /// `k` is too small for the given circuit. + NotEnoughRowsAvailable { + /// The current value of `k` being used. 
+ current_k: u32, + }, + /// Instance provided exceeds number of available rows + InstanceTooLarge, + /// Circuit synthesis requires global constants, but circuit configuration did not + /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. + /// + /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant + NotEnoughColumnsForConstants, + /// The instance sets up a copy constraint involving a column that has not been + /// included in the permutation. + ColumnNotInPermutation(Column), + /// Generic error not covered by previous cases + Other(String), +} + +impl From for Error { + fn from(error: io::Error) -> Self { + // The only place we can get io::Error from is the transcript. + Error::Transcript(error) + } +} + +impl Error { + /// Constructs an `Error::NotEnoughRowsAvailable`. + pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { + Error::NotEnoughRowsAvailable { current_k } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::Synthesis => write!(f, "General synthesis error"), + Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), + Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), + Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), + Error::Opening => write!(f, "Multi-opening proof was invalid"), + Error::Transcript(e) => write!(f, "Transcript error: {e}"), + Error::NotEnoughRowsAvailable { current_k } => write!( + f, + "k = {current_k} is too small for the given circuit. 
Try using a larger value of k", + ), + Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), + Error::NotEnoughColumnsForConstants => { + write!( + f, + "Too few fixed columns are enabled for global constants usage" + ) + } + Error::ColumnNotInPermutation(column) => write!( + f, + "Column {column:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", + ), + Error::Other(error) => write!(f, "Other: {error}"), + } + } +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Error::Transcript(e) => Some(e), + _ => None, + } + } +} diff --git a/halo2_backend/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs new file mode 100644 index 0000000000..aeeb587a64 --- /dev/null +++ b/halo2_backend/src/plonk/evaluation.rs @@ -0,0 +1,869 @@ +use crate::multicore; +use crate::plonk::{lookup, permutation, Any, ProvingKey}; +use crate::poly::Basis; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, +}; +use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; + +use super::{shuffle, ConstraintSystem, Expression}; + +/// Return the index in the polynomial of size `isize` after rotation `rot`. 
+fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { + (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize +} + +/// Value used in a calculation +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] +pub enum ValueSource { + /// This is a constant value + Constant(usize), + /// This is an intermediate value + Intermediate(usize), + /// This is a fixed column + Fixed(usize, usize), + /// This is an advice (witness) column + Advice(usize, usize), + /// This is an instance (external) column + Instance(usize, usize), + /// This is a challenge + Challenge(usize), + /// beta + Beta(), + /// gamma + Gamma(), + /// theta + Theta(), + /// y + Y(), + /// Previous value + PreviousValue(), +} + +impl Default for ValueSource { + fn default() -> Self { + ValueSource::Constant(0) + } +} + +impl ValueSource { + /// Get the value for this source + #[allow(clippy::too_many_arguments)] + pub fn get( + &self, + rotations: &[usize], + constants: &[F], + intermediates: &[F], + fixed_values: &[Polynomial], + advice_values: &[Polynomial], + instance_values: &[Polynomial], + challenges: &[F], + beta: &F, + gamma: &F, + theta: &F, + y: &F, + previous_value: &F, + ) -> F { + match self { + ValueSource::Constant(idx) => constants[*idx], + ValueSource::Intermediate(idx) => intermediates[*idx], + ValueSource::Fixed(column_index, rotation) => { + fixed_values[*column_index][rotations[*rotation]] + } + ValueSource::Advice(column_index, rotation) => { + advice_values[*column_index][rotations[*rotation]] + } + ValueSource::Instance(column_index, rotation) => { + instance_values[*column_index][rotations[*rotation]] + } + ValueSource::Challenge(index) => challenges[*index], + ValueSource::Beta() => *beta, + ValueSource::Gamma() => *gamma, + ValueSource::Theta() => *theta, + ValueSource::Y() => *y, + ValueSource::PreviousValue() => *previous_value, + } + } +} + +/// Calculation +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Calculation { + /// This is an 
addition + Add(ValueSource, ValueSource), + /// This is a subtraction + Sub(ValueSource, ValueSource), + /// This is a product + Mul(ValueSource, ValueSource), + /// This is a square + Square(ValueSource), + /// This is a double + Double(ValueSource), + /// This is a negation + Negate(ValueSource), + /// This is Horner's rule: `val = a; val = val * c + b[]` + Horner(ValueSource, Vec, ValueSource), + /// This is a simple assignment + Store(ValueSource), +} + +impl Calculation { + /// Get the resulting value of this calculation + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + rotations: &[usize], + constants: &[F], + intermediates: &[F], + fixed_values: &[Polynomial], + advice_values: &[Polynomial], + instance_values: &[Polynomial], + challenges: &[F], + beta: &F, + gamma: &F, + theta: &F, + y: &F, + previous_value: &F, + ) -> F { + let get_value = |value: &ValueSource| { + value.get( + rotations, + constants, + intermediates, + fixed_values, + advice_values, + instance_values, + challenges, + beta, + gamma, + theta, + y, + previous_value, + ) + }; + match self { + Calculation::Add(a, b) => get_value(a) + get_value(b), + Calculation::Sub(a, b) => get_value(a) - get_value(b), + Calculation::Mul(a, b) => get_value(a) * get_value(b), + Calculation::Square(v) => get_value(v).square(), + Calculation::Double(v) => get_value(v).double(), + Calculation::Negate(v) => -get_value(v), + Calculation::Horner(start_value, parts, factor) => { + let factor = get_value(factor); + let mut value = get_value(start_value); + for part in parts.iter() { + value = value * factor + get_value(part); + } + value + } + Calculation::Store(v) => get_value(v), + } + } +} + +/// Evaluator +#[derive(Clone, Default, Debug)] +pub struct Evaluator { + /// Custom gates evaluation + pub custom_gates: GraphEvaluator, + /// Lookups evaluation + pub lookups: Vec>, + /// Shuffle evaluation + pub shuffles: Vec>, +} + +/// GraphEvaluator +#[derive(Clone, Debug)] +pub struct GraphEvaluator { + 
/// Constants + pub constants: Vec, + /// Rotations + pub rotations: Vec, + /// Calculations + pub calculations: Vec, + /// Number of intermediates + pub num_intermediates: usize, +} + +/// EvaluationData +#[derive(Default, Debug)] +pub struct EvaluationData { + /// Intermediates + pub intermediates: Vec, + /// Rotations + pub rotations: Vec, +} + +/// CalculationInfo +#[derive(Clone, Debug)] +pub struct CalculationInfo { + /// Calculation + pub calculation: Calculation, + /// Target + pub target: usize, +} + +impl Evaluator { + /// Creates a new evaluation structure + pub fn new(cs: &ConstraintSystem) -> Self { + let mut ev = Evaluator::default(); + + // Custom gates + let mut parts = Vec::new(); + for gate in cs.gates.iter() { + parts.extend( + gate.polynomials() + .iter() + .map(|poly| ev.custom_gates.add_expression(poly)), + ); + } + ev.custom_gates.add_calculation(Calculation::Horner( + ValueSource::PreviousValue(), + parts, + ValueSource::Y(), + )); + + // Lookups + for lookup in cs.lookups.iter() { + let mut graph = GraphEvaluator::default(); + + let mut evaluate_lc = |expressions: &Vec>| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + // Input coset + let compressed_input_coset = evaluate_lc(&lookup.input_expressions); + // table coset + let compressed_table_coset = evaluate_lc(&lookup.table_expressions); + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + let right_gamma = graph.add_calculation(Calculation::Add( + compressed_table_coset, + ValueSource::Gamma(), + )); + let lc = graph.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Beta(), + )); + graph.add_calculation(Calculation::Mul(lc, right_gamma)); + + ev.lookups.push(graph); + } + + // Shuffles + for shuffle in cs.shuffles.iter() { + let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { + let 
parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + let mut graph_input = GraphEvaluator::default(); + let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); + let _ = graph_input.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Gamma(), + )); + + let mut graph_shuffle = GraphEvaluator::default(); + let compressed_shuffle_coset = + evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); + let _ = graph_shuffle.add_calculation(Calculation::Add( + compressed_shuffle_coset, + ValueSource::Gamma(), + )); + + ev.shuffles.push(graph_input); + ev.shuffles.push(graph_shuffle); + } + + ev + } + + /// Evaluate h poly + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn evaluate_h( + &self, + pk: &ProvingKey, + advice_polys: &[&[Polynomial]], + instance_polys: &[&[Polynomial]], + challenges: &[C::ScalarExt], + y: C::ScalarExt, + beta: C::ScalarExt, + gamma: C::ScalarExt, + theta: C::ScalarExt, + lookups: &[Vec>], + shuffles: &[Vec>], + permutations: &[permutation::prover::Committed], + ) -> Polynomial { + let domain = &pk.vk.domain; + let size = domain.extended_len(); + let rot_scale = 1 << (domain.extended_k() - domain.k()); + let fixed = &pk.fixed_cosets[..]; + let extended_omega = domain.get_extended_omega(); + let isize = size as i32; + let one = C::ScalarExt::ONE; + let l0 = &pk.l0; + let l_last = &pk.l_last; + let l_active_row = &pk.l_active_row; + let p = &pk.vk.cs.permutation; + + // Calculate the advice and instance cosets + let advice: Vec>> = advice_polys + .iter() + .map(|advice_polys| { + advice_polys + .iter() + .map(|poly| domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + let instance: Vec>> = instance_polys + .iter() + .map(|instance_polys| { + instance_polys + .iter() + .map(|poly| 
domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + + let mut values = domain.empty_extended(); + + // Core expression evaluations + let num_threads = multicore::current_num_threads(); + for ((((advice, instance), lookups), shuffles), permutation) in advice + .iter() + .zip(instance.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .zip(permutations.iter()) + { + // Custom gates + multicore::scope(|scope| { + let chunk_size = (size + num_threads - 1) / num_threads; + for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { + let start = thread_idx * chunk_size; + scope.spawn(move |_| { + let mut eval_data = self.custom_gates.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + *value = self.custom_gates.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + value, + idx, + rot_scale, + isize, + ); + } + }); + } + }); + + // Permutations + let sets = &permutation.sets; + if !sets.is_empty() { + let blinding_factors = pk.vk.cs.blinding_factors(); + let last_rotation = Rotation(-((blinding_factors + 1) as i32)); + let chunk_len = pk.vk.cs.degree() - 2; + let delta_start = beta * &C::Scalar::ZETA; + + let first_set = sets.first().unwrap(); + let last_set = sets.last().unwrap(); + + // Permutation constraints + parallelize(&mut values, |values, start| { + let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); + + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + *value = *value * y + + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); + // Enforce only for the last set. 
+ // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + *value = *value * y + + ((last_set.permutation_product_coset[idx] + * last_set.permutation_product_coset[idx] + - last_set.permutation_product_coset[idx]) + * l_last[idx]); + // Except for the first set, enforce. + // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + for (set_idx, set) in sets.iter().enumerate() { + if set_idx != 0 { + *value = *value * y + + ((set.permutation_product_coset[idx] + - permutation.sets[set_idx - 1].permutation_product_coset + [r_last]) + * l0[idx]); + } + } + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) + // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) + // ) + let mut current_delta = delta_start * beta_term; + for ((set, columns), cosets) in sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(pk.permutation.cosets.chunks(chunk_len)) + { + let mut left = set.permutation_product_coset[r_next]; + for (values, permutation) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => &fixed[column.index()], + Any::Instance => &instance[column.index()], + }) + .zip(cosets.iter()) + { + left *= values[idx] + beta * permutation[idx] + gamma; + } + + let mut right = set.permutation_product_coset[idx]; + for values in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => &fixed[column.index()], + Any::Instance => &instance[column.index()], + }) { + right *= values[idx] + current_delta + gamma; + current_delta *= &C::Scalar::DELTA; + } + + *value = *value * y + ((left - right) * l_active_row[idx]); + } + beta_term *= &extended_omega; + } + }); + } + + // Lookups + for (n, lookup) in lookups.iter().enumerate() { + // Polynomials required for this lookup. + // Calculated here so these only have to be kept in memory for the short time + // they are actually needed. 
+ let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); + let permuted_input_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_input_poly.clone()); + let permuted_table_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_table_poly.clone()); + + // Lookup constraints + parallelize(&mut values, |values, start| { + let lookup_evaluator = &self.lookups[n]; + let mut eval_data = lookup_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let table_value = lookup_evaluator.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); + + let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * (z(X)^2 - z(X)) = 0 + *value = *value * y + + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) + // (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + // ) = 0 + *value = *value * y + + ((product_coset[r_next] + * (permuted_input_coset[idx] + beta) + * (permuted_table_coset[idx] + gamma) + - product_coset[idx] * table_value) + * l_active_row[idx]); + // Check that the first values in the permuted input expression and permuted + // fixed expression are the same. + // l_0(X) * (a'(X) - s'(X)) = 0 + *value = *value * y + (a_minus_s * l0[idx]); + // Check that each value in the permuted lookup input expression is either + // equal to the value above it, or the value at the same index in the + // permuted table expression. 
+ // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + *value = *value * y + + (a_minus_s + * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) + * l_active_row[idx]); + } + }); + } + + // Shuffle constraints + for (n, shuffle) in shuffles.iter().enumerate() { + let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); + + // Shuffle constraints + parallelize(&mut values, |values, start| { + let input_evaluator = &self.shuffles[2 * n]; + let shuffle_evaluator = &self.shuffles[2 * n + 1]; + let mut eval_data_input = shuffle_evaluator.instance(); + let mut eval_data_shuffle = shuffle_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let input_value = input_evaluator.evaluate( + &mut eval_data_input, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let shuffle_value = shuffle_evaluator.evaluate( + &mut eval_data_shuffle, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * (z(X)^2 - z(X)) = 0 + *value = *value * y + + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 + *value = *value * y + + l_active_row[idx] + * (product_coset[r_next] * shuffle_value + - product_coset[idx] * input_value) + } + }); + } + } + values + } +} + +impl Default for GraphEvaluator { + fn default() -> Self { + Self { + // Fixed positions to allow easy access + constants: vec![ + C::ScalarExt::ZERO, + C::ScalarExt::ONE, + C::ScalarExt::from(2u64), + ], + rotations: Vec::new(), + calculations: Vec::new(), + 
num_intermediates: 0, + } + } +} + +impl GraphEvaluator { + /// Adds a rotation + fn add_rotation(&mut self, rotation: &Rotation) -> usize { + let position = self.rotations.iter().position(|&c| c == rotation.0); + match position { + Some(pos) => pos, + None => { + self.rotations.push(rotation.0); + self.rotations.len() - 1 + } + } + } + + /// Adds a constant + fn add_constant(&mut self, constant: &C::ScalarExt) -> ValueSource { + let position = self.constants.iter().position(|&c| c == *constant); + ValueSource::Constant(match position { + Some(pos) => pos, + None => { + self.constants.push(*constant); + self.constants.len() - 1 + } + }) + } + + /// Adds a calculation. + /// Currently does the simplest thing possible: just stores the + /// resulting value so the result can be reused when that calculation + /// is done multiple times. + fn add_calculation(&mut self, calculation: Calculation) -> ValueSource { + let existing_calculation = self + .calculations + .iter() + .find(|c| c.calculation == calculation); + match existing_calculation { + Some(existing_calculation) => ValueSource::Intermediate(existing_calculation.target), + None => { + let target = self.num_intermediates; + self.calculations.push(CalculationInfo { + calculation, + target, + }); + self.num_intermediates += 1; + ValueSource::Intermediate(target) + } + } + } + + /// Generates an optimized evaluation for the expression + fn add_expression(&mut self, expr: &Expression) -> ValueSource { + match expr { + Expression::Constant(scalar) => self.add_constant(scalar), + Expression::Fixed(query) => { + let rot_idx = self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Fixed( + query.column_index, + rot_idx, + ))) + } + Expression::Advice(query) => { + let rot_idx = self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Advice( + query.column_index, + rot_idx, + ))) + } + Expression::Instance(query) => { + let rot_idx = 
self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Instance( + query.column_index, + rot_idx, + ))) + } + Expression::Challenge(challenge) => self.add_calculation(Calculation::Store( + ValueSource::Challenge(challenge.index()), + )), + Expression::Negated(a) => match **a { + Expression::Constant(scalar) => self.add_constant(&-scalar), + _ => { + let result_a = self.add_expression(a); + match result_a { + ValueSource::Constant(0) => result_a, + _ => self.add_calculation(Calculation::Negate(result_a)), + } + } + }, + Expression::Sum(a, b) => { + // Undo subtraction stored as a + (-b) in expressions + match &**b { + Expression::Negated(b_int) => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b_int); + if result_a == ValueSource::Constant(0) { + self.add_calculation(Calculation::Negate(result_b)) + } else if result_b == ValueSource::Constant(0) { + result_a + } else { + self.add_calculation(Calculation::Sub(result_a, result_b)) + } + } + _ => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b); + if result_a == ValueSource::Constant(0) { + result_b + } else if result_b == ValueSource::Constant(0) { + result_a + } else if result_a <= result_b { + self.add_calculation(Calculation::Add(result_a, result_b)) + } else { + self.add_calculation(Calculation::Add(result_b, result_a)) + } + } + } + } + Expression::Product(a, b) => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b); + if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) { + ValueSource::Constant(0) + } else if result_a == ValueSource::Constant(1) { + result_b + } else if result_b == ValueSource::Constant(1) { + result_a + } else if result_a == ValueSource::Constant(2) { + self.add_calculation(Calculation::Double(result_b)) + } else if result_b == ValueSource::Constant(2) { + self.add_calculation(Calculation::Double(result_a)) + } else if result_a == 
result_b { + self.add_calculation(Calculation::Square(result_a)) + } else if result_a <= result_b { + self.add_calculation(Calculation::Mul(result_a, result_b)) + } else { + self.add_calculation(Calculation::Mul(result_b, result_a)) + } + } + Expression::Scaled(a, f) => { + if *f == C::ScalarExt::ZERO { + ValueSource::Constant(0) + } else if *f == C::ScalarExt::ONE { + self.add_expression(a) + } else { + let cst = self.add_constant(f); + let result_a = self.add_expression(a); + self.add_calculation(Calculation::Mul(result_a, cst)) + } + } + } + } + + /// Creates a new evaluation structure + pub fn instance(&self) -> EvaluationData { + EvaluationData { + intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates], + rotations: vec![0usize; self.rotations.len()], + } + } + + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + data: &mut EvaluationData, + fixed: &[Polynomial], + advice: &[Polynomial], + instance: &[Polynomial], + challenges: &[C::ScalarExt], + beta: &C::ScalarExt, + gamma: &C::ScalarExt, + theta: &C::ScalarExt, + y: &C::ScalarExt, + previous_value: &C::ScalarExt, + idx: usize, + rot_scale: i32, + isize: i32, + ) -> C::ScalarExt { + // All rotation index values + for (rot_idx, rot) in self.rotations.iter().enumerate() { + data.rotations[rot_idx] = get_rotation_idx(idx, *rot, rot_scale, isize); + } + + // All calculations, with cached intermediate results + for calc in self.calculations.iter() { + data.intermediates[calc.target] = calc.calculation.evaluate( + &data.rotations, + &self.constants, + &data.intermediates, + fixed, + advice, + instance, + challenges, + beta, + gamma, + theta, + y, + previous_value, + ); + } + + // Return the result of the last calculation (if any) + if let Some(calc) = self.calculations.last() { + data.intermediates[calc.target] + } else { + C::ScalarExt::ZERO + } + } +} + +/// Simple evaluation of an expression +pub fn evaluate( + expression: &Expression, + size: usize, + rot_scale: i32, + fixed: 
&[Polynomial], + advice: &[Polynomial], + instance: &[Polynomial], + challenges: &[F], +) -> Vec { + let mut values = vec![F::ZERO; size]; + let isize = size as i32; + parallelize(&mut values, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + *value = expression.evaluate( + &|scalar| scalar, + &|query| { + fixed[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|query| { + advice[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|query| { + instance[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * b, + &|a, scalar| a * scalar, + ); + } + }); + values +} diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs new file mode 100644 index 0000000000..f39db6c5de --- /dev/null +++ b/halo2_backend/src/plonk/keygen.rs @@ -0,0 +1,159 @@ +#![allow(clippy::int_plus_one)] + +use ff::{Field, FromUniformBytes}; +use group::Curve; + +use super::{ + circuit::{CompiledCircuitV2, ConstraintSystem}, + evaluation::Evaluator, + Error, Polynomial, ProvingKey, VerifyingKey, +}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; + +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
+pub fn keygen_vk_v2<'params, C, P>( + params: &P, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + C::Scalar: FromUniformBytes<64>, +{ + let cs2 = &circuit.cs; + let cs: ConstraintSystem = cs2.clone().into(); + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let permutation_vk = + circuit + .preprocessing + .permutation + .clone() + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + params + .commit_lagrange( + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + .to_affine() + }) + .collect(); + + Ok(VerifyingKey::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs, + Vec::new(), + false, + )) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. 
+pub fn keygen_pk_v2<'params, C, P>( + params: &P, + vk: VerifyingKey, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < vk.cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) + .collect(); + + let fixed_cosets = fixed_polys + .iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let permutation_pk = + circuit + .preprocessing + .permutation + .clone() + .build_pk(params, &vk.domain, &cs.permutation); + + // Compute l_0(X) + // TODO: this can be done more efficiently + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new(&vk.cs); + + Ok(ProvingKey { + vk, + l0, + l_last, + l_active_row, + fixed_values: circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(), + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} diff --git a/halo2_backend/src/plonk/lookup.rs b/halo2_backend/src/plonk/lookup.rs new file mode 100644 index 0000000000..97be4b36e0 --- /dev/null +++ b/halo2_backend/src/plonk/lookup.rs @@ -0,0 +1,108 @@ +use super::circuit::{Expression, ExpressionMid}; +use ff::Field; +use std::fmt::{self, Debug}; + +pub(crate) mod prover; +pub(crate) mod verifier; + +/// Expressions involved in a lookup argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) table_expressions: Vec>, +} + +/// Expressions involved in a lookup argument, with a name as metadata. 
+#[derive(Clone)] +pub struct Argument { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) table_expressions: Vec>, +} + +impl Debug for Argument { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Argument") + .field("input_expressions", &self.input_expressions) + .field("table_expressions", &self.table_expressions) + .finish() + } +} + +impl Argument { + /// Constructs a new lookup argument. + /// + /// `table_map` is a sequence of `(input, table)` tuples. + pub fn new>(name: S, table_map: Vec<(Expression, Expression)>) -> Self { + let (input_expressions, table_expressions) = table_map.into_iter().unzip(); + Argument { + name: name.as_ref().to_string(), + input_expressions, + table_expressions, + } + } + + pub(crate) fn required_degree(&self) -> usize { + assert_eq!(self.input_expressions.len(), self.table_expressions.len()); + + // The first value in the permutation poly should be one. + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // The "last" value in the permutation poly should be a boolean, for + // completeness and soundness. + // degree 3: + // l_last(X) * (z(X)^2 - z(X)) = 0 + // + // Enable the permutation argument for only the rows involved. + // degree (2 + input_degree + table_degree) or 4, whichever is larger: + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + // ) = 0 + // + // The first two values of a' and s' should be the same. + // degree 2: + // l_0(X) * (a'(X) - s'(X)) = 0 + // + // Either the two values are the same, or the previous + // value of a' is the same as the current value. 
+ // degree 3: + // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + let mut input_degree = 1; + for expr in self.input_expressions.iter() { + input_degree = std::cmp::max(input_degree, expr.degree()); + } + let mut table_degree = 1; + for expr in self.table_expressions.iter() { + table_degree = std::cmp::max(table_degree, expr.degree()); + } + + // In practice because input_degree and table_degree are initialized to + // one, the latter half of this max() invocation is at least 4 always, + // rendering this call pointless except to be explicit in case we change + // the initialization of input_degree/table_degree in the future. + std::cmp::max( + // (1 - (l_last + l_blind)) z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + 4, + // (1 - (l_last + l_blind)) z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + 2 + input_degree + table_degree, + ) + } + + /// Returns input of this argument + pub fn input_expressions(&self) -> &Vec> { + &self.input_expressions + } + + /// Returns table of this argument + pub fn table_expressions(&self) -> &Vec> { + &self.table_expressions + } + + /// Returns name of this argument + pub fn name(&self) -> &str { + &self.name + } +} diff --git a/halo2_backend/src/plonk/lookup/prover.rs b/halo2_backend/src/plonk/lookup/prover.rs new file mode 100644 index 0000000000..028b298853 --- /dev/null +++ b/halo2_backend/src/plonk/lookup/prover.rs @@ -0,0 +1,475 @@ +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, + ProvingKey, +}; +use super::Argument; +use crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use ff::WithSmallOrderMulGroup; +use group::{ + ff::{BatchInvert, 
Field}, + Curve, +}; +use rand_core::RngCore; +use std::{ + collections::BTreeMap, + iter, + ops::{Mul, MulAssign}, +}; + +#[derive(Debug)] +pub(in crate::plonk) struct Permuted { + compressed_input_expression: Polynomial, + permuted_input_expression: Polynomial, + permuted_input_poly: Polynomial, + permuted_input_blind: Blind, + compressed_table_expression: Polynomial, + permuted_table_expression: Polynomial, + permuted_table_poly: Polynomial, + permuted_table_blind: Blind, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) permuted_input_poly: Polynomial, + permuted_input_blind: Blind, + pub(in crate::plonk) permuted_table_poly: Polynomial, + permuted_table_blind: Blind, + pub(in crate::plonk) product_poly: Polynomial, + product_blind: Blind, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +impl> Argument { + /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, + /// obtaining A' and S', and + /// - constructs Permuted struct using permuted_input_value = A', and + /// permuted_table_expression = S'. + /// The Permuted struct is used to update the Lookup, and is then returned. 
+ #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_permuted< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let compressed_input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&self.table_expressions); + + // Permute compressed (InputExpression, TableExpression) pair + let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( + pk, + params, + domain, + &mut rng, + &compressed_input_expression, + &compressed_table_expression, + )?; + + // Closure to construct commitment to vector of values + let mut commit_values = |values: &Polynomial| { + let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); + let blind = Blind(C::Scalar::random(&mut rng)); + let commitment = params.commit_lagrange(values, blind).to_affine(); + (poly, blind, commitment) + }; + + // Commit to permuted input expression + let 
(permuted_input_poly, permuted_input_blind, permuted_input_commitment) = + commit_values(&permuted_input_expression); + + // Commit to permuted table expression + let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = + commit_values(&permuted_table_expression); + + // Hash permuted input commitment + transcript.write_point(permuted_input_commitment)?; + + // Hash permuted table commitment + transcript.write_point(permuted_table_commitment)?; + + Ok(Permuted { + compressed_input_expression, + permuted_input_expression, + permuted_input_poly, + permuted_input_blind, + compressed_table_expression, + permuted_table_expression, + permuted_table_poly, + permuted_table_blind, + }) + } +} + +impl Permuted { + /// Given a Lookup with input expressions, table expressions, and the permuted + /// input expression and permuted table expression, this method constructs the + /// grand product polynomial over the lookup. The grand product polynomial + /// is used to populate the Product struct. The Product struct is + /// added to the Lookup and finally returned by the method. + pub(in crate::plonk) fn commit_product< + 'params, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + pk: &ProvingKey, + params: &P, + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + // Goal is to compute the products of fractions + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where a_j(X) is the jth input expression in this lookup, + // where a'(X) is the compression of the permuted input expressions, + // s_j(X) is the jth table expression in this lookup, + // s'(X) is the compression of the permuted table expressions, + // and i is the ith row of the expression. + let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; + // Denominator uses the permuted input expression and permuted table expression + parallelize(&mut lookup_product, |lookup_product, start| { + for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product + .iter_mut() + .zip(self.permuted_input_expression[start..].iter()) + .zip(self.permuted_table_expression[start..].iter()) + { + *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); + } + }); + + // Batch invert to obtain the denominators for the lookup product + // polynomials + lookup_product.iter_mut().batch_invert(); + + // Finish the computation of the entire fraction by computing the numerators + // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + parallelize(&mut lookup_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + + *product *= &(self.compressed_input_expression[i] + &*beta); + *product *= &(self.compressed_table_expression[i] + &*gamma); + } + }); + + // The product vector is a vector of products of fractions of the form + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where there are m input expressions and m table expressions, + // a_j(\omega^i) is the jth input expression in this lookup, + // a'j(\omega^i) is the permuted input expression, + // s_j(\omega^i) is the jth table expression in this lookup, + // s'(\omega^i) is the permuted table expression, + // and i is the ith row of the expression. + + // Compute the evaluations of the lookup product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(lookup_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. + .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + // This test works only with intermediate representations in this method. + // It can be used for debugging purposes. + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + + // l_0(X) * (1 - z(X)) = 0 + assert_eq!(z[0], C::Scalar::ONE); + + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + for i in 0..u { + let mut left = z[i + 1]; + let permuted_input_value = &self.permuted_input_expression[i]; + + let permuted_table_value = &self.permuted_table_expression[i]; + + left *= &(*beta + permuted_input_value); + left *= &(*gamma + permuted_table_value); + + let mut right = z[i]; + let mut input_term = self.compressed_input_expression[i]; + let mut table_term = self.compressed_table_expression[i]; + + input_term += &(*beta); + table_term += &(*gamma); + right *= &(input_term * &table_term); + + assert_eq!(left, right); + } + + // l_last(X) * (z(X)^2 - z(X)) = 0 + // Assertion will fail only when soundness is broken, in which + // case this z[u] value will be zero. (bad!) + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + permuted_input_poly: self.permuted_input_poly, + permuted_input_blind: self.permuted_input_blind, + permuted_table_poly: self.permuted_table_poly, + permuted_table_blind: self.permuted_table_blind, + product_poly: z, + product_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_inv = domain.rotate_omega(*x, Rotation::prev()); + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); + let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); + let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); + + // Hash each advice evaluation 
+ for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + .chain(Some(permuted_input_eval)) + .chain(Some(permuted_input_inv_eval)) + .chain(Some(permuted_table_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + // Open lookup input commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup table commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.permuted_table_poly, + blind: self.constructed.permuted_table_blind, + })) + // Open lookup input commitments at x_inv + .chain(Some(ProverQuery { + point: x_inv, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } +} + +type ExpressionPair = (Polynomial, Polynomial); + +/// Given a vector of input values A and a vector of table values S, +/// this method permutes A and S to produce A' and S', such that: +/// - like values in A' are vertically adjacent to each other; and +/// - the first row in a sequence of like values in A' is the row +/// that has the corresponding value in S'. +/// This method returns (A', S') if no errors are encountered. 
+fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + mut rng: R, + input_expression: &Polynomial, + table_expression: &Polynomial, +) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + let usable_rows = params.n() as usize - (blinding_factors + 1); + + let mut permuted_input_expression: Vec = input_expression.to_vec(); + permuted_input_expression.truncate(usable_rows); + + // Sort input lookup expression values + permuted_input_expression.sort(); + + // A BTreeMap of each unique element in the table expression and its count + let mut leftover_table_map: BTreeMap = table_expression + .iter() + .take(usable_rows) + .fold(BTreeMap::new(), |mut acc, coeff| { + *acc.entry(*coeff).or_insert(0) += 1; + acc + }); + let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; + + let mut repeated_input_rows = permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter_mut()) + .enumerate() + .filter_map(|(row, (input_value, table_value))| { + // If this is the first occurrence of `input_value` in the input expression + if row == 0 || *input_value != permuted_input_expression[row - 1] { + *table_value = *input_value; + // Remove one instance of input_value from leftover_table_map + if let Some(count) = leftover_table_map.get_mut(input_value) { + assert!(*count > 0); + *count -= 1; + None + } else { + // Return error if input_value not found + Some(Err(Error::ConstraintSystemFailure)) + } + // If input value is repeated + } else { + Some(Ok(row)) + } + }) + .collect::, _>>()?; + + // Populate permuted table at unfilled rows with leftover table elements + for (coeff, count) in leftover_table_map.iter() { + for _ in 0..*count { + permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; + } + } + assert!(repeated_input_rows.is_empty()); + + permuted_input_expression + .extend((0..(blinding_factors + 1)).map(|_| 
C::Scalar::random(&mut rng))); + permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); + assert_eq!(permuted_input_expression.len(), params.n() as usize); + assert_eq!(permuted_table_coeffs.len(), params.n() as usize); + + #[cfg(feature = "sanity-checks")] + { + let mut last = None; + for (a, b) in permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter()) + .take(usable_rows) + { + if *a != *b { + assert_eq!(*a, last.unwrap()); + } + last = Some(*a); + } + } + + Ok(( + domain.lagrange_from_vec(permuted_input_expression), + domain.lagrange_from_vec(permuted_table_coeffs), + )) +} diff --git a/halo2_backend/src/plonk/lookup/verifier.rs b/halo2_backend/src/plonk/lookup/verifier.rs new file mode 100644 index 0000000000..598691ba8f --- /dev/null +++ b/halo2_backend/src/plonk/lookup/verifier.rs @@ -0,0 +1,210 @@ +use std::iter; + +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, +}; +use super::Argument; +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{commitment::MSM, Rotation, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use ff::Field; + +pub struct PermutationCommitments { + permuted_input_commitment: C, + permuted_table_commitment: C, +} + +pub struct Committed { + permuted: PermutationCommitments, + product_commitment: C, +} + +pub struct Evaluated { + committed: Committed, + product_eval: C::Scalar, + product_next_eval: C::Scalar, + permuted_input_eval: C::Scalar, + permuted_input_inv_eval: C::Scalar, + permuted_table_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_permuted_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + transcript: &mut T, + ) -> Result, Error> { + let permuted_input_commitment = transcript.read_point()?; + let permuted_table_commitment = transcript.read_point()?; + + Ok(PermutationCommitments { + 
permuted_input_commitment, + permuted_table_commitment, + }) + } +} + +impl PermutationCommitments { + pub(in crate::plonk) fn read_product_commitment< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_commitment = transcript.read_point()?; + + Ok(Committed { + permuted: self, + product_commitment, + }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_eval = transcript.read_scalar()?; + let product_next_eval = transcript.read_scalar()?; + let permuted_input_eval = transcript.read_scalar()?; + let permuted_input_inv_eval = transcript.read_scalar()?; + let permuted_table_eval = transcript.read_scalar()?; + + Ok(Evaluated { + committed: self, + product_eval, + product_next_eval, + permuted_input_eval, + permuted_input_inv_eval, + permuted_table_eval, + }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + argument: &'a Argument, + theta: ChallengeTheta, + beta: ChallengeBeta, + gamma: ChallengeGamma, + advice_evals: &[C::Scalar], + fixed_evals: &[C::Scalar], + instance_evals: &[C::Scalar], + challenges: &[C::Scalar], + ) -> impl Iterator + 'a { + let active_rows = C::Scalar::ONE - (l_last + l_blind); + + let product_expression = || { + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + let left = self.product_next_eval + * &(self.permuted_input_eval + &*beta) + * &(self.permuted_table_eval + &*gamma); + + let compress_expressions = |expressions: &[Expression]| { + expressions + .iter() + .map(|expression| { + expression.evaluate( + &|scalar| scalar, + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + }; + let right = self.product_eval + * &(compress_expressions(&argument.input_expressions) + &*beta) + * &(compress_expressions(&argument.table_expressions) + &*gamma); + + (left - &right) * &active_rows + }; + + std::iter::empty() + .chain( + // l_0(X) * (1 - z(X)) = 0 + Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + ) + .chain( + // l_last(X) * (z(X)^2 - z(X)) = 0 + Some(l_last * &(self.product_eval.square() - &self.product_eval)), + ) + .chain( + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + // ) = 0 + Some(product_expression()), + ) + .chain(Some( + // l_0(X) * (a'(X) - s'(X)) = 0 + l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), + )) + .chain(Some( + // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + (self.permuted_input_eval - &self.permuted_table_eval) + * &(self.permuted_input_eval - &self.permuted_input_inv_eval) + * &active_rows, + )) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open lookup input commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + *x, + self.permuted_input_eval, + ))) + // Open lookup table commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_table_commitment, + *x, + self.permuted_table_eval, + ))) + // Open lookup input commitments at \omega^{-1} x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + x_inv, + self.permuted_input_inv_eval, + ))) + // Open lookup product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } +} diff --git a/halo2_backend/src/plonk/permutation.rs b/halo2_backend/src/plonk/permutation.rs new file mode 100644 index 0000000000..f41a570554 --- /dev/null +++ b/halo2_backend/src/plonk/permutation.rs @@ -0,0 +1,166 @@ +//! Implementation of permutation argument. 
+ +use super::circuit::{Any, Column}; +use crate::{ + arithmetic::CurveAffine, + helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, + SerdeCurveAffine, SerdePrimeField, + }, + poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, + SerdeFormat, +}; + +pub(crate) mod keygen; +pub(crate) mod prover; +pub(crate) mod verifier; + +pub use keygen::Assembly; + +use std::io; + +/// A permutation argument. +#[derive(Debug, Clone)] +pub struct Argument { + /// A sequence of columns involved in the argument. + pub(super) columns: Vec>, +} + +impl Argument { + pub(crate) fn new() -> Self { + Argument { columns: vec![] } + } + + /// Returns the minimum circuit degree required by the permutation argument. + /// The argument may use larger degree gates depending on the actual + /// circuit's degree and how many columns are involved in the permutation. + pub(crate) fn required_degree(&self) -> usize { + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // We will fit as many polynomials p_i(X) as possible + // into the required degree of the circuit, so the + // following will not affect the required degree of + // this middleware. + // + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + // + // On the first sets of columns, except the first + // set, we will do + // + // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 + // + // where z'(X) is the permutation for the previous set + // of columns. + // + // On the final set of columns, we will do + // + // degree 3: + // l_last(X) * (z'(X)^2 - z'(X)) = 0 + // + // which will allow the last value to be zero to + // ensure the argument is perfectly complete. + + // There are constraints of degree 3 regardless of the + // number of columns involved. + 3 + } + + /// Returns columns that participate on the permutation argument. 
+ pub fn get_columns(&self) -> Vec> { + self.columns.clone() + } +} + +/// The verifying key for a single permutation argument. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + commitments: Vec, +} + +impl VerifyingKey { + /// Returns commitments of sigma polynomials + pub fn commitments(&self) -> &Vec { + &self.commitments + } + + pub(crate) fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> + where + C: SerdeCurveAffine, + { + for commitment in &self.commitments { + commitment.write(writer, format)?; + } + Ok(()) + } + + pub(crate) fn read( + reader: &mut R, + argument: &Argument, + format: SerdeFormat, + ) -> io::Result + where + C: SerdeCurveAffine, + { + let commitments = (0..argument.columns.len()) + .map(|_| C::read(reader, format)) + .collect::, _>>()?; + Ok(VerifyingKey { commitments }) + } + + pub(crate) fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + self.commitments.len() * C::byte_length(format) + } +} + +/// The proving key for a single permutation argument. +#[derive(Clone, Debug)] +pub(crate) struct ProvingKey { + permutations: Vec>, + polys: Vec>, + pub(super) cosets: Vec>, +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField, +{ + /// Reads proving key for a single permutation argument from buffer using `Polynomial::read`. + pub(super) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + let permutations = read_polynomial_vec(reader, format)?; + let polys = read_polynomial_vec(reader, format)?; + let cosets = read_polynomial_vec(reader, format)?; + Ok(ProvingKey { + permutations, + polys, + cosets, + }) + } + + /// Writes proving key for a single permutation argument to buffer using `Polynomial::write`. 
+ pub(super) fn write( + &self, + writer: &mut W, + format: SerdeFormat, + ) -> io::Result<()> { + write_polynomial_slice(&self.permutations, writer, format)?; + write_polynomial_slice(&self.polys, writer, format)?; + write_polynomial_slice(&self.cosets, writer, format)?; + Ok(()) + } +} + +impl ProvingKey { + /// Gets the total number of bytes in the serialization of `self` + pub(super) fn bytes_length(&self) -> usize { + polynomial_slice_byte_length(&self.permutations) + + polynomial_slice_byte_length(&self.polys) + + polynomial_slice_byte_length(&self.cosets) + } +} diff --git a/halo2_backend/src/plonk/permutation/keygen.rs b/halo2_backend/src/plonk/permutation/keygen.rs new file mode 100644 index 0000000000..0d78f00ac5 --- /dev/null +++ b/halo2_backend/src/plonk/permutation/keygen.rs @@ -0,0 +1,460 @@ +use ff::{Field, PrimeField}; +use group::Curve; + +use super::{Argument, ProvingKey, VerifyingKey}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + plonk::{Any, Column, Error}, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; + +#[cfg(feature = "thread-safe-region")] +use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; + +#[cfg(not(feature = "thread-safe-region"))] +use crate::multicore::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; + +#[cfg(feature = "thread-safe-region")] +use std::collections::{BTreeSet, HashMap}; + +#[cfg(not(feature = "thread-safe-region"))] +/// Struct that accumulates all the necessary data in order to construct the permutation argument. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Assembly { + /// Columns that participate on the copy permutation argument. + columns: Vec>, + /// Mapping of the actual copies done. + mapping: Vec>, + /// Some aux data used to swap positions directly when sorting. 
+ aux: Vec>, + /// More aux data + sizes: Vec>, +} + +#[cfg(not(feature = "thread-safe-region"))] +impl Assembly { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + // Initialize the copy vector to keep track of copy constraints in all + // the permutation arguments. + let mut columns = vec![]; + for i in 0..p.columns.len() { + // Computes [(i, 0), (i, 1), ..., (i, n - 1)] + columns.push((0..n).map(|j| (i, j)).collect()); + } + + // Before any equality constraints are applied, every cell in the permutation is + // in a 1-cycle; therefore mapping and aux are identical, because every cell is + // its own distinguished element. + Assembly { + columns: p.columns.clone(), + mapping: columns.clone(), + aux: columns, + sizes: vec![vec![1usize; n]; p.columns.len()], + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + let left_column = self + .columns + .iter() + .position(|c| c == &left_column) + .ok_or(Error::ColumnNotInPermutation(left_column))?; + let right_column = self + .columns + .iter() + .position(|c| c == &right_column) + .ok_or(Error::ColumnNotInPermutation(right_column))?; + + // Check bounds + if left_row >= self.mapping[left_column].len() + || right_row >= self.mapping[right_column].len() + { + return Err(Error::BoundsFailure); + } + + // See book/src/design/permutation.md for a description of this algorithm. + + let mut left_cycle = self.aux[left_column][left_row]; + let mut right_cycle = self.aux[right_column][right_row]; + + // If left and right are in the same cycle, do nothing. + if left_cycle == right_cycle { + return Ok(()); + } + + if self.sizes[left_cycle.0][left_cycle.1] < self.sizes[right_cycle.0][right_cycle.1] { + std::mem::swap(&mut left_cycle, &mut right_cycle); + } + + // Merge the right cycle into the left one. 
+ self.sizes[left_cycle.0][left_cycle.1] += self.sizes[right_cycle.0][right_cycle.1]; + let mut i = right_cycle; + loop { + self.aux[i.0][i.1] = left_cycle; + i = self.mapping[i.0][i.1]; + if i == right_cycle { + break; + } + } + + let tmp = self.mapping[left_column][left_row]; + self.mapping[left_column][left_row] = self.mapping[right_column][right_row]; + self.mapping[right_column][right_row] = tmp; + + Ok(()) + } + + pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( + self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> VerifyingKey { + build_vk(params, domain, p, |i, j| self.mapping[i][j]) + } + + pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( + self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> ProvingKey { + build_pk(params, domain, p, |i, j| self.mapping[i][j]) + } + + /// Returns columns that participate in the permutation argument. + pub fn columns(&self) -> &[Column] { + &self.columns + } + + /// Returns mappings of the copies. + pub fn mapping( + &self, + ) -> impl Iterator + '_> { + self.mapping.iter().map(|c| c.par_iter().copied()) + } +} + +#[cfg(feature = "thread-safe-region")] +/// Struct that accumulates all the necessary data in order to construct the permutation argument. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Assembly { + /// Columns that participate on the copy permutation argument. + columns: Vec>, + /// Mapping of the actual copies done. + cycles: Vec>, + /// Mapping of the actual copies done. + ordered_cycles: Vec>, + /// Mapping of the actual copies done. 
+ aux: HashMap<(usize, usize), usize>, + /// total length of a column + col_len: usize, + /// number of columns + num_cols: usize, +} + +#[cfg(feature = "thread-safe-region")] +impl Assembly { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + Assembly { + columns: p.columns.clone(), + cycles: Vec::with_capacity(n), + ordered_cycles: Vec::with_capacity(n), + aux: HashMap::new(), + col_len: n, + num_cols: p.columns.len(), + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + let left_column = self + .columns + .iter() + .position(|c| c == &left_column) + .ok_or(Error::ColumnNotInPermutation(left_column))?; + let right_column = self + .columns + .iter() + .position(|c| c == &right_column) + .ok_or(Error::ColumnNotInPermutation(right_column))?; + + // Check bounds + if left_row >= self.col_len || right_row >= self.col_len { + return Err(Error::BoundsFailure); + } + + let left_cycle = self.aux.get(&(left_column, left_row)); + let right_cycle = self.aux.get(&(right_column, right_row)); + + // extract cycle elements + let right_cycle_elems = match right_cycle { + Some(i) => { + let entry = self.cycles[*i].clone(); + self.cycles[*i] = vec![]; + entry + } + None => [(right_column, right_row)].into(), + }; + + assert!(right_cycle_elems.contains(&(right_column, right_row))); + + // merge cycles + let cycle_idx = match left_cycle { + Some(i) => { + let entry = &mut self.cycles[*i]; + entry.extend(right_cycle_elems.clone()); + *i + } + // if they were singletons -- create a new cycle entry + None => { + let mut set: Vec<(usize, usize)> = right_cycle_elems.clone(); + set.push((left_column, left_row)); + self.cycles.push(set); + let cycle_idx = self.cycles.len() - 1; + self.aux.insert((left_column, left_row), cycle_idx); + cycle_idx + } + }; + + let index_updates = vec![cycle_idx; right_cycle_elems.len()].into_iter(); + let updates = 
right_cycle_elems.into_iter().zip(index_updates); + + self.aux.extend(updates); + + Ok(()) + } + + /// Builds the ordered mapping of the cycles. + /// This will only get executed once. + pub fn build_ordered_mapping(&mut self) { + use crate::multicore::IntoParallelRefMutIterator; + + // will only get called once + if self.ordered_cycles.is_empty() && !self.cycles.is_empty() { + self.ordered_cycles = self + .cycles + .par_iter_mut() + .map(|col| { + let mut set = BTreeSet::new(); + set.extend(col.clone()); + // free up memory + *col = vec![]; + set + }) + .collect(); + } + } + + fn mapping_at_idx(&self, col: usize, row: usize) -> (usize, usize) { + assert!( + !self.ordered_cycles.is_empty() || self.cycles.is_empty(), + "cycles have not been ordered" + ); + + if let Some(cycle_idx) = self.aux.get(&(col, row)) { + let cycle = &self.ordered_cycles[*cycle_idx]; + let mut cycle_iter = cycle.range(( + std::ops::Bound::Excluded((col, row)), + std::ops::Bound::Unbounded, + )); + // point to the next node in the cycle + match cycle_iter.next() { + Some((i, j)) => (*i, *j), + // wrap back around to the first element which SHOULD exist + None => *(cycle.iter().next().unwrap()), + } + // is a singleton + } else { + (col, row) + } + } + + pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( + &mut self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> VerifyingKey { + self.build_ordered_mapping(); + build_vk(params, domain, p, |i, j| self.mapping_at_idx(i, j)) + } + + pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( + &mut self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> ProvingKey { + self.build_ordered_mapping(); + build_pk(params, domain, p, |i, j| self.mapping_at_idx(i, j)) + } + + /// Returns columns that participate in the permutation argument. + pub fn columns(&self) -> &[Column] { + &self.columns + } + + /// Returns mappings of the copies. 
+ pub fn mapping( + &self, + ) -> impl Iterator + '_> { + (0..self.num_cols).map(move |i| { + (0..self.col_len) + .into_par_iter() + .map(move |j| self.mapping_at_idx(i, j)) + }) + } +} + +pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( + params: &P, + domain: &EvaluationDomain, + p: &Argument, + mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, +) -> ProvingKey { + // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] + let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; + { + let omega = domain.get_omega(); + parallelize(&mut omega_powers, |o, start| { + let mut cur = omega.pow_vartime([start as u64]); + for v in o.iter_mut() { + *v = cur; + cur *= ω + } + }) + } + + // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] + let mut deltaomega = vec![omega_powers; p.columns.len()]; + { + parallelize(&mut deltaomega, |o, start| { + let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); + for omega_powers in o.iter_mut() { + for v in omega_powers { + *v *= &cur; + } + cur *= &C::Scalar::DELTA; + } + }); + } + + // Compute permutation polynomials, convert to coset form. 
+ let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; + { + parallelize(&mut permutations, |o, start| { + for (x, permutation_poly) in o.iter_mut().enumerate() { + let i = start + x; + for (j, p) in permutation_poly.iter_mut().enumerate() { + let (permuted_i, permuted_j) = mapping(i, j); + *p = deltaomega[permuted_i][permuted_j]; + } + } + }); + } + + let mut polys = vec![domain.empty_coeff(); p.columns.len()]; + { + parallelize(&mut polys, |o, start| { + for (x, poly) in o.iter_mut().enumerate() { + let i = start + x; + let permutation_poly = permutations[i].clone(); + *poly = domain.lagrange_to_coeff(permutation_poly); + } + }); + } + + let mut cosets = vec![domain.empty_extended(); p.columns.len()]; + { + parallelize(&mut cosets, |o, start| { + for (x, coset) in o.iter_mut().enumerate() { + let i = start + x; + let poly = polys[i].clone(); + *coset = domain.coeff_to_extended(poly); + } + }); + } + + ProvingKey { + permutations, + polys, + cosets, + } +} + +pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( + params: &P, + domain: &EvaluationDomain, + p: &Argument, + mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, +) -> VerifyingKey { + // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] + let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; + { + let omega = domain.get_omega(); + parallelize(&mut omega_powers, |o, start| { + let mut cur = omega.pow_vartime([start as u64]); + for v in o.iter_mut() { + *v = cur; + cur *= ω + } + }) + } + + // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] + let mut deltaomega = vec![omega_powers; p.columns.len()]; + { + parallelize(&mut deltaomega, |o, start| { + let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); + for omega_powers in o.iter_mut() { + for v in omega_powers { + *v *= &cur; + } + cur *= &::DELTA; + } + }); + } + + // Computes the permutation polynomial based on the permutation + // description in 
the assembly. + let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; + { + parallelize(&mut permutations, |o, start| { + for (x, permutation_poly) in o.iter_mut().enumerate() { + let i = start + x; + for (j, p) in permutation_poly.iter_mut().enumerate() { + let (permuted_i, permuted_j) = mapping(i, j); + *p = deltaomega[permuted_i][permuted_j]; + } + } + }); + } + + // Pre-compute commitments for the URS. + let mut commitments = Vec::with_capacity(p.columns.len()); + for permutation in &permutations { + // Compute commitment to permutation polynomial + commitments.push( + params + .commit_lagrange(permutation, Blind::default()) + .to_affine(), + ); + } + + VerifyingKey { commitments } +} diff --git a/halo2_backend/src/plonk/permutation/prover.rs b/halo2_backend/src/plonk/permutation/prover.rs new file mode 100644 index 0000000000..d6b108554d --- /dev/null +++ b/halo2_backend/src/plonk/permutation/prover.rs @@ -0,0 +1,329 @@ +use ff::PrimeField; +use group::{ + ff::{BatchInvert, Field}, + Curve, +}; +use rand_core::RngCore; +use std::iter::{self, ExactSizeIterator}; + +use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::{Argument, ProvingKey}; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + plonk::{self, Error}, + poly::{ + commitment::{Blind, Params}, + Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; + +pub(crate) struct CommittedSet { + pub(crate) permutation_product_poly: Polynomial, + pub(crate) permutation_product_coset: Polynomial, + permutation_product_blind: Blind, +} + +pub(crate) struct Committed { + pub(crate) sets: Vec>, +} + +pub struct ConstructedSet { + permutation_product_poly: Polynomial, + permutation_product_blind: Blind, +} + +pub(crate) struct Constructed { + sets: Vec>, +} + +pub(crate) struct Evaluated { + constructed: Constructed, +} + +impl Argument { + 
#[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit< + 'params, + C: CurveAffine, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + params: &P, + pk: &plonk::ProvingKey, + pkey: &ProvingKey, + advice: &[Polynomial], + fixed: &[Polynomial], + instance: &[Polynomial], + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + + // How many columns can be included in a single permutation polynomial? + // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This + // will never underflow because of the requirement of at least a degree + // 3 circuit for the permutation argument. + assert!(pk.vk.cs_degree >= 3); + let chunk_len = pk.vk.cs_degree - 2; + let blinding_factors = pk.vk.cs.blinding_factors(); + + // Each column gets its own delta power. + let mut deltaomega = C::Scalar::ONE; + + // Track the "last" value from the previous column set + let mut last_z = C::Scalar::ONE; + + let mut sets = vec![]; + + for (columns, permutations) in self + .columns + .chunks(chunk_len) + .zip(pkey.permutations.chunks(chunk_len)) + { + // Goal is to compute the products of fractions + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where p_j(X) is the jth column in this permutation, + // and i is the ith row of the column. 
+ + let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; + + // Iterate over each column of the permutation + for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + for ((modified_values, value), permuted_value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + .zip(permuted_column_values[start..].iter()) + { + *modified_values *= &(*beta * permuted_value + &*gamma + value); + } + }); + } + + // Invert to obtain the denominator for the permutation product polynomial + modified_values.batch_invert(); + + // Iterate over each column again, this time finishing the computation + // of the entire fraction by computing the numerators + for &column in columns.iter() { + let omega = domain.get_omega(); + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); + for (modified_values, value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + { + // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta + *modified_values *= &(deltaomega * &*beta + &*gamma + value); + deltaomega *= ω + } + }); + deltaomega *= &::DELTA; + } + + // The modified_values vector is a vector of products of fractions + // of the form + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where i is the index into modified_values, for the jth column in + // the permutation + + // Compute the evaluations of the permutation product polynomial + // over our domain, starting with z[0] = 1 + let mut z = vec![last_z]; + for row in 
1..(params.n() as usize) { + let mut tmp = z[row - 1]; + + tmp *= &modified_values[row - 1]; + z.push(tmp); + } + let mut z = domain.lagrange_from_vec(z); + // Set blinding factors + for z in &mut z[params.n() as usize - blinding_factors..] { + *z = C::Scalar::random(&mut rng); + } + // Set new last_z + last_z = z[params.n() as usize - (blinding_factors + 1)]; + + let blind = Blind(C::Scalar::random(&mut rng)); + + let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); + let permutation_product_blind = blind; + let z = domain.lagrange_to_coeff(z); + let permutation_product_poly = z.clone(); + + let permutation_product_coset = domain.coeff_to_extended(z.clone()); + + let permutation_product_commitment = + permutation_product_commitment_projective.to_affine(); + + // Hash the permutation product commitment + transcript.write_point(permutation_product_commitment)?; + + sets.push(CommittedSet { + permutation_product_poly, + permutation_product_coset, + permutation_product_blind, + }); + } + + Ok(Committed { sets }) + } +} + +impl Committed { + pub(in crate::plonk) fn construct(self) -> Constructed { + Constructed { + sets: self + .sets + .iter() + .map(|set| ConstructedSet { + permutation_product_poly: set.permutation_product_poly.clone(), + permutation_product_blind: set.permutation_product_blind, + }) + .collect(), + } + } +} + +impl super::ProvingKey { + pub(in crate::plonk) fn open( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + self.polys.iter().map(move |poly| ProverQuery { + point: *x, + poly, + blind: Blind::default(), + }) + } + + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + &self, + x: ChallengeX, + transcript: &mut T, + ) -> Result<(), Error> { + // Hash permutation evals + for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { + transcript.write_scalar(eval)?; + } + + Ok(()) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: 
&plonk::ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let blinding_factors = pk.vk.cs.blinding_factors(); + + { + let mut sets = self.sets.iter(); + + while let Some(set) = sets.next() { + let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); + + let permutation_product_next_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation::next()), + ); + + // Hash permutation product evals + for eval in iter::empty() + .chain(Some(&permutation_product_eval)) + .chain(Some(&permutation_product_next_eval)) + { + transcript.write_scalar(*eval)?; + } + + // If we have any remaining sets to process, evaluate this set at omega^u + // so we can constrain the last value of its running product to equal the + // first value of the next set's running product, chaining them together. + if sets.len() > 0 { + let permutation_product_last_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), + ); + + transcript.write_scalar(permutation_product_last_eval)?; + } + } + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a plonk::ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = pk.vk.cs.blinding_factors(); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = pk + .vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.constructed.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega x + .chain(Some(ProverQuery { + point: *x, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + .chain(Some(ProverQuery { + point: x_next, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + })) + // Open it 
at \omega^{last} x for all but the last set. This rotation is only + // sensical for the first row, but we only use this rotation in a constraint + // that is gated on l_0. + .chain( + self.constructed + .sets + .iter() + .rev() + .skip(1) + .flat_map(move |set| { + Some(ProverQuery { + point: x_last, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + }) + }), + ) + } +} diff --git a/halo2_backend/src/plonk/permutation/verifier.rs b/halo2_backend/src/plonk/permutation/verifier.rs new file mode 100644 index 0000000000..a4637422ae --- /dev/null +++ b/halo2_backend/src/plonk/permutation/verifier.rs @@ -0,0 +1,254 @@ +use ff::{Field, PrimeField}; +use std::iter; + +use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::{Argument, VerifyingKey}; +use crate::{ + arithmetic::CurveAffine, + plonk::{self, Error}, + poly::{commitment::MSM, Rotation, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; + +pub struct Committed { + permutation_product_commitments: Vec, +} + +pub struct EvaluatedSet { + permutation_product_commitment: C, + permutation_product_eval: C::Scalar, + permutation_product_next_eval: C::Scalar, + permutation_product_last_eval: Option, +} + +pub struct CommonEvaluated { + permutation_evals: Vec, +} + +pub struct Evaluated { + sets: Vec>, +} + +impl Argument { + pub(crate) fn read_product_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + vk: &plonk::VerifyingKey, + transcript: &mut T, + ) -> Result, Error> { + let chunk_len = vk.cs_degree - 2; + + let permutation_product_commitments = self + .columns + .chunks(chunk_len) + .map(|_| transcript.read_point()) + .collect::, _>>()?; + + Ok(Committed { + permutation_product_commitments, + }) + } +} + +impl VerifyingKey { + pub(in crate::plonk) fn evaluate, T: TranscriptRead>( + &self, + transcript: &mut T, + ) -> Result, Error> { + let permutation_evals = self + .commitments + .iter() + 
.map(|_| transcript.read_scalar()) + .collect::, _>>()?; + + Ok(CommonEvaluated { permutation_evals }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let mut sets = vec![]; + + let mut iter = self.permutation_product_commitments.into_iter(); + + while let Some(permutation_product_commitment) = iter.next() { + let permutation_product_eval = transcript.read_scalar()?; + let permutation_product_next_eval = transcript.read_scalar()?; + let permutation_product_last_eval = if iter.len() > 0 { + Some(transcript.read_scalar()?) + } else { + None + }; + + sets.push(EvaluatedSet { + permutation_product_commitment, + permutation_product_eval, + permutation_product_next_eval, + permutation_product_last_eval, + }); + } + + Ok(Evaluated { sets }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + vk: &'a plonk::VerifyingKey, + p: &'a Argument, + common: &'a CommonEvaluated, + advice_evals: &'a [C::Scalar], + fixed_evals: &'a [C::Scalar], + instance_evals: &'a [C::Scalar], + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + beta: ChallengeBeta, + gamma: ChallengeGamma, + x: ChallengeX, + ) -> impl Iterator + 'a { + let chunk_len = vk.cs_degree - 2; + iter::empty() + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + .chain( + self.sets + .first() + .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), + ) + // Enforce only for the last set. + // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + .chain(self.sets.last().map(|last_set| { + (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) + * &l_last + })) + // Except for the first set, enforce. 
+ // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + .chain( + self.sets + .iter() + .skip(1) + .zip(self.sets.iter()) + .map(|(set, last_set)| { + ( + set.permutation_product_eval, + last_set.permutation_product_last_eval.unwrap(), + ) + }) + .map(move |(set, prev_last)| (set - &prev_last) * &l_0), + ) + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + .chain( + self.sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(common.permutation_evals.chunks(chunk_len)) + .enumerate() + .map(move |(chunk_index, ((set, columns), permutation_evals))| { + let mut left = set.permutation_product_next_eval; + for (eval, permutation_eval) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals + [vk.cs.get_any_query_index(column, Rotation::cur())] + } + }) + .zip(permutation_evals.iter()) + { + left *= &(eval + &(*beta * permutation_eval) + &*gamma); + } + + let mut right = set.permutation_product_eval; + let mut current_delta = (*beta * &*x) + * &(::DELTA + .pow_vartime([(chunk_index * chunk_len) as u64])); + for eval in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + }) { + right *= &(eval + ¤t_delta + &*gamma); + current_delta *= &C::Scalar::DELTA; + } + + (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) + }), + ) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r 
plonk::VerifyingKey<C>,
+        x: ChallengeX<C>,
+    ) -> impl Iterator<Item = VerifierQuery<'r, C, M>> + Clone {
+        let blinding_factors = vk.cs.blinding_factors();
+        let x_next = vk.domain.rotate_omega(*x, Rotation::next());
+        let x_last = vk
+            .domain
+            .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32)));
+
+        iter::empty()
+            .chain(self.sets.iter().flat_map(move |set| {
+                iter::empty()
+                    // Open permutation product commitments at x and \omega x
+                    // (x_next = \omega x; the chaining opening at \omega^{last} x is added below)
+                    .chain(Some(VerifierQuery::new_commitment(
+                        &set.permutation_product_commitment,
+                        *x,
+                        set.permutation_product_eval,
+                    )))
+                    .chain(Some(VerifierQuery::new_commitment(
+                        &set.permutation_product_commitment,
+                        x_next,
+                        set.permutation_product_next_eval,
+                    )))
+            }))
+            // Open it at \omega^{last} x for all but the last set
+            .chain(self.sets.iter().rev().skip(1).flat_map(move |set| {
+                Some(VerifierQuery::new_commitment(
+                    &set.permutation_product_commitment,
+                    x_last,
+                    set.permutation_product_last_eval.unwrap(),
+                ))
+            }))
+    }
+}
+
+impl<C: CurveAffine> CommonEvaluated<C> {
+    pub(in crate::plonk) fn queries<'r, M: MSM<C> + 'r>(
+        &'r self,
+        vkey: &'r VerifyingKey<C>,
+        x: ChallengeX<C>,
+    ) -> impl Iterator<Item = VerifierQuery<'r, C, M>> + Clone {
+        // Open permutation commitments for each permutation argument at x
+        vkey.commitments
+            .iter()
+            .zip(self.permutation_evals.iter())
+            .map(move |(commitment, &eval)| VerifierQuery::new_commitment(commitment, *x, eval))
+    }
+}
diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs
new file mode 100644
index 0000000000..e6e99d948d
--- /dev/null
+++ b/halo2_backend/src/plonk/prover.rs
@@ -0,0 +1,736 @@
+use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup};
+use group::Curve;
+use rand_core::RngCore;
+use std::collections::{BTreeSet, HashSet};
+use std::{collections::HashMap, iter};
+
+use super::{
+    circuit::sealed::{self},
+    lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta,
+    ChallengeX, ChallengeY, Error, ProvingKey,
+};
+
+use
crate::{ + arithmetic::{eval_polynomial, CurveAffine}, + plonk::Assigned, + poly::{ + commitment::{Blind, CommitmentScheme, Params, Prover}, + Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, + }, +}; +use crate::{ + poly::batch_invert_assigned, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use group::prime::PrimeCurveAffine; + +/// Collection of instance data used during proving for a single circuit proof. +#[derive(Debug)] +struct InstanceSingle { + pub instance_values: Vec>, + pub instance_polys: Vec>, +} + +/// Collection of advice data used during proving for a single circuit proof. +#[derive(Debug, Clone)] +struct AdviceSingle { + pub advice_polys: Vec>, + pub advice_blinds: Vec>, +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This works for a single proof. This is a wrapper over ProverV2. +#[derive(Debug)] +pub struct ProverV2Single< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>(ProverV2<'a, 'params, Scheme, P, E, R, T>); + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2Single<'a, 'params, Scheme, P, E, R, T> +{ + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + // TODO: If this was a vector the usage would be simpler + instance: &[&[Scheme::Scalar]], + rng: R, + transcript: &'a mut T, + ) -> Result + // TODO: Can I move this `where` to the struct definition? + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + Ok(Self(ProverV2::new( + params, + pk, + &[instance], + rng, + transcript, + )?)) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. 
+ pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.commit_phase(phase, vec![witness]) + } + + /// Finalizes the proof creation. + pub fn create_proof(self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.create_proof() + } +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This supports batch proving. +#[derive(Debug)] +pub struct ProverV2< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +> { + // Circuit and setup fields + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + phases: Vec, + // State + instance: Vec>, + advice: Vec>, + challenges: HashMap, + next_phase_index: usize, + rng: R, + transcript: &'a mut T, + _marker: std::marker::PhantomData<(P, E)>, +} + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2<'a, 'params, Scheme, P, E, R, T> +{ + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + // TODO: If this was a vector the usage would be simpler + instances: &[&[&[Scheme::Scalar]]], + rng: R, + transcript: &'a mut T, + ) -> Result + // TODO: Can I move this `where` to the struct definition? 
+ where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + for instance in instances.iter() { + if instance.len() != pk.vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + // Hash verification key into transcript + pk.vk.hash_into(transcript)?; + + let meta = &pk.vk.cs; + let phases = meta.phases().collect(); + + let domain = &pk.vk.domain; + + // TODO: Name this better + let mut instance_fn = + |instance: &[&[Scheme::Scalar]]| -> Result, Error> { + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); + } + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + // dbg!(1, value); + transcript.common_scalar(*value)?; + } + *poly = *value; + } + Ok(poly) + }) + .collect::, _>>()?; + + if P::QUERY_INSTANCE { + let instance_commitments_projective: Vec<_> = instance_values + .iter() + .map(|poly| params.commit_lagrange(poly, Blind::default())) + .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); + + for commitment in &instance_commitments { + // dbg!(2, commitment); + transcript.common_point(*commitment)?; + } + } + + let instance_polys: Vec<_> = instance_values + .iter() + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) + }) + .collect(); + + Ok(InstanceSingle { + instance_values, + instance_polys, + }) + }; + let instance: Vec> = instances + .iter() + .map(|instance| instance_fn(instance)) + .collect::, _>>()?; + + let advice = vec![ + AdviceSingle:: { + // Create 
vectors with empty polynomials to free space while they are not being used + advice_polys: vec![ + Polynomial::new_empty(0, Scheme::Scalar::ZERO); + meta.num_advice_columns + ], + advice_blinds: vec![Blind::default(); meta.num_advice_columns], + }; + instances.len() + ]; + let challenges = HashMap::::with_capacity(meta.num_challenges); + + Ok(ProverV2 { + params, + pk, + phases, + instance, + rng, + transcript, + advice, + challenges, + next_phase_index: 0, + _marker: std::marker::PhantomData {}, + }) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let current_phase = match self.phases.get(self.next_phase_index) { + Some(phase) => phase, + None => { + panic!("TODO: Return Error instead. All phases already commited"); + } + }; + if phase != current_phase.0 { + panic!("TODO: Return Error instead. Committing invalid phase"); + } + + let params = self.params; + let meta = &self.pk.vk.cs; + + let mut rng = &mut self.rng; + + let advice = &mut self.advice; + let challenges = &mut self.challenges; + + let column_indices = meta + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + if witness.len() != advice.len() { + return Err(Error::Other("witness.len() != advice.len()".to_string())); + } + for witness_circuit in &witness { + if witness_circuit.len() != meta.num_advice_columns { + return Err(Error::Other(format!( + "unexpected length in witness_circuitk. 
Got {}, expected {}", + witness_circuit.len(), + meta.num_advice_columns, + ))); + } + // Check that all current_phase advice columns are Some, and their length is correct + for (column_index, advice_column) in witness_circuit.iter().enumerate() { + if column_indices.contains(&column_index) { + match advice_column { + None => { + return Err(Error::Other(format!( + "expected advice column with index {} at phase {}", + column_index, current_phase.0 + ))) + } + Some(advice_column) => { + if advice_column.len() != params.n() as usize { + return Err(Error::Other(format!( + "expected advice column with index {} to have length {}", + column_index, + params.n(), + ))); + } + } + } + } else if advice_column.is_some() { + return Err(Error::Other(format!( + "expected no advice column with index {} at phase {}", + column_index, current_phase.0 + ))); + }; + } + } + + let mut commit_phase_fn = |advice: &mut AdviceSingle, + witness: Vec< + Option, LagrangeCoeff>>, + >| + -> Result<(), Error> { + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let mut advice_values = + batch_invert_assigned::(witness.into_iter().flatten().collect()); + let unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); + + // Add blinding factors to advice columns + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] { + *cell = Scheme::Scalar::random(&mut rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] 
{ + assert_eq!(*cell, Scheme::Scalar::ZERO); + } + } + } + + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) + .collect(); + let mut advice_commitments = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments, + ); + let advice_commitments = advice_commitments; + drop(advice_commitments_projective); + + for commitment in &advice_commitments { + self.transcript.write_point(*commitment)?; + } + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + Ok(()) + }; + + for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { + commit_phase_fn( + advice, + witness + .into_iter() + .map(|v| v.map(Polynomial::new_lagrange_from_vec)) + .collect(), + )?; + } + + for (index, phase) in meta.challenge_phase.iter().enumerate() { + if current_phase == phase { + let existing = + challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); + assert!(existing.is_none()); + } + } + + self.next_phase_index += 1; + Ok(challenges.clone()) + } + + /// Finalizes the proof creation. 
+ pub fn create_proof(mut self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let params = self.params; + let meta = &self.pk.vk.cs; + // let queries = &self.pk.vk.queries; + let pk = self.pk; + let domain = &self.pk.vk.domain; + + let mut rng = self.rng; + + let instance = std::mem::take(&mut self.instance); + let advice = std::mem::take(&mut self.advice); + let mut challenges = self.challenges; + + assert_eq!(challenges.len(), meta.num_challenges); + let challenges = (0..meta.num_challenges) + .map(|index| challenges.remove(&index).unwrap()) + .collect::>(); + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); + + let mut lookups_fn = + |instance: &InstanceSingle, + advice: &AdviceSingle| + -> Result>, Error> { + meta.lookups + .iter() + .map(|lookup| { + lookup.commit_permuted( + pk, + params, + domain, + theta, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }; + let lookups: Vec>> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, Error> { + // Construct and commit to permuted values for each lookup + lookups_fn(instance, advice) + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); + + // Commit to permutation. 
+ let permutations: Vec> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| { + meta.permutation.commit( + params, + pk, + &pk.permutation, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + beta, + gamma, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>()?; + + let lookups: Vec>> = lookups + .into_iter() + .map(|lookups| -> Result, _> { + // Construct and commit to products for each lookup + lookups + .into_iter() + .map(|lookup| { + lookup.commit_product(pk, params, beta, gamma, &mut rng, self.transcript) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles: Vec>> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, _> { + // Compress expressions for each shuffle + meta.shuffles + .iter() + .map(|shuffle| { + shuffle.commit_product( + pk, + params, + domain, + theta, + gamma, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Commit to the vanishing argument's random polynomial for blinding h(x_3) + let vanishing = vanishing::Argument::commit(params, domain, &mut rng, self.transcript)?; + + // Obtain challenge for keeping all separate gates linearly independent + let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); + + // Calculate the advice polys + let advice: Vec> = advice + .into_iter() + .map( + |AdviceSingle { + advice_polys, + advice_blinds, + }| { + AdviceSingle { + advice_polys: advice_polys + .into_iter() + .map(|poly| domain.lagrange_to_coeff(poly)) + .collect::>(), + advice_blinds, + } + }, + ) + .collect(); + + // Evaluate the h(X) polynomial + let h_poly = pk.ev.evaluate_h( + pk, + &advice + .iter() + .map(|a| a.advice_polys.as_slice()) + .collect::>(), + &instance + .iter() + .map(|i| i.instance_polys.as_slice()) + .collect::>(), + &challenges, + *y, + *beta, + *gamma, + *theta, + &lookups, + &shuffles, + 
&permutations, + ); + + // Construct the vanishing argument's h(X) commitments + let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, self.transcript)?; + + let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); + let xn = x.pow([params.n()]); + + if P::QUERY_INSTANCE { + // Compute and hash instance evals for the circuit instance + for instance in instance.iter() { + // Evaluate polynomials at omega^i x + let instance_evals: Vec<_> = meta + .instance_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &instance.instance_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each instance column evaluation + for eval in instance_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + } + + // Compute and hash advice evals for the circuit instance + for advice in advice.iter() { + // Evaluate polynomials at omega^i x + let advice_evals: Vec<_> = meta + .advice_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &advice.advice_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + // dbg!(&advice_evals); + + // Hash each advice column evaluation + for eval in advice_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + + // Compute and hash fixed evals + let fixed_evals: Vec<_> = meta + .fixed_queries + .iter() + .map(|&(column, at)| { + eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) + }) + .collect(); + + // Hash each fixed column evaluation + for eval in fixed_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + + let vanishing = vanishing.evaluate(x, xn, domain, self.transcript)?; + + // Evaluate common permutation data + pk.permutation.evaluate(x, self.transcript)?; + + // Evaluate the permutations, if any, at omega^i x. 
+ let permutations: Vec> = permutations + .into_iter() + .map(|permutation| -> Result<_, _> { + permutation.construct().evaluate(pk, x, self.transcript) + }) + .collect::, _>>()?; + + // Evaluate the lookups, if any, at omega^i x. + let lookups: Vec>> = lookups + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Evaluate the shuffles, if any, at omega^i x. + let shuffles: Vec>> = shuffles + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let instances = instance + .iter() + .zip(advice.iter()) + .zip(permutations.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { + iter::empty() + .chain( + P::QUERY_INSTANCE + .then_some(meta.instance_queries.iter().map(move |&(column, at)| { + ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &instance.instance_polys[column.index()], + blind: Blind::default(), + } + })) + .into_iter() + .flatten(), + ) + .chain( + meta.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice.advice_polys[column.index()], + blind: advice.advice_blinds[column.index()], + }), + ) + .chain(permutation.open(pk, x)) + .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) + .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) + }) + .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &pk.fixed_polys[column.index()], + blind: Blind::default(), + })) + .chain(pk.permutation.open(x)) + // We query the h(X) polynomial at x + .chain(vanishing.open(x)); + + let prover = P::new(params); + println!("DBG create_proof"); + prover + .create_proof(rng, self.transcript, instances) + .map_err(|_| 
Error::ConstraintSystemFailure)?; + + Ok(()) + } +} diff --git a/halo2_backend/src/plonk/shuffle.rs b/halo2_backend/src/plonk/shuffle.rs new file mode 100644 index 0000000000..0779c2b451 --- /dev/null +++ b/halo2_backend/src/plonk/shuffle.rs @@ -0,0 +1,76 @@ +use super::circuit::{Expression, ExpressionMid}; +use ff::Field; +use std::fmt::{self, Debug}; + +pub(crate) mod prover; +pub(crate) mod verifier; + +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} + +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone)] +pub struct Argument { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} + +impl Debug for Argument { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Argument") + .field("input_expressions", &self.input_expressions) + .field("shuffle_expressions", &self.shuffle_expressions) + .finish() + } +} + +impl Argument { + /// Constructs a new shuffle argument. + /// + /// `shuffle` is a sequence of `(input, shuffle)` tuples. 
+ pub fn new>(name: S, shuffle: Vec<(Expression, Expression)>) -> Self { + let (input_expressions, shuffle_expressions) = shuffle.into_iter().unzip(); + Argument { + name: name.as_ref().to_string(), + input_expressions, + shuffle_expressions, + } + } + + pub(crate) fn required_degree(&self) -> usize { + assert_eq!(self.input_expressions.len(), self.shuffle_expressions.len()); + + let mut input_degree = 1; + for expr in self.input_expressions.iter() { + input_degree = std::cmp::max(input_degree, expr.degree()); + } + let mut shuffle_degree = 1; + for expr in self.shuffle_expressions.iter() { + shuffle_degree = std::cmp::max(shuffle_degree, expr.degree()); + } + + // (1 - (l_last + l_blind)) (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) + std::cmp::max(2 + shuffle_degree, 2 + input_degree) + } + + /// Returns input of this argument + pub fn input_expressions(&self) -> &Vec> { + &self.input_expressions + } + + /// Returns table of this argument + pub fn shuffle_expressions(&self) -> &Vec> { + &self.shuffle_expressions + } + + /// Returns name of this argument + pub fn name(&self) -> &str { + &self.name + } +} diff --git a/halo2_backend/src/plonk/shuffle/prover.rs b/halo2_backend/src/plonk/shuffle/prover.rs new file mode 100644 index 0000000000..fd30436a47 --- /dev/null +++ b/halo2_backend/src/plonk/shuffle/prover.rs @@ -0,0 +1,250 @@ +use super::super::{ + circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, +}; +use super::Argument; +use crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use ff::WithSmallOrderMulGroup; +use group::{ff::BatchInvert, Curve}; +use rand_core::RngCore; +use std::{ + iter, + ops::{Mul, MulAssign}, +}; + +#[derive(Debug)] +struct Compressed { + input_expression: 
Polynomial, + shuffle_expression: Polynomial, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) product_poly: Polynomial, + product_blind: Blind, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +impl> Argument { + /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + #[allow(clippy::too_many_arguments)] + fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + ) -> Compressed + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the shuffle and compress them + let input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the shuffle and compress them + let shuffle_expression = compress_expressions(&self.shuffle_expressions); + + Compressed { + input_expression, + shuffle_expression, + } + } + + /// Given a Shuffle with input expressions and table expressions this method + /// constructs the grand product polynomial over 
the shuffle. + /// The grand product polynomial is used to populate the Product struct. + /// The Product struct is added to the Shuffle and finally returned by the method. + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_product< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + let compressed = self.compress( + pk, + params, + domain, + theta, + advice_values, + fixed_values, + instance_values, + challenges, + ); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut shuffle_product, |shuffle_product, start| { + for (shuffle_product, shuffle_value) in shuffle_product + .iter_mut() + .zip(compressed.shuffle_expression[start..].iter()) + { + *shuffle_product = *gamma + shuffle_value; + } + }); + + shuffle_product.iter_mut().batch_invert(); + + parallelize(&mut shuffle_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + *product *= &(*gamma + compressed.input_expression[i]); + } + }); + + // Compute the evaluations of the shuffle product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(shuffle_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. 
+ .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + assert_eq!(z[0], C::Scalar::ONE); + for i in 0..u { + let mut left = z[i + 1]; + let input_value = &compressed.input_expression[i]; + let shuffle_value = &compressed.shuffle_expression[i]; + left *= &(*gamma + shuffle_value); + let mut right = z[i]; + right *= &(*gamma + input_value); + assert_eq!(left, right); + } + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + product_poly: z, + product_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: 
self.constructed.product_blind, + })) + // Open shuffle product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } +} diff --git a/halo2_backend/src/plonk/shuffle/verifier.rs b/halo2_backend/src/plonk/shuffle/verifier.rs new file mode 100644 index 0000000000..759e86b234 --- /dev/null +++ b/halo2_backend/src/plonk/shuffle/verifier.rs @@ -0,0 +1,137 @@ +use std::iter; + +use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; +use super::Argument; +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{commitment::MSM, Rotation, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use ff::Field; + +pub struct Committed { + product_commitment: C, +} + +pub struct Evaluated { + committed: Committed, + product_eval: C::Scalar, + product_next_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_product_commitment< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + transcript: &mut T, + ) -> Result, Error> { + let product_commitment = transcript.read_point()?; + + Ok(Committed { product_commitment }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_eval = transcript.read_scalar()?; + let product_next_eval = transcript.read_scalar()?; + + Ok(Evaluated { + committed: self, + product_eval, + product_next_eval, + }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + argument: &'a Argument, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_evals: &[C::Scalar], + fixed_evals: &[C::Scalar], + instance_evals: &[C::Scalar], + challenges: &[C::Scalar], + ) -> impl Iterator + 'a { + let active_rows = C::Scalar::ONE - (l_last + 
l_blind); + + let product_expression = || { + // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma) + let compress_expressions = |expressions: &[Expression]| { + expressions + .iter() + .map(|expression| { + expression.evaluate( + &|scalar| scalar, + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + }; + // z(\omega X) (s(X) + \gamma) + let left = self.product_next_eval + * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); + // z(X) (a(X) + \gamma) + let right = + self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); + + (left - &right) * &active_rows + }; + + std::iter::empty() + .chain( + // l_0(X) * (1 - z'(X)) = 0 + Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + ) + .chain( + // l_last(X) * (z(X)^2 - z(X)) = 0 + Some(l_last * &(self.product_eval.square() - &self.product_eval)), + ) + .chain( + // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) + Some(product_expression()), + ) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open shuffle product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } +} diff --git a/halo2_backend/src/plonk/vanishing.rs b/halo2_backend/src/plonk/vanishing.rs new file mode 100644 index 0000000000..81f86b02e2 --- /dev/null +++ 
b/halo2_backend/src/plonk/vanishing.rs @@ -0,0 +1,11 @@ +use std::marker::PhantomData; + +use crate::arithmetic::CurveAffine; + +mod prover; +mod verifier; + +/// A vanishing argument. +pub(crate) struct Argument { + _marker: PhantomData, +} diff --git a/halo2_backend/src/plonk/vanishing/prover.rs b/halo2_backend/src/plonk/vanishing/prover.rs new file mode 100644 index 0000000000..7943086826 --- /dev/null +++ b/halo2_backend/src/plonk/vanishing/prover.rs @@ -0,0 +1,199 @@ +use std::{collections::HashMap, iter}; + +use ff::Field; +use group::Curve; +use rand_chacha::ChaCha20Rng; +use rand_core::{RngCore, SeedableRng}; + +use super::Argument; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + multicore::current_num_threads, + plonk::{ChallengeX, Error}, + poly::{ + commitment::{Blind, ParamsProver}, + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; + +pub(in crate::plonk) struct Committed { + random_poly: Polynomial, + random_blind: Blind, +} + +pub(in crate::plonk) struct Constructed { + h_pieces: Vec>, + h_blinds: Vec>, + committed: Committed, +} + +pub(in crate::plonk) struct Evaluated { + h_poly: Polynomial, + h_blind: Blind, + committed: Committed, +} + +impl Argument { + pub(in crate::plonk) fn commit< + 'params, + P: ParamsProver<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + params: &P, + domain: &EvaluationDomain, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + // Sample a random polynomial of degree n - 1 + let n = 1usize << domain.k() as usize; + let mut rand_vec = vec![C::Scalar::ZERO; n]; + + let num_threads = current_num_threads(); + let chunk_size = n / num_threads; + let thread_seeds = (0..) 
+ .step_by(chunk_size + 1) + .take(n % num_threads) + .chain( + (chunk_size != 0) + .then(|| ((n % num_threads) * (chunk_size + 1)..).step_by(chunk_size)) + .into_iter() + .flatten(), + ) + .take(num_threads) + .zip(iter::repeat_with(|| { + let mut seed = [0u8; 32]; + rng.fill_bytes(&mut seed); + ChaCha20Rng::from_seed(seed) + })) + .collect::>(); + + parallelize(&mut rand_vec, |chunk, offset| { + let mut rng = thread_seeds[&offset].clone(); + chunk + .iter_mut() + .for_each(|v| *v = C::Scalar::random(&mut rng)); + }); + + let random_poly: Polynomial = domain.coeff_from_vec(rand_vec); + + // Sample a random blinding factor + let random_blind = Blind(C::Scalar::random(rng)); + + // Commit + let c = params.commit(&random_poly, random_blind).to_affine(); + transcript.write_point(c)?; + + Ok(Committed { + random_poly, + random_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn construct< + 'params, + P: ParamsProver<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + params: &P, + domain: &EvaluationDomain, + h_poly: Polynomial, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + // Divide by t(X) = X^{params.n} - 1. 
+ let h_poly = domain.divide_by_vanishing_poly(h_poly); + + // Obtain final h(X) polynomial + let h_poly = domain.extended_to_coeff(h_poly); + + // Split h(X) up into pieces + let h_pieces = h_poly + .chunks_exact(params.n() as usize) + .map(|v| domain.coeff_from_vec(v.to_vec())) + .collect::>(); + drop(h_poly); + let h_blinds: Vec<_> = h_pieces + .iter() + .map(|_| Blind(C::Scalar::random(&mut rng))) + .collect(); + + // Compute commitments to each h(X) piece + let h_commitments_projective: Vec<_> = h_pieces + .iter() + .zip(h_blinds.iter()) + .map(|(h_piece, blind)| params.commit(h_piece, *blind)) + .collect(); + let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; + C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); + let h_commitments = h_commitments; + + // Hash each h(X) piece + for c in h_commitments.iter() { + transcript.write_point(*c)?; + } + + Ok(Constructed { + h_pieces, + h_blinds, + committed: self, + }) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + x: ChallengeX, + xn: C::Scalar, + domain: &EvaluationDomain, + transcript: &mut T, + ) -> Result, Error> { + let h_poly = self + .h_pieces + .iter() + .rev() + .fold(domain.empty_coeff(), |acc, eval| acc * xn + eval); + + let h_blind = self + .h_blinds + .iter() + .rev() + .fold(Blind(C::Scalar::ZERO), |acc, eval| acc * Blind(xn) + *eval); + + let random_eval = eval_polynomial(&self.committed.random_poly, *x); + transcript.write_scalar(random_eval)?; + + Ok(Evaluated { + h_poly, + h_blind, + committed: self.committed, + }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + iter::empty() + .chain(Some(ProverQuery { + point: *x, + poly: &self.h_poly, + blind: self.h_blind, + })) + .chain(Some(ProverQuery { + point: *x, + poly: &self.committed.random_poly, + blind: self.committed.random_blind, + })) + } +} diff --git 
a/halo2_backend/src/plonk/vanishing/verifier.rs b/halo2_backend/src/plonk/vanishing/verifier.rs new file mode 100644 index 0000000000..0881dfb2c0 --- /dev/null +++ b/halo2_backend/src/plonk/vanishing/verifier.rs @@ -0,0 +1,138 @@ +use std::iter; + +use ff::Field; + +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{ + commitment::{Params, MSM}, + VerifierQuery, + }, + transcript::{read_n_points, EncodedChallenge, TranscriptRead}, +}; + +use super::super::{ChallengeX, ChallengeY}; +use super::Argument; + +pub struct Committed { + random_poly_commitment: C, +} + +pub struct Constructed { + h_commitments: Vec, + random_poly_commitment: C, +} + +pub struct PartiallyEvaluated { + h_commitments: Vec, + random_poly_commitment: C, + random_eval: C::Scalar, +} + +pub struct Evaluated> { + h_commitment: M, + random_poly_commitment: C, + expected_h_eval: C::Scalar, + random_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_commitments_before_y< + E: EncodedChallenge, + T: TranscriptRead, + >( + transcript: &mut T, + ) -> Result, Error> { + let random_poly_commitment = transcript.read_point()?; + + Ok(Committed { + random_poly_commitment, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn read_commitments_after_y< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + vk: &VerifyingKey, + transcript: &mut T, + ) -> Result, Error> { + // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 + let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; + + Ok(Constructed { + h_commitments, + random_poly_commitment: self.random_poly_commitment, + }) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate_after_x, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let random_eval = transcript.read_scalar()?; + + Ok(PartiallyEvaluated { + h_commitments: self.h_commitments, + random_poly_commitment: self.random_poly_commitment, + random_eval, 
+ }) + } +} + +impl PartiallyEvaluated { + pub(in crate::plonk) fn verify<'params, P: Params<'params, C>>( + self, + params: &'params P, + expressions: impl Iterator, + y: ChallengeY, + xn: C::Scalar, + ) -> Evaluated { + let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); + let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); + + let h_commitment = + self.h_commitments + .iter() + .rev() + .fold(params.empty_msm(), |mut acc, commitment| { + acc.scale(xn); + let commitment: C::CurveExt = (*commitment).into(); + acc.append_term(C::Scalar::ONE, commitment); + + acc + }); + + Evaluated { + expected_h_eval, + h_commitment, + random_poly_commitment: self.random_poly_commitment, + random_eval: self.random_eval, + } + } +} + +impl> Evaluated { + pub(in crate::plonk) fn queries( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + iter::empty() + .chain(Some(VerifierQuery::new_msm( + &self.h_commitment, + *x, + self.expected_h_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &self.random_poly_commitment, + *x, + self.random_eval, + ))) + } +} diff --git a/halo2_backend/src/plonk/verifier.rs b/halo2_backend/src/plonk/verifier.rs new file mode 100644 index 0000000000..5d613227ff --- /dev/null +++ b/halo2_backend/src/plonk/verifier.rs @@ -0,0 +1,459 @@ +use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use group::Curve; +use std::iter; + +use super::{ + vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, + VerifyingKey, +}; +use crate::arithmetic::compute_inner_product; +use crate::poly::commitment::{CommitmentScheme, Verifier}; +use crate::poly::VerificationStrategy; +use crate::poly::{ + commitment::{Blind, Params}, + VerifierQuery, +}; +use crate::transcript::{read_n_scalars, EncodedChallenge, TranscriptRead}; + +#[cfg(feature = "batch")] +mod batch; +#[cfg(feature = "batch")] +pub use batch::BatchVerifier; + +/// Returns a boolean indicating 
whether or not the proof is valid. Verifies a single proof (not +/// batched). +pub fn verify_proof_single< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instance: &[&[Scheme::Scalar]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + verify_proof(params, vk, strategy, &[instance], transcript) +} + +/// Returns a boolean indicating whether or not the proof is valid +pub fn verify_proof< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instances: &[&[&[Scheme::Scalar]]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + // Check that instances matches the expected number of instance columns + for instances in instances.iter() { + if instances.len() != vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + let instance_commitments = if V::QUERY_INSTANCE { + instances + .iter() + .map(|instance| { + instance + .iter() + .map(|instance| { + if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { + return Err(Error::InstanceTooLarge); + } + let mut poly = instance.to_vec(); + poly.resize(params.n() as usize, Scheme::Scalar::ZERO); + let poly = vk.domain.lagrange_from_vec(poly); + + Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) + }) + .collect::, _>>() + }) + .collect::, _>>()? 
+ } else { + vec![vec![]; instances.len()] + }; + + let num_proofs = instance_commitments.len(); + + // Hash verification key into transcript + vk.hash_into(transcript)?; + + if V::QUERY_INSTANCE { + for instance_commitments in instance_commitments.iter() { + // Hash the instance (external) commitments into the transcript + for commitment in instance_commitments { + transcript.common_point(*commitment)? + } + } + } else { + for instance in instances.iter() { + for instance in instance.iter() { + for value in instance.iter() { + transcript.common_scalar(*value)?; + } + } + } + } + + // Hash the prover's advice commitments into the transcript and squeeze challenges + let (advice_commitments, challenges) = { + let mut advice_commitments = + vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; + let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; + + for current_phase in vk.cs.phases() { + for advice_commitments in advice_commitments.iter_mut() { + for (phase, commitment) in vk + .cs + .advice_column_phase + .iter() + .zip(advice_commitments.iter_mut()) + { + if current_phase == *phase { + *commitment = transcript.read_point()?; + } + } + } + for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { + if current_phase == *phase { + *challenge = *transcript.squeeze_challenge_scalar::<()>(); + } + } + } + + (advice_commitments, challenges) + }; + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + + let lookups_permuted = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each lookup permuted commitment + vk.cs + .lookups + .iter() + .map(|argument| argument.read_permuted_commitments(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = 
transcript.squeeze_challenge_scalar(); + + let permutations_committed = (0..num_proofs) + .map(|_| { + // Hash each permutation product commitment + vk.cs.permutation.read_product_commitments(vk, transcript) + }) + .collect::, _>>()?; + + let lookups_committed = lookups_permuted + .into_iter() + .map(|lookups| { + // Hash each lookup product commitment + lookups + .into_iter() + .map(|lookup| lookup.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_committed = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each shuffle product commitment + vk.cs + .shuffles + .iter() + .map(|argument| argument.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; + + // Sample y challenge, which keeps the gates linearly independent. + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + + let vanishing = vanishing.read_commitments_after_y(vk, transcript)?; + + // Sample x challenge, which is used to ensure the circuit is + // satisfied with high probability. + let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + let instance_evals = if V::QUERY_INSTANCE { + (0..num_proofs) + .map(|_| -> Result, _> { + read_n_scalars(transcript, vk.cs.instance_queries.len()) + }) + .collect::, _>>()? 
+ } else { + let xn = x.pow([params.n()]); + let (min_rotation, max_rotation) = + vk.cs + .instance_queries + .iter() + .fold((0, 0), |(min, max), (_, rotation)| { + if rotation.0 < min { + (rotation.0, max) + } else if rotation.0 > max { + (min, rotation.0) + } else { + (min, max) + } + }); + let max_instance_len = instances + .iter() + .flat_map(|instance| instance.iter().map(|instance| instance.len())) + .max_by(Ord::cmp) + .unwrap_or_default(); + let l_i_s = &vk.domain.l_i_range( + *x, + xn, + -max_rotation..max_instance_len as i32 + min_rotation.abs(), + ); + instances + .iter() + .map(|instances| { + vk.cs + .instance_queries + .iter() + .map(|(column, rotation)| { + let instances = instances[column.index()]; + let offset = (max_rotation - rotation.0) as usize; + compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) + }) + .collect::>() + }) + .collect::>() + }; + + let advice_evals = (0..num_proofs) + .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) + .collect::, _>>()?; + + let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; + + let vanishing = vanishing.evaluate_after_x(transcript)?; + + let permutations_common = vk.permutation.evaluate(transcript)?; + + let permutations_evaluated = permutations_committed + .into_iter() + .map(|permutation| permutation.evaluate(transcript)) + .collect::, _>>()?; + + let lookups_evaluated = lookups_committed + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|lookup| lookup.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_evaluated = shuffles_committed + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|shuffle| shuffle.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // This check ensures the circuit is satisfied so long as the polynomial + // commitments open to the correct values. 
+ let vanishing = { + // x^n + let xn = x.pow([params.n()]); + + let blinding_factors = vk.cs.blinding_factors(); + let l_evals = vk + .domain + .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); + assert_eq!(l_evals.len(), 2 + blinding_factors); + let l_last = l_evals[0]; + let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] + .iter() + .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); + let l_0 = l_evals[1 + blinding_factors]; + + // Compute the expected value of h(x) + let expressions = advice_evals + .iter() + .zip(instance_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { + let challenges = &challenges; + let fixed_evals = &fixed_evals; + std::iter::empty() + // Evaluate the circuit using the custom gates provided + .chain(vk.cs.gates.iter().flat_map(move |gate| { + gate.polynomials().iter().map(move |poly| { + poly.evaluate( + &|scalar| scalar, + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + })) + .chain(permutation.expressions( + vk, + &vk.cs.permutation, + &permutations_common, + advice_evals, + fixed_evals, + instance_evals, + l_0, + l_last, + l_blind, + beta, + gamma, + x, + )) + .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + beta, + gamma, + advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + gamma, + advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + 
}, + ); + + vanishing.verify(params, expressions, y, xn) + }; + + let queries = instance_commitments + .iter() + .zip(instance_evals.iter()) + .zip(advice_commitments.iter()) + .zip(advice_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |( + ( + ( + ( + ((instance_commitments, instance_evals), advice_commitments), + advice_evals, + ), + permutation, + ), + lookups, + ), + shuffles, + )| { + iter::empty() + .chain( + V::QUERY_INSTANCE + .then_some(vk.cs.instance_queries.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &instance_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + instance_evals[query_index], + ) + }, + )) + .into_iter() + .flatten(), + ) + .chain(vk.cs.advice_queries.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &advice_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + advice_evals[query_index], + ) + }, + )) + .chain(permutation.queries(vk, x)) + .chain(lookups.iter().flat_map(move |p| p.queries(vk, x))) + .chain(shuffles.iter().flat_map(move |p| p.queries(vk, x))) + }, + ) + .chain( + vk.cs + .fixed_queries + .iter() + .enumerate() + .map(|(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &vk.fixed_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + fixed_evals[query_index], + ) + }), + ) + .chain(permutations_common.queries(&vk.permutation, x)) + .chain(vanishing.queries(x)); + + // We are now convinced the circuit is satisfied so long as the + // polynomial commitments open to the correct values. 
+ + let verifier = V::new(params); + strategy.process(|msm| { + verifier + .verify_proof(transcript, queries, msm) + .map_err(|_| Error::Opening) + }) +} diff --git a/halo2_backend/src/plonk/verifier/batch.rs b/halo2_backend/src/plonk/verifier/batch.rs new file mode 100644 index 0000000000..ba3e2419e6 --- /dev/null +++ b/halo2_backend/src/plonk/verifier/batch.rs @@ -0,0 +1,135 @@ +use ff::FromUniformBytes; +use group::ff::Field; +use halo2curves::CurveAffine; +use rand_core::OsRng; + +use super::{verify_proof, VerificationStrategy}; +use crate::{ + multicore::{ + IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, + }, + plonk::{Error, VerifyingKey}, + poly::{ + commitment::{Params, MSM}, + ipa::{ + commitment::{IPACommitmentScheme, ParamsVerifierIPA}, + msm::MSMIPA, + multiopen::VerifierIPA, + strategy::GuardIPA, + }, + }, + transcript::{Blake2bRead, TranscriptReadBuffer}, +}; + +/// A proof verification strategy that returns the proof's MSM. +/// +/// `BatchVerifier` handles the accumulation of the MSMs for the batched proofs. +#[derive(Debug)] +struct BatchStrategy<'params, C: CurveAffine> { + msm: MSMIPA<'params, C>, +} + +impl<'params, C: CurveAffine> + VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> + for BatchStrategy<'params, C> +{ + type Output = MSMIPA<'params, C>; + + fn new(params: &'params ParamsVerifierIPA) -> Self { + BatchStrategy { + msm: MSMIPA::new(params), + } + } + + fn process( + self, + f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, + ) -> Result { + let guard = f(self.msm)?; + Ok(guard.use_challenges()) + } + + fn finalize(self) -> bool { + unreachable!() + } +} + +#[derive(Debug)] +struct BatchItem { + instances: Vec>>, + proof: Vec, +} + +/// A verifier that checks multiple proofs in a batch. 
**This requires the +/// `batch` crate feature to be enabled.** +#[derive(Debug, Default)] +pub struct BatchVerifier { + items: Vec>, +} + +impl BatchVerifier +where + C::Scalar: FromUniformBytes<64>, +{ + /// Constructs a new batch verifier. + pub fn new() -> Self { + Self { items: vec![] } + } + + /// Adds a proof to the batch. + pub fn add_proof(&mut self, instances: Vec>>, proof: Vec) { + self.items.push(BatchItem { instances, proof }) + } + + /// Finalizes the batch and checks its validity. + /// + /// Returns `false` if *some* proof was invalid. If the caller needs to identify + /// specific failing proofs, it must re-process the proofs separately. + /// + /// This uses [`OsRng`] internally instead of taking an `R: RngCore` argument, because + /// the internal parallelization requires access to a RNG that is guaranteed to not + /// clone its internal state when shared between threads. + pub fn finalize(self, params: &ParamsVerifierIPA, vk: &VerifyingKey) -> bool { + fn accumulate_msm<'params, C: CurveAffine>( + mut acc: MSMIPA<'params, C>, + msm: MSMIPA<'params, C>, + ) -> MSMIPA<'params, C> { + // Scale the MSM by a random factor to ensure that if the existing MSM has + // `is_zero() == false` then this argument won't be able to interfere with it + // to make it true, with high probability. 
+ acc.scale(C::Scalar::random(OsRng)); + + acc.add_msm(&msm); + acc + } + + let final_msm = self + .items + .into_par_iter() + .enumerate() + .map(|(i, item)| { + let instances: Vec> = item + .instances + .iter() + .map(|i| i.iter().map(|c| &c[..]).collect()) + .collect(); + let instances: Vec<_> = instances.iter().map(|i| &i[..]).collect(); + + let strategy = BatchStrategy::new(params); + let mut transcript = Blake2bRead::init(&item.proof[..]); + verify_proof(params, vk, strategy, &instances, &mut transcript).map_err(|e| { + tracing::debug!("Batch item {} failed verification: {}", i, e); + e + }) + }) + .try_fold_and_reduce( + || params.empty_msm(), + |acc, res| res.map(|proof_msm| accumulate_msm(acc, proof_msm)), + ); + + match final_msm { + Ok(msm) => msm.check(), + Err(_) => false, + } + } +} diff --git a/halo2_backend/src/poly.rs b/halo2_backend/src/poly.rs new file mode 100644 index 0000000000..b505d6b49b --- /dev/null +++ b/halo2_backend/src/poly.rs @@ -0,0 +1,345 @@ +//! Contains utilities for performing arithmetic over univariate polynomials in +//! various forms, including computing commitments to them and provably opening +//! the committed polynomials at arbitrary points. + +use crate::arithmetic::parallelize; +use crate::helpers::SerdePrimeField; +use crate::plonk::Assigned; +use crate::SerdeFormat; + +use group::ff::{BatchInvert, Field}; +use std::fmt::Debug; +use std::io; +use std::marker::PhantomData; +use std::ops::{Add, Deref, DerefMut, Index, IndexMut, Mul, RangeFrom, RangeFull, Sub}; + +/// Generic commitment scheme structures +pub mod commitment; +mod domain; +mod query; +mod strategy; + +/// Inner product argument commitment scheme +pub mod ipa; + +/// KZG commitment scheme +pub mod kzg; + +#[cfg(test)] +mod multiopen_test; + +pub use domain::*; +pub use query::{ProverQuery, VerifierQuery}; +pub use strategy::{Guard, VerificationStrategy}; + +/// This is an error that could occur during proving or circuit synthesis. 
+// TODO: these errors need to be cleaned up +#[derive(Debug)] +pub enum Error { + /// OpeningProof is not well-formed + OpeningError, + /// Caller needs to re-sample a point + SamplingError, +} + +/// The basis over which a polynomial is described. +pub trait Basis: Copy + Debug + Send + Sync {} + +/// The polynomial is defined as coefficients +#[derive(Clone, Copy, Debug)] +pub struct Coeff; +impl Basis for Coeff {} + +/// The polynomial is defined as coefficients of Lagrange basis polynomials +#[derive(Clone, Copy, Debug)] +pub struct LagrangeCoeff; +impl Basis for LagrangeCoeff {} + +/// The polynomial is defined as coefficients of Lagrange basis polynomials in +/// an extended size domain which supports multiplication +#[derive(Clone, Copy, Debug)] +pub struct ExtendedLagrangeCoeff; +impl Basis for ExtendedLagrangeCoeff {} + +/// Represents a univariate polynomial defined over a field and a particular +/// basis. +#[derive(Clone, Debug)] +pub struct Polynomial { + pub(crate) values: Vec, + pub(crate) _marker: PhantomData, +} + +impl Polynomial { + pub(crate) fn new_empty(size: usize, zero: F) -> Self { + Polynomial { + values: vec![zero; size], + _marker: PhantomData, + } + } +} + +impl Polynomial { + /// Obtains a polynomial in Lagrange form when given a vector of Lagrange + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. 
+ pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { + Polynomial { + values, + _marker: PhantomData, + } + } +} + +impl Index for Polynomial { + type Output = F; + + fn index(&self, index: usize) -> &F { + self.values.index(index) + } +} + +impl IndexMut for Polynomial { + fn index_mut(&mut self, index: usize) -> &mut F { + self.values.index_mut(index) + } +} + +impl Index> for Polynomial { + type Output = [F]; + + fn index(&self, index: RangeFrom) -> &[F] { + self.values.index(index) + } +} + +impl IndexMut> for Polynomial { + fn index_mut(&mut self, index: RangeFrom) -> &mut [F] { + self.values.index_mut(index) + } +} + +impl Index for Polynomial { + type Output = [F]; + + fn index(&self, index: RangeFull) -> &[F] { + self.values.index(index) + } +} + +impl IndexMut for Polynomial { + fn index_mut(&mut self, index: RangeFull) -> &mut [F] { + self.values.index_mut(index) + } +} + +impl Deref for Polynomial { + type Target = [F]; + + fn deref(&self) -> &[F] { + &self.values[..] + } +} + +impl DerefMut for Polynomial { + fn deref_mut(&mut self) -> &mut [F] { + &mut self.values[..] + } +} + +impl Polynomial { + /// Iterate over the values, which are either in coefficient or evaluation + /// form depending on the basis `B`. + pub fn iter(&self) -> impl Iterator { + self.values.iter() + } + + /// Iterate over the values mutably, which are either in coefficient or + /// evaluation form depending on the basis `B`. + pub fn iter_mut(&mut self) -> impl Iterator { + self.values.iter_mut() + } + + /// Gets the size of this polynomial in terms of the number of + /// coefficients used to describe it. + pub fn num_coeffs(&self) -> usize { + self.values.len() + } +} + +impl Polynomial { + /// Reads polynomial from buffer using `SerdePrimeField::read`. 
+ pub(crate) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + let mut poly_len = [0u8; 4]; + reader.read_exact(&mut poly_len)?; + let poly_len = u32::from_be_bytes(poly_len); + + (0..poly_len) + .map(|_| F::read(reader, format)) + .collect::>>() + .map(|values| Self { + values, + _marker: PhantomData, + }) + } + + /// Writes polynomial to buffer using `SerdePrimeField::write`. + pub(crate) fn write( + &self, + writer: &mut W, + format: SerdeFormat, + ) -> io::Result<()> { + writer.write_all(&(self.values.len() as u32).to_be_bytes())?; + for value in self.values.iter() { + value.write(writer, format)?; + } + Ok(()) + } +} + +pub(crate) fn batch_invert_assigned( + assigned: Vec, LagrangeCoeff>>, +) -> Vec> { + let mut assigned_denominators: Vec<_> = assigned + .iter() + .map(|f| { + f.iter() + .map(|value| value.denominator()) + .collect::>() + }) + .collect(); + + assigned_denominators + .iter_mut() + .flat_map(|f| { + f.iter_mut() + // If the denominator is trivial, we can skip it, reducing the + // size of the batch inversion. 
+ .filter_map(|d| d.as_mut()) + }) + .batch_invert(); + + assigned + .iter() + .zip(assigned_denominators) + .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) + .collect() +} + +impl Polynomial, LagrangeCoeff> { + pub(crate) fn invert( + &self, + inv_denoms: impl Iterator + ExactSizeIterator, + ) -> Polynomial { + assert_eq!(inv_denoms.len(), self.values.len()); + Polynomial { + values: self + .values + .iter() + .zip(inv_denoms) + .map(|(a, inv_den)| a.numerator() * inv_den) + .collect(), + _marker: self._marker, + } + } +} + +impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { + type Output = Polynomial; + + fn add(mut self, rhs: &'a Polynomial) -> Polynomial { + parallelize(&mut self.values, |lhs, start| { + for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { + *lhs += *rhs; + } + }); + + self + } +} + +impl<'a, F: Field, B: Basis> Sub<&'a Polynomial> for Polynomial { + type Output = Polynomial; + + fn sub(mut self, rhs: &'a Polynomial) -> Polynomial { + parallelize(&mut self.values, |lhs, start| { + for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { + *lhs -= *rhs; + } + }); + + self + } +} + +impl Polynomial { + /// Rotates the values in a Lagrange basis polynomial by `Rotation` + pub fn rotate(&self, rotation: Rotation) -> Polynomial { + let mut values = self.values.clone(); + if rotation.0 < 0 { + values.rotate_right((-rotation.0) as usize); + } else { + values.rotate_left(rotation.0 as usize); + } + Polynomial { + values, + _marker: PhantomData, + } + } +} + +impl Mul for Polynomial { + type Output = Polynomial; + + fn mul(mut self, rhs: F) -> Polynomial { + if rhs == F::ZERO { + return Polynomial { + values: vec![F::ZERO; self.len()], + _marker: PhantomData, + }; + } + if rhs == F::ONE { + return self; + } + + parallelize(&mut self.values, |lhs, _| { + for lhs in lhs.iter_mut() { + *lhs *= rhs; + } + }); + + self + } +} + +impl<'a, F: Field, B: Basis> Sub for &'a 
Polynomial { + type Output = Polynomial; + + fn sub(self, rhs: F) -> Polynomial { + let mut res = self.clone(); + res.values[0] -= rhs; + res + } +} + +/// Describes the relative rotation of a vector. Negative numbers represent +/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) +/// rotations. Zero represents no rotation. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Rotation(pub i32); + +impl Rotation { + /// The current location in the evaluation domain + pub fn cur() -> Rotation { + Rotation(0) + } + + /// The previous location in the evaluation domain + pub fn prev() -> Rotation { + Rotation(-1) + } + + /// The next location in the evaluation domain + pub fn next() -> Rotation { + Rotation(1) + } +} diff --git a/halo2_backend/src/poly/commitment.rs b/halo2_backend/src/poly/commitment.rs new file mode 100644 index 0000000000..feae085655 --- /dev/null +++ b/halo2_backend/src/poly/commitment.rs @@ -0,0 +1,245 @@ +use super::{ + query::{ProverQuery, VerifierQuery}, + strategy::Guard, + Coeff, LagrangeCoeff, Polynomial, +}; +use crate::poly::Error; +use crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; +use ff::Field; +use halo2curves::CurveAffine; +use rand_core::RngCore; +use std::{ + fmt::Debug, + io::{self}, + ops::{Add, AddAssign, Mul, MulAssign}, +}; + +/// Defines components of a commitment scheme. 
+pub trait CommitmentScheme { + /// Application field of this commitment scheme + type Scalar: Field; + + /// Elliptic curve used to commit the application and witnesses + type Curve: CurveAffine; + + /// Constant prover parameters + type ParamsProver: for<'params> ParamsProver< + 'params, + Self::Curve, + ParamsVerifier = Self::ParamsVerifier, + >; + + /// Constant verifier parameters + type ParamsVerifier: for<'params> ParamsVerifier<'params, Self::Curve>; + + /// Wrapper for parameter generator + fn new_params(k: u32) -> Self::ParamsProver; + + /// Wrapper for parameter reader + fn read_params(reader: &mut R) -> io::Result; +} + +/// Parameters for circuit synthesis and prover parameters. +pub trait Params<'params, C: CurveAffine>: Sized + Clone + Debug { + /// Multi scalar multiplication engine + type MSM: MSM + 'params; + + /// Logarithmic size of the circuit + fn k(&self) -> u32; + + /// Size of the circuit + fn n(&self) -> u64; + + /// Downsize `Params` with smaller `k`. + fn downsize(&mut self, k: u32); + + /// Generates an empty multiscalar multiplication struct using the + /// appropriate params. + fn empty_msm(&'params self) -> Self::MSM; + + /// This commits to a polynomial using its evaluations over the $2^k$ size + /// evaluation domain. The commitment will be blinded by the blinding factor + /// `r`. + fn commit_lagrange( + &self, + poly: &Polynomial, + r: Blind, + ) -> C::CurveExt; + + /// Writes params to a buffer. + fn write(&self, writer: &mut W) -> io::Result<()>; + + /// Reads params from a buffer. + fn read(reader: &mut R) -> io::Result; +} + +/// Parameters for circuit synthesis and prover parameters. +pub trait ParamsProver<'params, C: CurveAffine>: Params<'params, C> { + /// Constant verifier parameters. + type ParamsVerifier: ParamsVerifier<'params, C>; + + /// Returns new instance of parameters + fn new(k: u32) -> Self; + + /// This computes a commitment to a polynomial described by the provided + /// slice of coefficients. 
The commitment may be blinded by the blinding + /// factor `r`. + fn commit(&self, poly: &Polynomial, r: Blind) + -> C::CurveExt; + + /// Getter for g generators + fn get_g(&self) -> &[C]; + + /// Returns verification parameters. + fn verifier_params(&'params self) -> &'params Self::ParamsVerifier; +} + +/// Verifier specific functionality with circuit constraints +pub trait ParamsVerifier<'params, C: CurveAffine>: Params<'params, C> {} + +/// Multi scalar multiplication engine +pub trait MSM: Clone + Debug + Send + Sync { + /// Add arbitrary term (the scalar and the point) + fn append_term(&mut self, scalar: C::Scalar, point: C::CurveExt); + + /// Add another multiexp into this one + fn add_msm(&mut self, other: &Self) + where + Self: Sized; + + /// Scale all scalars in the MSM by some scaling factor + fn scale(&mut self, factor: C::Scalar); + + /// Perform multiexp and check that it results in zero + fn check(&self) -> bool; + + /// Perform multiexp and return the result + fn eval(&self) -> C::CurveExt; + + /// Return base points + fn bases(&self) -> Vec; + + /// Scalars + fn scalars(&self) -> Vec; +} + +/// Common multi-open prover interface for various commitment schemes +pub trait Prover<'params, Scheme: CommitmentScheme> { + /// Query instance or not + const QUERY_INSTANCE: bool; + + /// Creates new prover instance + fn new(params: &'params Scheme::ParamsProver) -> Self; + + /// Create a multi-opening proof + fn create_proof< + 'com, + E: EncodedChallenge, + T: TranscriptWrite, + R, + I, + >( + &self, + rng: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore; +} + +/// Common multi-open verifier interface for various commitment schemes +pub trait Verifier<'params, Scheme: CommitmentScheme> { + /// Unfinalized verification result. 
This is returned in verification + /// to allow the developer to compress or combine verification results + type Guard: Guard; + + /// Accumulator for compressed verification + type MSMAccumulator; + + /// Query instance or not + const QUERY_INSTANCE: bool; + + /// Creates new verifier instance + fn new(params: &'params Scheme::ParamsVerifier) -> Self; + + /// Process the proof and returns unfinished result named `Guard` + fn verify_proof< + 'com, + E: EncodedChallenge, + T: TranscriptRead, + I, + >( + &self, + transcript: &mut T, + queries: I, + msm: Self::MSMAccumulator, + ) -> Result + where + 'params: 'com, + I: IntoIterator< + Item = VerifierQuery< + 'com, + Scheme::Curve, + >::MSM, + >, + > + Clone; +} + +/// Wrapper type around a blinding factor. +#[derive(Copy, Clone, Eq, PartialEq, Debug)] +pub struct Blind(pub F); + +impl Default for Blind { + fn default() -> Self { + Blind(F::ONE) + } +} + +impl Blind { + /// Given `rng` creates new blinding scalar + pub fn new(rng: &mut R) -> Self { + Blind(F::random(rng)) + } +} + +impl Add for Blind { + type Output = Self; + + fn add(self, rhs: Blind) -> Self { + Blind(self.0 + rhs.0) + } +} + +impl Mul for Blind { + type Output = Self; + + fn mul(self, rhs: Blind) -> Self { + Blind(self.0 * rhs.0) + } +} + +impl AddAssign for Blind { + fn add_assign(&mut self, rhs: Blind) { + self.0 += rhs.0; + } +} + +impl MulAssign for Blind { + fn mul_assign(&mut self, rhs: Blind) { + self.0 *= rhs.0; + } +} + +impl AddAssign for Blind { + fn add_assign(&mut self, rhs: F) { + self.0 += rhs; + } +} + +impl MulAssign for Blind { + fn mul_assign(&mut self, rhs: F) { + self.0 *= rhs; + } +} diff --git a/halo2_backend/src/poly/domain.rs b/halo2_backend/src/poly/domain.rs new file mode 100644 index 0000000000..ae9b8bf9ae --- /dev/null +++ b/halo2_backend/src/poly/domain.rs @@ -0,0 +1,557 @@ +//! Contains utilities for performing polynomial arithmetic over an evaluation +//! domain that is of a suitable size for the application. 
+ +use crate::{ + arithmetic::{best_fft, parallelize}, + plonk::Assigned, +}; + +use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, Rotation}; +use ff::WithSmallOrderMulGroup; +use group::ff::{BatchInvert, Field}; + +use std::marker::PhantomData; + +/// This structure contains precomputed constants and other details needed for +/// performing operations on an evaluation domain of size $2^k$ and an extended +/// domain of size $2^{k} * j$ with $j \neq 0$. +#[derive(Clone, Debug)] +pub struct EvaluationDomain { + n: u64, + k: u32, + extended_k: u32, + omega: F, + omega_inv: F, + extended_omega: F, + extended_omega_inv: F, + g_coset: F, + g_coset_inv: F, + quotient_poly_degree: u64, + ifft_divisor: F, + extended_ifft_divisor: F, + t_evaluations: Vec, + barycentric_weight: F, +} + +impl> EvaluationDomain { + /// This constructs a new evaluation domain object based on the provided + /// values $j, k$. + pub fn new(j: u32, k: u32) -> Self { + // quotient_poly_degree * params.n - 1 is the degree of the quotient polynomial + let quotient_poly_degree = (j - 1) as u64; + + // n = 2^k + let n = 1u64 << k; + + // We need to work within an extended domain, not params.k but params.k + i + // for some integer i such that 2^(params.k + i) is sufficiently large to + // describe the quotient polynomial. + let mut extended_k = k; + while (1 << extended_k) < (n * quotient_poly_degree) { + extended_k += 1; + } + + // ensure extended_k <= S + assert!(extended_k <= F::S); + + let mut extended_omega = F::ROOT_OF_UNITY; + + // Get extended_omega, the 2^{extended_k}'th root of unity + // The loop computes extended_omega = omega^{2 ^ (S - extended_k)} + // Notice that extended_omega ^ {2 ^ extended_k} = omega ^ {2^S} = 1. + for _ in extended_k..F::S { + extended_omega = extended_omega.square(); + } + let extended_omega = extended_omega; + let mut extended_omega_inv = extended_omega; // Inversion computed later + + // Get omega, the 2^{k}'th root of unity (i.e. 
n'th root of unity) + // The loop computes omega = extended_omega ^ {2 ^ (extended_k - k)} + // = (omega^{2 ^ (S - extended_k)}) ^ {2 ^ (extended_k - k)} + // = omega ^ {2 ^ (S - k)}. + // Notice that omega ^ {2^k} = omega ^ {2^S} = 1. + let mut omega = extended_omega; + for _ in k..extended_k { + omega = omega.square(); + } + let omega = omega; + let mut omega_inv = omega; // Inversion computed later + + // We use zeta here because we know it generates a coset, and it's available + // already. + // The coset evaluation domain is: + // zeta {1, extended_omega, extended_omega^2, ..., extended_omega^{(2^extended_k) - 1}} + let g_coset = F::ZETA; + let g_coset_inv = g_coset.square(); + + let mut t_evaluations = Vec::with_capacity(1 << (extended_k - k)); + { + // Compute the evaluations of t(X) = X^n - 1 in the coset evaluation domain. + // We don't have to compute all of them, because it will repeat. + let orig = F::ZETA.pow_vartime([n, 0, 0, 0]); + let step = extended_omega.pow_vartime([n, 0, 0, 0]); + let mut cur = orig; + loop { + t_evaluations.push(cur); + cur *= &step; + if cur == orig { + break; + } + } + assert_eq!(t_evaluations.len(), 1 << (extended_k - k)); + + // Subtract 1 from each to give us t_evaluations[i] = t(zeta * extended_omega^i) + for coeff in &mut t_evaluations { + *coeff -= &F::ONE; + } + + // Invert, because we're dividing by this polynomial. + // We invert in a batch, below. 
+ } + + let mut ifft_divisor = F::from(1 << k); // Inversion computed later + let mut extended_ifft_divisor = F::from(1 << extended_k); // Inversion computed later + + // The barycentric weight of 1 over the evaluation domain + // 1 / \prod_{i != 0} (1 - omega^i) + let mut barycentric_weight = F::from(n); // Inversion computed later + + // Compute batch inversion + t_evaluations + .iter_mut() + .chain(Some(&mut ifft_divisor)) + .chain(Some(&mut extended_ifft_divisor)) + .chain(Some(&mut barycentric_weight)) + .chain(Some(&mut extended_omega_inv)) + .chain(Some(&mut omega_inv)) + .batch_invert(); + + EvaluationDomain { + n, + k, + extended_k, + omega, + omega_inv, + extended_omega, + extended_omega_inv, + g_coset, + g_coset_inv, + quotient_poly_degree, + ifft_divisor, + extended_ifft_divisor, + t_evaluations, + barycentric_weight, + } + } + + /// Obtains a polynomial in Lagrange form when given a vector of Lagrange + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. + pub fn lagrange_from_vec(&self, values: Vec) -> Polynomial { + assert_eq!(values.len(), self.n as usize); + + Polynomial { + values, + _marker: PhantomData, + } + } + + /// Obtains a polynomial in coefficient form when given a vector of + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. 
+ pub fn coeff_from_vec(&self, values: Vec) -> Polynomial { + assert_eq!(values.len(), self.n as usize); + + Polynomial { + values, + _marker: PhantomData, + } + } + + /// Returns an empty (zero) polynomial in the coefficient basis + pub fn empty_coeff(&self) -> Polynomial { + Polynomial { + values: vec![F::ZERO; self.n as usize], + _marker: PhantomData, + } + } + + /// Returns an empty (zero) polynomial in the Lagrange coefficient basis + pub fn empty_lagrange(&self) -> Polynomial { + Polynomial { + values: vec![F::ZERO; self.n as usize], + _marker: PhantomData, + } + } + + /// Returns an empty (zero) polynomial in the Lagrange coefficient basis, with + /// deferred inversions. + pub fn empty_lagrange_assigned(&self) -> Polynomial, LagrangeCoeff> { + Polynomial { + values: vec![F::ZERO.into(); self.n as usize], + _marker: PhantomData, + } + } + + /// Returns a constant polynomial in the Lagrange coefficient basis + pub fn constant_lagrange(&self, scalar: F) -> Polynomial { + Polynomial { + values: vec![scalar; self.n as usize], + _marker: PhantomData, + } + } + + /// Returns an empty (zero) polynomial in the extended Lagrange coefficient + /// basis + pub fn empty_extended(&self) -> Polynomial { + Polynomial { + values: vec![F::ZERO; self.extended_len()], + _marker: PhantomData, + } + } + + /// Returns a constant polynomial in the extended Lagrange coefficient + /// basis + pub fn constant_extended(&self, scalar: F) -> Polynomial { + Polynomial { + values: vec![scalar; self.extended_len()], + _marker: PhantomData, + } + } + + /// This takes us from an n-length vector into the coefficient form. + /// + /// This function will panic if the provided vector is not the correct + /// length. 
+ pub fn lagrange_to_coeff(&self, mut a: Polynomial) -> Polynomial { + assert_eq!(a.values.len(), 1 << self.k); + + // Perform inverse FFT to obtain the polynomial in coefficient form + Self::ifft(&mut a.values, self.omega_inv, self.k, self.ifft_divisor); + + Polynomial { + values: a.values, + _marker: PhantomData, + } + } + + /// This takes us from an n-length coefficient vector into a coset of the extended + /// evaluation domain, rotating by `rotation` if desired. + pub fn coeff_to_extended( + &self, + mut a: Polynomial, + ) -> Polynomial { + assert_eq!(a.values.len(), 1 << self.k); + + self.distribute_powers_zeta(&mut a.values, true); + a.values.resize(self.extended_len(), F::ZERO); + best_fft(&mut a.values, self.extended_omega, self.extended_k); + + Polynomial { + values: a.values, + _marker: PhantomData, + } + } + + /// Rotate the extended domain polynomial over the original domain. + pub fn rotate_extended( + &self, + poly: &Polynomial, + rotation: Rotation, + ) -> Polynomial { + let new_rotation = ((1 << (self.extended_k - self.k)) * rotation.0.abs()) as usize; + + let mut poly = poly.clone(); + + if rotation.0 >= 0 { + poly.values.rotate_left(new_rotation); + } else { + poly.values.rotate_right(new_rotation); + } + + poly + } + + /// This takes us from the extended evaluation domain and gets us the + /// quotient polynomial coefficients. + /// + /// This function will panic if the provided vector is not the correct + /// length. + // TODO/FIXME: caller should be responsible for truncating + pub fn extended_to_coeff(&self, mut a: Polynomial) -> Vec { + assert_eq!(a.values.len(), self.extended_len()); + + // Inverse FFT + Self::ifft( + &mut a.values, + self.extended_omega_inv, + self.extended_k, + self.extended_ifft_divisor, + ); + + // Distribute powers to move from coset; opposite from the + // transformation we performed earlier. 
+ self.distribute_powers_zeta(&mut a.values, false); + + // Truncate it to match the size of the quotient polynomial; the + // evaluation domain might be slightly larger than necessary because + // it always lies on a power-of-two boundary. + a.values + .truncate((&self.n * self.quotient_poly_degree) as usize); + + a.values + } + + /// This divides the polynomial (in the extended domain) by the vanishing + /// polynomial of the $2^k$ size domain. + pub fn divide_by_vanishing_poly( + &self, + mut a: Polynomial, + ) -> Polynomial { + assert_eq!(a.values.len(), self.extended_len()); + + // Divide to obtain the quotient polynomial in the coset evaluation + // domain. + parallelize(&mut a.values, |h, mut index| { + for h in h { + *h *= &self.t_evaluations[index % self.t_evaluations.len()]; + index += 1; + } + }); + + Polynomial { + values: a.values, + _marker: PhantomData, + } + } + + /// Given a slice of group elements `[a_0, a_1, a_2, ...]`, this returns + /// `[a_0, [zeta]a_1, [zeta^2]a_2, a_3, [zeta]a_4, [zeta^2]a_5, a_6, ...]`, + /// where zeta is a cube root of unity in the multiplicative subgroup with + /// order (p - 1), i.e. zeta^3 = 1. + /// + /// `into_coset` should be set to `true` when moving into the coset, + /// and `false` when moving out. This toggles the choice of `zeta`. 
+ fn distribute_powers_zeta(&self, a: &mut [F], into_coset: bool) { + let coset_powers = if into_coset { + [self.g_coset, self.g_coset_inv] + } else { + [self.g_coset_inv, self.g_coset] + }; + parallelize(a, |a, mut index| { + for a in a { + // Distribute powers to move into/from coset + let i = index % (coset_powers.len() + 1); + if i != 0 { + *a *= &coset_powers[i - 1]; + } + index += 1; + } + }); + } + + fn ifft(a: &mut [F], omega_inv: F, log_n: u32, divisor: F) { + best_fft(a, omega_inv, log_n); + parallelize(a, |a, _| { + for a in a { + // Finish iFFT + *a *= &divisor; + } + }); + } + + /// Get the size of the domain + pub fn k(&self) -> u32 { + self.k + } + + /// Get the size of the extended domain + pub fn extended_k(&self) -> u32 { + self.extended_k + } + + /// Get the size of the extended domain + pub fn extended_len(&self) -> usize { + 1 << self.extended_k + } + + /// Get $\omega$, the generator of the $2^k$ order multiplicative subgroup. + pub fn get_omega(&self) -> F { + self.omega + } + + /// Get $\omega^{-1}$, the inverse of the generator of the $2^k$ order + /// multiplicative subgroup. + pub fn get_omega_inv(&self) -> F { + self.omega_inv + } + + /// Get the generator of the extended domain's multiplicative subgroup. + pub fn get_extended_omega(&self) -> F { + self.extended_omega + } + + /// Multiplies a value by some power of $\omega$, essentially rotating over + /// the domain. + pub fn rotate_omega(&self, value: F, rotation: Rotation) -> F { + let mut point = value; + if rotation.0 >= 0 { + point *= &self.get_omega().pow_vartime([rotation.0 as u64]); + } else { + point *= &self + .get_omega_inv() + .pow_vartime([(rotation.0 as i64).unsigned_abs()]); + } + point + } + + /// Computes evaluations (at the point `x`, where `xn = x^n`) of Lagrange + /// basis polynomials `l_i(X)` defined such that `l_i(omega^i) = 1` and + /// `l_i(omega^j) = 0` for all `j != i` at each provided rotation `i`. 
+ /// + /// # Implementation + /// + /// The polynomial + /// $$\prod_{j=0,j \neq i}^{n - 1} (X - \omega^j)$$ + /// has a root at all points in the domain except $\omega^i$, where it evaluates to + /// $$\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)$$ + /// and so we divide that polynomial by this value to obtain $l_i(X)$. Since + /// $$\prod_{j=0,j \neq i}^{n - 1} (X - \omega^j) + /// = \frac{X^n - 1}{X - \omega^i}$$ + /// then $l_i(x)$ for some $x$ is evaluated as + /// $$\left(\frac{x^n - 1}{x - \omega^i}\right) + /// \cdot \left(\frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)}\right).$$ + /// We refer to + /// $$1 \over \prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)$$ + /// as the barycentric weight of $\omega^i$. + /// + /// We know that for $i = 0$ + /// $$\frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)} = \frac{1}{n}.$$ + /// + /// If we multiply $(1 / n)$ by $\omega^i$ then we obtain + /// $$\frac{1}{\prod_{j=0,j \neq 0}^{n - 1} (\omega^i - \omega^j)} + /// = \frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)}$$ + /// which is the barycentric weight of $\omega^i$. 
+ pub fn l_i_range + Clone>( + &self, + x: F, + xn: F, + rotations: I, + ) -> Vec { + let mut results; + { + let rotations = rotations.clone().into_iter(); + results = Vec::with_capacity(rotations.size_hint().1.unwrap_or(0)); + for rotation in rotations { + let rotation = Rotation(rotation); + let result = x - self.rotate_omega(F::ONE, rotation); + results.push(result); + } + results.iter_mut().batch_invert(); + } + + let common = (xn - F::ONE) * self.barycentric_weight; + for (rotation, result) in rotations.into_iter().zip(results.iter_mut()) { + let rotation = Rotation(rotation); + *result = self.rotate_omega(*result * common, rotation); + } + + results + } + + /// Gets the quotient polynomial's degree (as a multiple of n) + pub fn get_quotient_poly_degree(&self) -> usize { + self.quotient_poly_degree as usize + } + + /// Obtain a pinned version of this evaluation domain; a structure with the + /// minimal parameters needed to determine the rest of the evaluation + /// domain. + pub fn pinned(&self) -> PinnedEvaluationDomain<'_, F> { + PinnedEvaluationDomain { + k: &self.k, + extended_k: &self.extended_k, + omega: &self.omega, + } + } +} + +/// Represents the minimal parameters that determine an `EvaluationDomain`. 
+#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedEvaluationDomain<'a, F: Field> { + k: &'a u32, + extended_k: &'a u32, + omega: &'a F, +} + +#[test] +fn test_rotate() { + use rand_core::OsRng; + + use crate::arithmetic::eval_polynomial; + use halo2curves::pasta::pallas::Scalar; + + let domain = EvaluationDomain::::new(1, 3); + let rng = OsRng; + + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), 8); + for value in poly.iter_mut() { + *value = Scalar::random(rng); + } + + let poly_rotated_cur = poly.rotate(Rotation::cur()); + let poly_rotated_next = poly.rotate(Rotation::next()); + let poly_rotated_prev = poly.rotate(Rotation::prev()); + + let poly = domain.lagrange_to_coeff(poly); + let poly_rotated_cur = domain.lagrange_to_coeff(poly_rotated_cur); + let poly_rotated_next = domain.lagrange_to_coeff(poly_rotated_next); + let poly_rotated_prev = domain.lagrange_to_coeff(poly_rotated_prev); + + let x = Scalar::random(rng); + + assert_eq!( + eval_polynomial(&poly[..], x), + eval_polynomial(&poly_rotated_cur[..], x) + ); + assert_eq!( + eval_polynomial(&poly[..], x * domain.omega), + eval_polynomial(&poly_rotated_next[..], x) + ); + assert_eq!( + eval_polynomial(&poly[..], x * domain.omega_inv), + eval_polynomial(&poly_rotated_prev[..], x) + ); +} + +#[test] +fn test_l_i() { + use rand_core::OsRng; + + use crate::arithmetic::{eval_polynomial, lagrange_interpolate}; + use halo2curves::pasta::pallas::Scalar; + let domain = EvaluationDomain::::new(1, 3); + + let mut l = vec![]; + let mut points = vec![]; + for i in 0..8 { + points.push(domain.omega.pow([i])); + } + for i in 0..8 { + let mut l_i = vec![Scalar::zero(); 8]; + l_i[i] = Scalar::ONE; + let l_i = lagrange_interpolate(&points[..], &l_i[..]); + l.push(l_i); + } + + let x = Scalar::random(OsRng); + let xn = x.pow([8]); + + let evaluations = domain.l_i_range(x, xn, -7..=7); + for i in 0..8 { + assert_eq!(eval_polynomial(&l[i][..], x), evaluations[7 + i]); + assert_eq!(eval_polynomial(&l[(8 - 
i) % 8][..], x), evaluations[7 - i]); + } +} diff --git a/halo2_backend/src/poly/ipa/commitment.rs b/halo2_backend/src/poly/ipa/commitment.rs new file mode 100644 index 0000000000..7be053c49c --- /dev/null +++ b/halo2_backend/src/poly/ipa/commitment.rs @@ -0,0 +1,370 @@ +//! This module contains an implementation of the polynomial commitment scheme +//! described in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 + +use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize, CurveAffine, CurveExt}; +use crate::helpers::CurveRead; +use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; +use crate::poly::ipa::msm::MSMIPA; +use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; + +use group::{Curve, Group}; +use std::marker::PhantomData; + +mod prover; +mod verifier; + +pub use prover::create_proof; +pub use verifier::verify_proof; + +use std::io; + +/// Public parameters for IPA commitment scheme +#[derive(Debug, Clone)] +pub struct ParamsIPA { + pub(crate) k: u32, + pub(crate) n: u64, + pub(crate) g: Vec, + pub(crate) g_lagrange: Vec, + pub(crate) w: C, + pub(crate) u: C, +} + +/// Concrete IPA commitment scheme +#[derive(Debug)] +pub struct IPACommitmentScheme { + _marker: PhantomData, +} + +impl CommitmentScheme for IPACommitmentScheme { + type Scalar = C::ScalarExt; + type Curve = C; + + type ParamsProver = ParamsIPA; + type ParamsVerifier = ParamsVerifierIPA; + + fn new_params(k: u32) -> Self::ParamsProver { + ParamsIPA::new(k) + } + + fn read_params(reader: &mut R) -> io::Result { + ParamsIPA::read(reader) + } +} + +/// Verifier parameters +pub type ParamsVerifierIPA = ParamsIPA; + +impl<'params, C: CurveAffine> ParamsVerifier<'params, C> for ParamsIPA {} + +impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { + type MSM = MSMIPA<'params, C>; + + fn k(&self) -> u32 { + self.k + } + + fn n(&self) -> u64 { + self.n + } + + fn downsize(&mut self, k: u32) { + assert!(k <= self.k); + 
+ self.k = k; + self.n = 1 << k; + self.g.truncate(self.n as usize); + self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); + } + + fn empty_msm(&'params self) -> MSMIPA { + MSMIPA::new(self) + } + + /// This commits to a polynomial using its evaluations over the $2^k$ size + /// evaluation domain. The commitment will be blinded by the blinding factor + /// `r`. + fn commit_lagrange( + &self, + poly: &Polynomial, + r: Blind, + ) -> C::Curve { + let mut tmp_scalars = Vec::with_capacity(poly.len() + 1); + let mut tmp_bases = Vec::with_capacity(poly.len() + 1); + + tmp_scalars.extend(poly.iter()); + tmp_scalars.push(r.0); + + tmp_bases.extend(self.g_lagrange.iter()); + tmp_bases.push(self.w); + + best_multiexp::(&tmp_scalars, &tmp_bases) + } + + /// Writes params to a buffer. + fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&self.k.to_le_bytes())?; + for g_element in &self.g { + writer.write_all(g_element.to_bytes().as_ref())?; + } + for g_lagrange_element in &self.g_lagrange { + writer.write_all(g_lagrange_element.to_bytes().as_ref())?; + } + writer.write_all(self.w.to_bytes().as_ref())?; + writer.write_all(self.u.to_bytes().as_ref())?; + + Ok(()) + } + + /// Reads params from a buffer. + fn read(reader: &mut R) -> io::Result { + let mut k = [0u8; 4]; + reader.read_exact(&mut k[..])?; + let k = u32::from_le_bytes(k); + + let n: u64 = 1 << k; + + let g: Vec<_> = (0..n).map(|_| C::read(reader)).collect::>()?; + let g_lagrange: Vec<_> = (0..n).map(|_| C::read(reader)).collect::>()?; + + let w = C::read(reader)?; + let u = C::read(reader)?; + + Ok(Self { + k, + n, + g, + g_lagrange, + w, + u, + }) + } +} + +impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { + type ParamsVerifier = ParamsVerifierIPA; + + fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { + self + } + + /// Initializes parameters for the curve, given a random oracle to draw + /// points from. 
+ fn new(k: u32) -> Self { + // This is usually a limitation on the curve, but we also want 32-bit + // architectures to be supported. + assert!(k < 32); + + // In src/arithmetic/fields.rs we ensure that usize is at least 32 bits. + + let n: u64 = 1 << k; + + let g_projective = { + let mut g = Vec::with_capacity(n as usize); + g.resize(n as usize, C::Curve::identity()); + + parallelize(&mut g, move |g, start| { + let hasher = C::CurveExt::hash_to_curve("Halo2-Parameters"); + + for (i, g) in g.iter_mut().enumerate() { + let i = (i + start) as u32; + + let mut message = [0u8; 5]; + message[1..5].copy_from_slice(&i.to_le_bytes()); + + *g = hasher(&message); + } + }); + + g + }; + + let g = { + let mut g = vec![C::identity(); n as usize]; + parallelize(&mut g, |g, starts| { + C::Curve::batch_normalize(&g_projective[starts..(starts + g.len())], g); + }); + g + }; + + // Let's evaluate all of the Lagrange basis polynomials + // using an inverse FFT. + let g_lagrange = g_to_lagrange(g_projective, k); + + let hasher = C::CurveExt::hash_to_curve("Halo2-Parameters"); + let w = hasher(&[1]).to_affine(); + let u = hasher(&[2]).to_affine(); + + ParamsIPA { + k, + n, + g, + g_lagrange, + w, + u, + } + } + + /// This computes a commitment to a polynomial described by the provided + /// slice of coefficients. The commitment will be blinded by the blinding + /// factor `r`. 
+ fn commit(&self, poly: &Polynomial, r: Blind) -> C::Curve { + let mut tmp_scalars = Vec::with_capacity(poly.len() + 1); + let mut tmp_bases = Vec::with_capacity(poly.len() + 1); + + tmp_scalars.extend(poly.iter()); + tmp_scalars.push(r.0); + + tmp_bases.extend(self.g.iter()); + tmp_bases.push(self.w); + + best_multiexp::(&tmp_scalars, &tmp_bases) + } + + fn get_g(&self) -> &[C] { + &self.g + } +} + +#[cfg(test)] +mod test { + use crate::poly::commitment::ParamsProver; + use crate::poly::commitment::{Blind, Params, MSM}; + use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; + use crate::poly::ipa::msm::MSMIPA; + + use ff::Field; + use group::Curve; + + #[test] + fn test_commit_lagrange_epaffine() { + const K: u32 = 6; + + use rand_core::OsRng; + + use crate::poly::EvaluationDomain; + use halo2curves::pasta::{EpAffine, Fq}; + + let params = ParamsIPA::::new(K); + let domain = EvaluationDomain::new(1, K); + + let mut a = domain.empty_lagrange(); + + for (i, a) in a.iter_mut().enumerate() { + *a = Fq::from(i as u64); + } + + let b = domain.lagrange_to_coeff(a.clone()); + + let alpha = Blind(Fq::random(OsRng)); + + assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + } + + #[test] + fn test_commit_lagrange_eqaffine() { + const K: u32 = 6; + + use rand_core::OsRng; + + use crate::poly::EvaluationDomain; + use halo2curves::pasta::{EqAffine, Fp}; + + let params: ParamsIPA = ParamsIPA::::new(K); + let domain = EvaluationDomain::new(1, K); + + let mut a = domain.empty_lagrange(); + + for (i, a) in a.iter_mut().enumerate() { + *a = Fp::from(i as u64); + } + + let b = domain.lagrange_to_coeff(a.clone()); + + let alpha = Blind(Fp::random(OsRng)); + + assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + } + + #[test] + fn test_opening_proof() { + const K: u32 = 6; + + use ff::Field; + use rand_core::OsRng; + + use super::super::commitment::{Blind, Params}; + use crate::arithmetic::eval_polynomial; + use 
crate::halo2curves::pasta::{EpAffine, Fq}; + use crate::poly::EvaluationDomain; + use crate::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, Transcript, TranscriptRead, TranscriptWrite, + }; + + use crate::transcript::TranscriptReadBuffer; + use crate::transcript::TranscriptWriterBuffer; + + let rng = OsRng; + + let params = ParamsIPA::::new(K); + let mut params_buffer = vec![]; + as Params<_>>::write(¶ms, &mut params_buffer).unwrap(); + let params: ParamsIPA = Params::read::<_>(&mut ¶ms_buffer[..]).unwrap(); + + let domain = EvaluationDomain::new(1, K); + + let mut px = domain.empty_coeff(); + + for (i, a) in px.iter_mut().enumerate() { + *a = Fq::from(i as u64); + } + + let blind = Blind(Fq::random(rng)); + + let p = params.commit(&px, blind).to_affine(); + + let mut transcript = + Blake2bWrite::, EpAffine, Challenge255>::init(vec![]); + transcript.write_point(p).unwrap(); + let x = transcript.squeeze_challenge_scalar::<()>(); + // Evaluate the polynomial + let v = eval_polynomial(&px, *x); + transcript.write_scalar(v).unwrap(); + + let (proof, ch_prover) = { + create_proof(¶ms, rng, &mut transcript, &px, blind, *x).unwrap(); + let ch_prover = transcript.squeeze_challenge(); + (transcript.finalize(), ch_prover) + }; + + // Verify the opening proof + let mut transcript = + Blake2bRead::<&[u8], EpAffine, Challenge255>::init(&proof[..]); + let p_prime = transcript.read_point().unwrap(); + assert_eq!(p, p_prime); + let x_prime = transcript.squeeze_challenge_scalar::<()>(); + assert_eq!(*x, *x_prime); + let v_prime = transcript.read_scalar().unwrap(); + assert_eq!(v, v_prime); + + let mut commitment_msm = MSMIPA::new(¶ms); + commitment_msm.append_term(Fq::one(), p.into()); + + let guard = verify_proof(¶ms, commitment_msm, &mut transcript, *x, v).unwrap(); + let ch_verifier = transcript.squeeze_challenge(); + assert_eq!(*ch_prover, *ch_verifier); + + // Test guard behavior prior to checking another proof + { + // Test use_challenges() + let msm_challenges = 
guard.clone().use_challenges(); + assert!(msm_challenges.check()); + + // Test use_g() + let g = guard.compute_g(); + let (msm_g, _accumulator) = guard.clone().use_g(g); + assert!(msm_g.check()); + } + } +} diff --git a/halo2_backend/src/poly/ipa/commitment/prover.rs b/halo2_backend/src/poly/ipa/commitment/prover.rs new file mode 100644 index 0000000000..344dbc0e65 --- /dev/null +++ b/halo2_backend/src/poly/ipa/commitment/prover.rs @@ -0,0 +1,167 @@ +use ff::Field; +use rand_core::RngCore; + +use super::ParamsIPA; +use crate::arithmetic::{ + best_multiexp, compute_inner_product, eval_polynomial, parallelize, CurveAffine, +}; + +use crate::poly::commitment::ParamsProver; +use crate::poly::{commitment::Blind, Coeff, Polynomial}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; + +use group::Curve; +use std::io::{self}; + +/// Create a polynomial commitment opening proof for the polynomial defined +/// by the coefficients `px`, the blinding factor `blind` used for the +/// polynomial commitment, and the point `x` that the polynomial is +/// evaluated at. +/// +/// This function will panic if the provided polynomial is too large with +/// respect to the polynomial commitment parameters. +/// +/// **Important:** This function assumes that the provided `transcript` has +/// already seen the common inputs: the polynomial commitment P, the claimed +/// opening v, and the point x. It's probably also nice for the transcript +/// to have seen the elliptic curve description and the URS, if you want to +/// be rigorous. +pub fn create_proof< + C: CurveAffine, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>( + params: &ParamsIPA, + mut rng: R, + transcript: &mut T, + p_poly: &Polynomial, + p_blind: Blind, + x_3: C::Scalar, +) -> io::Result<()> { + // We're limited to polynomials of degree n - 1. 
+ assert_eq!(p_poly.len(), params.n as usize); + + // Sample a random polynomial (of same degree) that has a root at x_3, first + // by setting all coefficients to random values. + let mut s_poly = (*p_poly).clone(); + for coeff in s_poly.iter_mut() { + *coeff = C::Scalar::random(&mut rng); + } + // Evaluate the random polynomial at x_3 + let s_at_x3 = eval_polynomial(&s_poly[..], x_3); + // Subtract constant coefficient to get a random polynomial with a root at x_3 + s_poly[0] -= &s_at_x3; + // And sample a random blind + let s_poly_blind = Blind(C::Scalar::random(&mut rng)); + + // Write a commitment to the random polynomial to the transcript + let s_poly_commitment = params.commit(&s_poly, s_poly_blind).to_affine(); + transcript.write_point(s_poly_commitment)?; + + // Challenge that will ensure that the prover cannot change P but can only + // witness a random polynomial commitment that agrees with P at x_3, with high + // probability. + let xi = *transcript.squeeze_challenge_scalar::<()>(); + + // Challenge that ensures that the prover did not interfere with the U term + // in their commitments. + let z = *transcript.squeeze_challenge_scalar::<()>(); + + // We'll be opening `P' = P - [v] G_0 + [ξ] S` to ensure it has a root at + // zero. + let mut p_prime_poly = s_poly * xi + p_poly; + let v = eval_polynomial(&p_prime_poly, x_3); + p_prime_poly[0] -= &v; + let p_prime_blind = s_poly_blind * Blind(xi) + p_blind; + + // This accumulates the synthetic blinding factor `f` starting + // with the blinding factor for `P'`. + let mut f = p_prime_blind.0; + + // Initialize the vector `p_prime` as the coefficients of the polynomial. + let mut p_prime = p_prime_poly.values; + assert_eq!(p_prime.len(), params.n as usize); + + // Initialize the vector `b` as the powers of `x_3`. The inner product of + // `p_prime` and `b` is the evaluation of the polynomial at `x_3`. 
+ let mut b = Vec::with_capacity(1 << params.k); + { + let mut cur = C::Scalar::ONE; + for _ in 0..(1 << params.k) { + b.push(cur); + cur *= &x_3; + } + } + + // Initialize the vector `G'` from the URS. We'll be progressively collapsing + // this vector into smaller and smaller vectors until it is of length 1. + let mut g_prime = params.g.clone(); + + // Perform the inner product argument, round by round. + for j in 0..params.k { + let half = 1 << (params.k - j - 1); // half the length of `p_prime`, `b`, `G'` + + // Compute L, R + // + // TODO: If we modify multiexp to take "extra" bases, we could speed + // this piece up a bit by combining the multiexps. + let l_j = best_multiexp(&p_prime[half..], &g_prime[0..half]); + let r_j = best_multiexp(&p_prime[0..half], &g_prime[half..]); + let value_l_j = compute_inner_product(&p_prime[half..], &b[0..half]); + let value_r_j = compute_inner_product(&p_prime[0..half], &b[half..]); + let l_j_randomness = C::Scalar::random(&mut rng); + let r_j_randomness = C::Scalar::random(&mut rng); + let l_j = l_j + &best_multiexp(&[value_l_j * &z, l_j_randomness], &[params.u, params.w]); + let r_j = r_j + &best_multiexp(&[value_r_j * &z, r_j_randomness], &[params.u, params.w]); + let l_j = l_j.to_affine(); + let r_j = r_j.to_affine(); + + // Feed L and R into the real transcript + transcript.write_point(l_j)?; + transcript.write_point(r_j)?; + + let u_j = *transcript.squeeze_challenge_scalar::<()>(); + let u_j_inv = u_j.invert().unwrap(); // TODO, bubble this up + + // Collapse `p_prime` and `b`. 
+ // TODO: parallelize + for i in 0..half { + p_prime[i] = p_prime[i] + &(p_prime[i + half] * &u_j_inv); + b[i] = b[i] + &(b[i + half] * &u_j); + } + p_prime.truncate(half); + b.truncate(half); + + // Collapse `G'` + parallel_generator_collapse(&mut g_prime, u_j); + g_prime.truncate(half); + + // Update randomness (the synthetic blinding factor at the end) + f += &(l_j_randomness * &u_j_inv); + f += &(r_j_randomness * &u_j); + } + + // We have fully collapsed `p_prime`, `b`, `G'` + assert_eq!(p_prime.len(), 1); + let c = p_prime[0]; + + transcript.write_scalar(c)?; + transcript.write_scalar(f)?; + + Ok(()) +} + +fn parallel_generator_collapse(g: &mut [C], challenge: C::Scalar) { + let len = g.len() / 2; + let (g_lo, g_hi) = g.split_at_mut(len); + + parallelize(g_lo, |g_lo, start| { + let g_hi = &g_hi[start..]; + let mut tmp = Vec::with_capacity(g_lo.len()); + for (g_lo, g_hi) in g_lo.iter().zip(g_hi.iter()) { + tmp.push(g_lo.to_curve() + &(*g_hi * challenge)); + } + C::Curve::batch_normalize(&tmp, g_lo); + }); +} diff --git a/halo2_backend/src/poly/ipa/commitment/verifier.rs b/halo2_backend/src/poly/ipa/commitment/verifier.rs new file mode 100644 index 0000000000..cf258625d5 --- /dev/null +++ b/halo2_backend/src/poly/ipa/commitment/verifier.rs @@ -0,0 +1,100 @@ +use group::ff::{BatchInvert, Field}; + +use super::ParamsIPA; +use crate::{arithmetic::CurveAffine, poly::ipa::strategy::GuardIPA}; +use crate::{ + poly::{commitment::MSM, ipa::msm::MSMIPA, Error}, + transcript::{EncodedChallenge, TranscriptRead}, +}; + +/// Checks to see if the proof represented within `transcript` is valid, and a +/// point `x` that the polynomial commitment `P` opens purportedly to the value +/// `v`. The provided `msm` should evaluate to the commitment `P` being opened. 
+pub fn verify_proof<'params, C: CurveAffine, E: EncodedChallenge, T: TranscriptRead>( + params: &'params ParamsIPA, + mut msm: MSMIPA<'params, C>, + transcript: &mut T, + x: C::Scalar, + v: C::Scalar, +) -> Result, Error> { + let k = params.k as usize; + + // P' = P - [v] G_0 + [ξ] S + msm.add_constant_term(-v); // add [-v] G_0 + let s_poly_commitment = transcript.read_point().map_err(|_| Error::OpeningError)?; + let xi = *transcript.squeeze_challenge_scalar::<()>(); + msm.append_term(xi, s_poly_commitment.into()); + + let z = *transcript.squeeze_challenge_scalar::<()>(); + + let mut rounds = vec![]; + for _ in 0..k { + // Read L and R from the proof and write them to the transcript + let l = transcript.read_point().map_err(|_| Error::OpeningError)?; + let r = transcript.read_point().map_err(|_| Error::OpeningError)?; + + let u_j_packed = transcript.squeeze_challenge(); + let u_j = *u_j_packed.as_challenge_scalar::<()>(); + + rounds.push((l, r, u_j, /* to be inverted */ u_j, u_j_packed)); + } + + rounds + .iter_mut() + .map(|&mut (_, _, _, ref mut u_j, _)| u_j) + .batch_invert(); + + // This is the left-hand side of the verifier equation. + // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) + let mut u = Vec::with_capacity(k); + let mut u_packed: Vec = Vec::with_capacity(k); + for (l, r, u_j, u_j_inv, u_j_packed) in rounds { + msm.append_term(u_j_inv, l.into()); + msm.append_term(u_j, r.into()); + + u.push(u_j); + u_packed.push(u_j_packed.get_scalar()); + } + + // Our goal is to check that the left hand side of the verifier + // equation + // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) + // equals (given b = \mathbf{b}_0, and the prover's values c, f), + // the right-hand side + // = [c] (G'_0 + [b * z] U) + [f] W + // Subtracting the right-hand side from both sides we get + // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) + // + [-c] G'_0 + [-cbz] U + [-f] W + // = 0 + // + // Note that the guard returned from this function does not include + // the [-c]G'_0 term. 
+ + let c = transcript.read_scalar().map_err(|_| Error::SamplingError)?; + let neg_c = -c; + let f = transcript.read_scalar().map_err(|_| Error::SamplingError)?; + let b = compute_b(x, &u); + + msm.add_to_u_scalar(neg_c * &b * &z); + msm.add_to_w_scalar(-f); + + let guard = GuardIPA { + msm, + neg_c, + u, + u_packed, + }; + + Ok(guard) +} + +/// Computes $\prod\limits_{i=0}^{k-1} (1 + u_{k - 1 - i} x^{2^i})$. +fn compute_b(x: F, u: &[F]) -> F { + let mut tmp = F::ONE; + let mut cur = x; + for u_j in u.iter().rev() { + tmp *= F::ONE + &(*u_j * &cur); + cur *= cur; + } + tmp +} diff --git a/halo2_backend/src/poly/ipa/mod.rs b/halo2_backend/src/poly/ipa/mod.rs new file mode 100644 index 0000000000..3600e2f051 --- /dev/null +++ b/halo2_backend/src/poly/ipa/mod.rs @@ -0,0 +1,7 @@ +pub mod commitment; +/// Multiscalar multiplication engines +pub mod msm; +/// IPA multi-open scheme +pub mod multiopen; +/// Strategies used with KZG scheme +pub mod strategy; diff --git a/halo2_backend/src/poly/ipa/msm.rs b/halo2_backend/src/poly/ipa/msm.rs new file mode 100644 index 0000000000..a615ddce49 --- /dev/null +++ b/halo2_backend/src/poly/ipa/msm.rs @@ -0,0 +1,271 @@ +use crate::arithmetic::{best_multiexp, CurveAffine}; +use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; +use ff::Field; +use group::Group; +use std::collections::BTreeMap; + +/// A multiscalar multiplication in the polynomial commitment scheme +#[derive(Debug, Clone)] +pub struct MSMIPA<'params, C: CurveAffine> { + pub(crate) params: &'params ParamsVerifierIPA, + g_scalars: Option>, + w_scalar: Option, + u_scalar: Option, + // x-coordinate -> (scalar, y-coordinate) + other: BTreeMap, +} + +impl<'a, C: CurveAffine> MSMIPA<'a, C> { + /// Given verifier parameters Creates an empty multi scalar engine + pub fn new(params: &'a ParamsVerifierIPA) -> Self { + let g_scalars = None; + let w_scalar = None; + let u_scalar = None; + let other = BTreeMap::new(); + + Self { + g_scalars, + w_scalar, + u_scalar, 
+ other, + + params, + } + } + + /// Add another multiexp into this one + pub fn add_msm(&mut self, other: &Self) { + for (x, (scalar, y)) in other.other.iter() { + self.other + .entry(*x) + .and_modify(|(our_scalar, our_y)| { + if our_y == y { + *our_scalar += *scalar; + } else { + assert!(*our_y == -*y); + *our_scalar -= *scalar; + } + }) + .or_insert((*scalar, *y)); + } + + if let Some(g_scalars) = &other.g_scalars { + self.add_to_g_scalars(g_scalars); + } + + if let Some(w_scalar) = &other.w_scalar { + self.add_to_w_scalar(*w_scalar); + } + + if let Some(u_scalar) = &other.u_scalar { + self.add_to_u_scalar(*u_scalar); + } + } +} + +impl<'a, C: CurveAffine> MSM for MSMIPA<'a, C> { + fn append_term(&mut self, scalar: C::Scalar, point: C::Curve) { + if !bool::from(point.is_identity()) { + use group::Curve; + let point = point.to_affine(); + let xy = point.coordinates().unwrap(); + let x = *xy.x(); + let y = *xy.y(); + + self.other + .entry(x) + .and_modify(|(our_scalar, our_y)| { + if *our_y == y { + *our_scalar += scalar; + } else { + assert!(*our_y == -y); + *our_scalar -= scalar; + } + }) + .or_insert((scalar, y)); + } + } + + /// Add another multiexp into this one + fn add_msm(&mut self, other: &Self) { + for (x, (scalar, y)) in other.other.iter() { + self.other + .entry(*x) + .and_modify(|(our_scalar, our_y)| { + if our_y == y { + *our_scalar += *scalar; + } else { + assert!(*our_y == -*y); + *our_scalar -= *scalar; + } + }) + .or_insert((*scalar, *y)); + } + + if let Some(g_scalars) = &other.g_scalars { + self.add_to_g_scalars(g_scalars); + } + + if let Some(w_scalar) = &other.w_scalar { + self.add_to_w_scalar(*w_scalar); + } + + if let Some(u_scalar) = &other.u_scalar { + self.add_to_u_scalar(*u_scalar); + } + } + + fn scale(&mut self, factor: C::Scalar) { + if let Some(g_scalars) = &mut self.g_scalars { + for g_scalar in g_scalars { + *g_scalar *= &factor; + } + } + + for other in self.other.values_mut() { + other.0 *= factor; + } + + self.w_scalar = 
self.w_scalar.map(|a| a * &factor); + self.u_scalar = self.u_scalar.map(|a| a * &factor); + } + + fn check(&self) -> bool { + bool::from(self.eval().is_identity()) + } + + fn eval(&self) -> C::Curve { + let len = self.g_scalars.as_ref().map(|v| v.len()).unwrap_or(0) + + self.w_scalar.map(|_| 1).unwrap_or(0) + + self.u_scalar.map(|_| 1).unwrap_or(0) + + self.other.len(); + let mut scalars: Vec = Vec::with_capacity(len); + let mut bases: Vec = Vec::with_capacity(len); + + scalars.extend(self.other.values().map(|(scalar, _)| scalar)); + bases.extend( + self.other + .iter() + .map(|(x, (_, y))| C::from_xy(*x, *y).unwrap()), + ); + + if let Some(w_scalar) = self.w_scalar { + scalars.push(w_scalar); + bases.push(self.params.w); + } + + if let Some(u_scalar) = self.u_scalar { + scalars.push(u_scalar); + bases.push(self.params.u); + } + + if let Some(g_scalars) = &self.g_scalars { + scalars.extend(g_scalars); + bases.extend(self.params.g.iter()); + } + + assert_eq!(scalars.len(), len); + + best_multiexp(&scalars, &bases) + } + + fn bases(&self) -> Vec { + self.other + .iter() + .map(|(x, (_, y))| C::from_xy(*x, *y).unwrap().into()) + .collect() + } + + fn scalars(&self) -> Vec { + self.other.values().map(|(scalar, _)| *scalar).collect() + } +} + +impl<'a, C: CurveAffine> MSMIPA<'a, C> { + /// Add a value to the first entry of `g_scalars`. + pub fn add_constant_term(&mut self, constant: C::Scalar) { + if let Some(g_scalars) = self.g_scalars.as_mut() { + g_scalars[0] += &constant; + } else { + let mut g_scalars = vec![C::Scalar::ZERO; self.params.n as usize]; + g_scalars[0] += &constant; + self.g_scalars = Some(g_scalars); + } + } + + /// Add a vector of scalars to `g_scalars`. This function will panic if the + /// caller provides a slice of scalars that is not of length `params.n`. 
+ pub fn add_to_g_scalars(&mut self, scalars: &[C::Scalar]) { + assert_eq!(scalars.len(), self.params.n as usize); + if let Some(g_scalars) = &mut self.g_scalars { + for (g_scalar, scalar) in g_scalars.iter_mut().zip(scalars.iter()) { + *g_scalar += scalar; + } + } else { + self.g_scalars = Some(scalars.to_vec()); + } + } + /// Add to `w_scalar` + pub fn add_to_w_scalar(&mut self, scalar: C::Scalar) { + self.w_scalar = self.w_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); + } + + /// Add to `u_scalar` + pub fn add_to_u_scalar(&mut self, scalar: C::Scalar) { + self.u_scalar = self.u_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); + } +} + +#[cfg(test)] +mod tests { + use crate::poly::{ + commitment::{ParamsProver, MSM}, + ipa::{commitment::ParamsIPA, msm::MSMIPA}, + }; + use halo2curves::{ + pasta::{Ep, EpAffine, Fp, Fq}, + CurveAffine, + }; + + #[test] + fn msm_arithmetic() { + let base: Ep = EpAffine::from_xy(-Fp::one(), Fp::from(2)).unwrap().into(); + let base_viol = base + base; + + let params = ParamsIPA::new(4); + let mut a: MSMIPA = MSMIPA::new(¶ms); + a.append_term(Fq::one(), base); + // a = [1] P + assert!(!a.clone().check()); + a.append_term(Fq::one(), base); + // a = [1+1] P + assert!(!a.clone().check()); + a.append_term(-Fq::one(), base_viol); + // a = [1+1] P + [-1] 2P + assert!(a.clone().check()); + let b = a.clone(); + + // Append a point that is the negation of an existing one. + a.append_term(Fq::from(4), -base); + // a = [1+1-4] P + [-1] 2P + assert!(!a.clone().check()); + a.append_term(Fq::from(2), base_viol); + // a = [1+1-4] P + [-1+2] 2P + assert!(a.clone().check()); + + // Add two MSMs with common bases. 
+ a.scale(Fq::from(3)); + a.add_msm(&b); + // a = [3*(1+1)+(1+1-4)] P + [3*(-1)+(-1+2)] 2P + assert!(a.clone().check()); + + let mut c: MSMIPA = MSMIPA::new(¶ms); + c.append_term(Fq::from(2), base); + c.append_term(Fq::one(), -base_viol); + // c = [2] P + [1] (-2P) + assert!(c.clone().check()); + // Add two MSMs with bases that differ only in sign. + a.add_msm(&c); + assert!(a.check()); + } +} diff --git a/halo2_backend/src/poly/ipa/multiopen.rs b/halo2_backend/src/poly/ipa/multiopen.rs new file mode 100644 index 0000000000..b78acb5934 --- /dev/null +++ b/halo2_backend/src/poly/ipa/multiopen.rs @@ -0,0 +1,172 @@ +//! This module contains an optimisation of the polynomial commitment opening +//! scheme described in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 + +use super::*; +use crate::{poly::query::Query, transcript::ChallengeScalar}; +use ff::Field; +use std::collections::{BTreeMap, BTreeSet}; + +mod prover; +mod verifier; + +pub use prover::ProverIPA; +pub use verifier::VerifierIPA; + +#[derive(Clone, Copy, Debug)] +struct X1 {} +/// Challenge for compressing openings at the same point sets together. +type ChallengeX1 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X2 {} +/// Challenge for keeping the multi-point quotient polynomial terms linearly independent. +type ChallengeX2 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X3 {} +/// Challenge point at which the commitments are opened. +type ChallengeX3 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X4 {} +/// Challenge for collapsing the openings of the various remaining polynomials at x_3 +/// together. 
+type ChallengeX4 = ChallengeScalar; + +#[derive(Debug)] +struct CommitmentData { + pub(crate) commitment: T, + pub(crate) set_index: usize, + pub(crate) point_indices: Vec, + pub(crate) evals: Vec, +} + +impl CommitmentData { + fn new(commitment: T) -> Self { + CommitmentData { + commitment, + set_index: 0, + point_indices: vec![], + evals: vec![], + } + } +} + +type IntermediateSets = ( + Vec>::Eval, >::Commitment>>, + Vec>, +); + +fn construct_intermediate_sets>(queries: I) -> IntermediateSets +where + I: IntoIterator + Clone, +{ + // Construct sets of unique commitments and corresponding information about + // their queries. + let mut commitment_map: Vec> = vec![]; + + // Also construct mapping from a unique point to a point_index. This defines + // an ordering on the points. + let mut point_index_map = BTreeMap::new(); + + // Iterate over all of the queries, computing the ordering of the points + // while also creating new commitment data. + for query in queries.clone() { + let num_points = point_index_map.len(); + let point_idx = point_index_map + .entry(query.get_point()) + .or_insert(num_points); + + if let Some(pos) = commitment_map + .iter() + .position(|comm| comm.commitment == query.get_commitment()) + { + commitment_map[pos].point_indices.push(*point_idx); + } else { + let mut tmp = CommitmentData::new(query.get_commitment()); + tmp.point_indices.push(*point_idx); + commitment_map.push(tmp); + } + } + + // Also construct inverse mapping from point_index to the point + let mut inverse_point_index_map = BTreeMap::new(); + for (&point, &point_index) in point_index_map.iter() { + inverse_point_index_map.insert(point_index, point); + } + + // Construct map of unique ordered point_idx_sets to their set_idx + let mut point_idx_sets = BTreeMap::new(); + // Also construct mapping from commitment to point_idx_set + let mut commitment_set_map = Vec::new(); + + for commitment_data in commitment_map.iter() { + let mut point_index_set = BTreeSet::new(); + // Note 
that point_index_set is ordered, unlike point_indices + for &point_index in commitment_data.point_indices.iter() { + point_index_set.insert(point_index); + } + + // Push point_index_set to CommitmentData for the relevant commitment + commitment_set_map.push((commitment_data.commitment, point_index_set.clone())); + + let num_sets = point_idx_sets.len(); + point_idx_sets.entry(point_index_set).or_insert(num_sets); + } + + // Initialise empty evals vec for each unique commitment + for commitment_data in commitment_map.iter_mut() { + let len = commitment_data.point_indices.len(); + commitment_data.evals = vec![Q::Eval::default(); len]; + } + + // Populate set_index, evals and points for each commitment using point_idx_sets + for query in queries { + // The index of the point at which the commitment is queried + let point_index = point_index_map.get(&query.get_point()).unwrap(); + + // The point_index_set at which the commitment was queried + let mut point_index_set = BTreeSet::new(); + for (commitment, point_idx_set) in commitment_set_map.iter() { + if query.get_commitment() == *commitment { + point_index_set = point_idx_set.clone(); + } + } + assert!(!point_index_set.is_empty()); + + // The set_index of the point_index_set + let set_index = point_idx_sets.get(&point_index_set).unwrap(); + for commitment_data in commitment_map.iter_mut() { + if query.get_commitment() == commitment_data.commitment { + commitment_data.set_index = *set_index; + } + } + let point_index_set: Vec = point_index_set.iter().cloned().collect(); + + // The offset of the point_index in the point_index_set + let point_index_in_set = point_index_set + .iter() + .position(|i| i == point_index) + .unwrap(); + + for commitment_data in commitment_map.iter_mut() { + if query.get_commitment() == commitment_data.commitment { + // Insert the eval using the ordering of the point_index_set + commitment_data.evals[point_index_in_set] = query.get_eval(); + } + } + } + + // Get actual points in each point set + 
let mut point_sets: Vec> = vec![Vec::new(); point_idx_sets.len()]; + for (point_idx_set, &set_idx) in point_idx_sets.iter() { + for &point_idx in point_idx_set.iter() { + let point = inverse_point_index_map.get(&point_idx).unwrap(); + point_sets[set_idx].push(*point); + } + } + + (commitment_map, point_sets) +} diff --git a/halo2_backend/src/poly/ipa/multiopen/prover.rs b/halo2_backend/src/poly/ipa/multiopen/prover.rs new file mode 100644 index 0000000000..2ae745d457 --- /dev/null +++ b/halo2_backend/src/poly/ipa/multiopen/prover.rs @@ -0,0 +1,122 @@ +use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; +use crate::arithmetic::{eval_polynomial, kate_division, CurveAffine}; +use crate::poly::commitment::ParamsProver; +use crate::poly::commitment::{Blind, Prover}; +use crate::poly::ipa::commitment::{self, IPACommitmentScheme, ParamsIPA}; +use crate::poly::query::ProverQuery; +use crate::poly::{Coeff, Polynomial}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; + +use ff::Field; +use group::Curve; +use rand_core::RngCore; +use std::io; +use std::marker::PhantomData; + +/// IPA multi-open prover +#[derive(Debug)] +pub struct ProverIPA<'params, C: CurveAffine> { + pub(crate) params: &'params ParamsIPA, +} + +impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for ProverIPA<'params, C> { + const QUERY_INSTANCE: bool = true; + + fn new(params: &'params ParamsIPA) -> Self { + Self { params } + } + + /// Create a multi-opening proof + fn create_proof<'com, Z: EncodedChallenge, T: TranscriptWrite, R, I>( + &self, + mut rng: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore, + { + let x_1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); + let x_2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); + + let (poly_map, point_sets) = construct_intermediate_sets(queries); + + // Collapse openings at same point sets together into single 
openings using + // x_1 challenge. + let mut q_polys: Vec>> = vec![None; point_sets.len()]; + let mut q_blinds = vec![Blind(C::Scalar::ZERO); point_sets.len()]; + + { + let mut accumulate = |set_idx: usize, + new_poly: &Polynomial, + blind: Blind| { + if let Some(poly) = &q_polys[set_idx] { + q_polys[set_idx] = Some(poly.clone() * *x_1 + new_poly); + } else { + q_polys[set_idx] = Some(new_poly.clone()); + } + q_blinds[set_idx] *= *x_1; + q_blinds[set_idx] += blind; + }; + + for commitment_data in poly_map.into_iter() { + accumulate( + commitment_data.set_index, // set_idx, + commitment_data.commitment.poly, // poly, + commitment_data.commitment.blind, // blind, + ); + } + } + + let q_prime_poly = point_sets + .iter() + .zip(q_polys.iter()) + .fold(None, |q_prime_poly, (points, poly)| { + let mut poly = points + .iter() + .fold(poly.clone().unwrap().values, |poly, point| { + kate_division(&poly, *point) + }); + poly.resize(self.params.n as usize, C::Scalar::ZERO); + let poly = Polynomial { + values: poly, + _marker: PhantomData, + }; + + if q_prime_poly.is_none() { + Some(poly) + } else { + q_prime_poly.map(|q_prime_poly| q_prime_poly * *x_2 + &poly) + } + }) + .unwrap(); + + let q_prime_blind = Blind(C::Scalar::random(&mut rng)); + let q_prime_commitment = self.params.commit(&q_prime_poly, q_prime_blind).to_affine(); + + transcript.write_point(q_prime_commitment)?; + + let x_3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); + + // Prover sends u_i for all i, which correspond to the evaluation + // of each Q polynomial commitment at x_3. 
+ for q_i_poly in &q_polys { + transcript.write_scalar(eval_polynomial(q_i_poly.as_ref().unwrap(), *x_3))?; + } + + let x_4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); + + let (p_poly, p_poly_blind) = q_polys.into_iter().zip(q_blinds).fold( + (q_prime_poly, q_prime_blind), + |(q_prime_poly, q_prime_blind), (poly, blind)| { + ( + q_prime_poly * *x_4 + &poly.unwrap(), + Blind((q_prime_blind.0 * &(*x_4)) + &blind.0), + ) + }, + ); + + commitment::create_proof(self.params, rng, transcript, &p_poly, p_poly_blind, *x_3) + } +} diff --git a/halo2_backend/src/poly/ipa/multiopen/verifier.rs b/halo2_backend/src/poly/ipa/multiopen/verifier.rs new file mode 100644 index 0000000000..d559e33384 --- /dev/null +++ b/halo2_backend/src/poly/ipa/multiopen/verifier.rs @@ -0,0 +1,148 @@ +use std::fmt::Debug; + +use ff::Field; + +use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; +use crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; +use crate::poly::commitment::{Params, Verifier, MSM}; +use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA, ParamsVerifierIPA}; +use crate::poly::ipa::msm::MSMIPA; +use crate::poly::ipa::strategy::GuardIPA; +use crate::poly::query::{CommitmentReference, VerifierQuery}; +use crate::poly::Error; +use crate::transcript::{EncodedChallenge, TranscriptRead}; + +/// IPA multi-open verifier +#[derive(Debug)] +pub struct VerifierIPA<'params, C: CurveAffine> { + params: &'params ParamsIPA, +} + +impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> + for VerifierIPA<'params, C> +{ + type Guard = GuardIPA<'params, C>; + type MSMAccumulator = MSMIPA<'params, C>; + + const QUERY_INSTANCE: bool = true; + + fn new(params: &'params ParamsVerifierIPA) -> Self { + Self { params } + } + + fn verify_proof<'com, E: EncodedChallenge, T: TranscriptRead, I>( + &self, + transcript: &mut T, + queries: I, + mut msm: MSMIPA<'params, C>, + ) -> Result + where + 'params: 'com, + 
I: IntoIterator>> + Clone, + { + // Sample x_1 for compressing openings at the same point sets together + let x_1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); + + // Sample a challenge x_2 for keeping the multi-point quotient + // polynomial terms linearly independent. + let x_2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); + + let (commitment_map, point_sets) = construct_intermediate_sets(queries); + + // Compress the commitments and expected evaluations at x together. + // using the challenge x_1 + let mut q_commitments: Vec<_> = vec![ + (self.params.empty_msm(), C::Scalar::ONE); // (accumulator, next x_1 power). + point_sets.len()]; + + // A vec of vecs of evals. The outer vec corresponds to the point set, + // while the inner vec corresponds to the points in a particular set. + let mut q_eval_sets = Vec::with_capacity(point_sets.len()); + for point_set in point_sets.iter() { + q_eval_sets.push(vec![C::Scalar::ZERO; point_set.len()]); + } + + { + let mut accumulate = |set_idx: usize, + new_commitment: CommitmentReference>, + evals: Vec| { + let (q_commitment, x_1_power) = &mut q_commitments[set_idx]; + match new_commitment { + CommitmentReference::Commitment(c) => { + q_commitment.append_term(*x_1_power, (*c).into()); + } + CommitmentReference::MSM(msm) => { + let mut msm = msm.clone(); + msm.scale(*x_1_power); + q_commitment.add_msm(&msm); + } + } + for (eval, set_eval) in evals.iter().zip(q_eval_sets[set_idx].iter_mut()) { + *set_eval += (*eval) * (*x_1_power); + } + *x_1_power *= *x_1; + }; + + // Each commitment corresponds to evaluations at a set of points. + // For each set, we collapse each commitment's evals pointwise. + // Run in order of increasing x_1 powers. 
+ for commitment_data in commitment_map.into_iter().rev() { + accumulate( + commitment_data.set_index, // set_idx, + commitment_data.commitment, // commitment, + commitment_data.evals, // evals + ); + } + } + + // Obtain the commitment to the multi-point quotient polynomial f(X). + let q_prime_commitment = transcript.read_point().map_err(|_| Error::SamplingError)?; + + // Sample a challenge x_3 for checking that f(X) was committed to + // correctly. + let x_3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); + + // u is a vector containing the evaluations of the Q polynomial + // commitments at x_3 + let mut u = Vec::with_capacity(q_eval_sets.len()); + for _ in 0..q_eval_sets.len() { + u.push(transcript.read_scalar().map_err(|_| Error::SamplingError)?); + } + + // We can compute the expected msm_eval at x_3 using the u provided + // by the prover and from x_2 + let msm_eval = point_sets + .iter() + .zip(q_eval_sets.iter()) + .zip(u.iter()) + .fold( + C::Scalar::ZERO, + |msm_eval, ((points, evals), proof_eval)| { + let r_poly = lagrange_interpolate(points, evals); + let r_eval = eval_polynomial(&r_poly, *x_3); + let eval = points.iter().fold(*proof_eval - &r_eval, |eval, point| { + eval * &(*x_3 - point).invert().unwrap() + }); + msm_eval * &(*x_2) + &eval + }, + ); + + // Sample a challenge x_4 that we will use to collapse the openings of + // the various remaining polynomials at x_3 together. 
+ let x_4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); + + // Compute the final commitment that has to be opened + msm.append_term(C::Scalar::ONE, q_prime_commitment.into()); + let (msm, v) = q_commitments.into_iter().zip(u.iter()).fold( + (msm, msm_eval), + |(mut msm, msm_eval), ((q_commitment, _), q_eval)| { + msm.scale(*x_4); + msm.add_msm(&q_commitment); + (msm, msm_eval * &(*x_4) + q_eval) + }, + ); + + // Verify the opening proof + super::commitment::verify_proof(self.params, msm, transcript, *x_3, v) + } +} diff --git a/halo2_backend/src/poly/ipa/strategy.rs b/halo2_backend/src/poly/ipa/strategy.rs new file mode 100644 index 0000000000..d2d1b3d364 --- /dev/null +++ b/halo2_backend/src/poly/ipa/strategy.rs @@ -0,0 +1,171 @@ +use super::commitment::{IPACommitmentScheme, ParamsIPA}; +use super::msm::MSMIPA; +use super::multiopen::VerifierIPA; +use crate::{ + arithmetic::best_multiexp, + plonk::Error, + poly::{ + commitment::MSM, + strategy::{Guard, VerificationStrategy}, + }, +}; +use ff::Field; +use group::Curve; +use halo2curves::CurveAffine; +use rand_core::OsRng; + +/// Wrapper for verification accumulator +#[derive(Debug, Clone)] +pub struct GuardIPA<'params, C: CurveAffine> { + pub(crate) msm: MSMIPA<'params, C>, + pub(crate) neg_c: C::Scalar, + pub(crate) u: Vec, + pub(crate) u_packed: Vec, +} + +/// An accumulator instance consisting of an evaluation claim and a proof. +#[derive(Debug, Clone)] +pub struct Accumulator { + /// The claimed output of the linear-time polycommit opening protocol + pub g: C, + + /// A vector of challenges u_0, ..., u_{k - 1} sampled by the verifier, to + /// be used in computing G'_0. 
+ pub u_packed: Vec, +} + +/// Define accumulator type as `MSMIPA` +impl<'params, C: CurveAffine> Guard> for GuardIPA<'params, C> { + type MSMAccumulator = MSMIPA<'params, C>; +} + +/// IPA specific operations +impl<'params, C: CurveAffine> GuardIPA<'params, C> { + /// Lets caller supply the challenges and obtain an MSM with updated + /// scalars and points. + pub fn use_challenges(mut self) -> MSMIPA<'params, C> { + let s = compute_s(&self.u, self.neg_c); + self.msm.add_to_g_scalars(&s); + + self.msm + } + + /// Lets caller supply the purported G point and simply appends + /// [-c] G to return an updated MSM. + pub fn use_g(mut self, g: C) -> (MSMIPA<'params, C>, Accumulator) { + self.msm.append_term(self.neg_c, g.into()); + + let accumulator = Accumulator { + g, + u_packed: self.u_packed, + }; + + (self.msm, accumulator) + } + + /// Computes G = ⟨s, params.g⟩ + pub fn compute_g(&self) -> C { + let s = compute_s(&self.u, C::Scalar::ONE); + + best_multiexp(&s, &self.msm.params.g).to_affine() + } +} + +/// A verifier that checks multiple proofs in a batch. +#[derive(Debug)] +pub struct AccumulatorStrategy<'params, C: CurveAffine> { + msm: MSMIPA<'params, C>, +} + +impl<'params, C: CurveAffine> + VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> + for AccumulatorStrategy<'params, C> +{ + type Output = Self; + + fn new(params: &'params ParamsIPA) -> Self { + AccumulatorStrategy { + msm: MSMIPA::new(params), + } + } + + fn process( + mut self, + f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, + ) -> Result { + self.msm.scale(C::Scalar::random(OsRng)); + let guard = f(self.msm)?; + + Ok(Self { + msm: guard.use_challenges(), + }) + } + + /// Finalizes the batch and checks its validity. + /// + /// Returns `false` if *some* proof was invalid. If the caller needs to identify + /// specific failing proofs, it must re-process the proofs separately. 
+ #[must_use] + fn finalize(self) -> bool { + self.msm.check() + } +} + +/// A verifier that checks single proof +#[derive(Debug)] +pub struct SingleStrategy<'params, C: CurveAffine> { + msm: MSMIPA<'params, C>, +} + +impl<'params, C: CurveAffine> + VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> + for SingleStrategy<'params, C> +{ + type Output = (); + + fn new(params: &'params ParamsIPA) -> Self { + SingleStrategy { + msm: MSMIPA::new(params), + } + } + + fn process( + self, + f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, + ) -> Result { + let guard = f(self.msm)?; + let msm = guard.use_challenges(); + if msm.check() { + Ok(()) + } else { + Err(Error::ConstraintSystemFailure) + } + } + + /// Finalizes the batch and checks its validity. + /// + /// Returns `false` if *some* proof was invalid. If the caller needs to identify + /// specific failing proofs, it must re-process the proofs separately. + #[must_use] + fn finalize(self) -> bool { + unreachable!() + } +} + +/// Computes the coefficients of $g(X) = \prod\limits_{i=0}^{k-1} (1 + u_{k - 1 - i} X^{2^i})$. 
+fn compute_s(u: &[F], init: F) -> Vec { + assert!(!u.is_empty()); + let mut v = vec![F::ZERO; 1 << u.len()]; + v[0] = init; + + for (len, u_j) in u.iter().rev().enumerate().map(|(i, u_j)| (1 << i, u_j)) { + let (left, right) = v.split_at_mut(len); + let right = &mut right[0..len]; + right.copy_from_slice(left); + for v in right { + *v *= u_j; + } + } + + v +} diff --git a/halo2_backend/src/poly/kzg/commitment.rs b/halo2_backend/src/poly/kzg/commitment.rs new file mode 100644 index 0000000000..114b9ac013 --- /dev/null +++ b/halo2_backend/src/poly/kzg/commitment.rs @@ -0,0 +1,417 @@ +use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; +use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; +use crate::SerdeFormat; + +use ff::{Field, PrimeField}; +use group::{prime::PrimeCurveAffine, Curve, Group}; +use halo2curves::pairing::Engine; +use halo2curves::CurveExt; +use rand_core::{OsRng, RngCore}; +use std::fmt::Debug; +use std::marker::PhantomData; + +use std::io; + +use super::msm::MSMKZG; + +/// These are the public parameters for the polynomial commitment scheme. 
+#[derive(Debug, Clone)] +pub struct ParamsKZG { + pub(crate) k: u32, + pub(crate) n: u64, + pub(crate) g: Vec, + pub(crate) g_lagrange: Vec, + pub(crate) g2: E::G2Affine, + pub(crate) s_g2: E::G2Affine, +} + +/// Umbrella commitment scheme construction for all KZG variants +#[derive(Debug)] +pub struct KZGCommitmentScheme { + _marker: PhantomData, +} + +impl CommitmentScheme for KZGCommitmentScheme +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type Scalar = E::Fr; + type Curve = E::G1Affine; + + type ParamsProver = ParamsKZG; + type ParamsVerifier = ParamsVerifierKZG; + + fn new_params(k: u32) -> Self::ParamsProver { + ParamsKZG::new(k) + } + + fn read_params(reader: &mut R) -> io::Result { + ParamsKZG::read(reader) + } +} + +impl ParamsKZG +where + E::G1Affine: SerdeCurveAffine, + E::G1: CurveExt, +{ + /// Initializes parameters for the curve, draws toxic secret from given rng. + /// MUST NOT be used in production. + pub fn setup(k: u32, rng: R) -> Self { + // Largest root of unity exponent of the Engine is `2^E::Fr::S`, so we can + // only support FFTs of polynomials below degree `2^E::Fr::S`. + assert!(k <= E::Fr::S); + let n: u64 = 1 << k; + + // Calculate g = [G1, [s] G1, [s^2] G1, ..., [s^(n-1)] G1] in parallel. 
+ let g1 = E::G1Affine::generator(); + let s = ::random(rng); + + let mut g_projective = vec![E::G1::identity(); n as usize]; + parallelize(&mut g_projective, |g, start| { + let mut current_g: E::G1 = g1.into(); + current_g *= s.pow_vartime([start as u64]); + for g in g.iter_mut() { + *g = current_g; + current_g *= s; + } + }); + + let g = { + let mut g = vec![E::G1Affine::identity(); n as usize]; + parallelize(&mut g, |g, starts| { + E::G1::batch_normalize(&g_projective[starts..(starts + g.len())], g); + }); + g + }; + + let mut g_lagrange_projective = vec![E::G1::identity(); n as usize]; + let mut root = E::Fr::ROOT_OF_UNITY; + for _ in k..E::Fr::S { + root = root.square(); + } + let n_inv = E::Fr::from(n) + .invert() + .expect("inversion should be ok for n = 1<::generator(); + let s_g2 = (g2 * s).into(); + + Self { + k, + n, + g, + g_lagrange, + g2, + s_g2, + } + } + + /// Initializes parameters for the curve through existing parameters + /// k, g, g_lagrange (optional), g2, s_g2 + pub fn from_parts( + &self, + k: u32, + g: Vec, + g_lagrange: Option>, + g2: E::G2Affine, + s_g2: E::G2Affine, + ) -> Self { + Self { + k, + n: 1 << k, + g_lagrange: match g_lagrange { + Some(g_l) => g_l, + None => g_to_lagrange(g.iter().map(PrimeCurveAffine::to_curve).collect(), k), + }, + g, + g2, + s_g2, + } + } + + /// Returns gernerator on G2 + pub fn g2(&self) -> E::G2Affine { + self.g2 + } + + /// Returns first power of secret on G2 + pub fn s_g2(&self) -> E::G2Affine { + self.s_g2 + } + + /// Writes parameters to buffer + pub fn write_custom(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> + where + E::G2Affine: SerdeCurveAffine, + { + writer.write_all(&self.k.to_le_bytes())?; + for el in self.g.iter() { + el.write(writer, format)?; + } + for el in self.g_lagrange.iter() { + el.write(writer, format)?; + } + self.g2.write(writer, format)?; + self.s_g2.write(writer, format)?; + Ok(()) + } + + /// Reads params from a buffer. 
+ pub fn read_custom(reader: &mut R, format: SerdeFormat) -> io::Result + where + E::G2Affine: SerdeCurveAffine, + { + let mut k = [0u8; 4]; + reader.read_exact(&mut k[..])?; + let k = u32::from_le_bytes(k); + let n = 1 << k; + + let (g, g_lagrange) = match format { + SerdeFormat::Processed => { + use group::GroupEncoding; + let load_points_from_file_parallelly = + |reader: &mut R| -> io::Result>> { + let mut points_compressed = + vec![<::G1Affine as GroupEncoding>::Repr::default(); n]; + for points_compressed in points_compressed.iter_mut() { + reader.read_exact((*points_compressed).as_mut())?; + } + + let mut points = vec![Option::::None; n]; + parallelize(&mut points, |points, chunks| { + for (i, point) in points.iter_mut().enumerate() { + *point = Option::from(E::G1Affine::from_bytes( + &points_compressed[chunks + i], + )); + } + }); + Ok(points) + }; + + let g = load_points_from_file_parallelly(reader)?; + let g: Vec<::G1Affine> = g + .iter() + .map(|point| { + point.ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "invalid point encoding") + }) + }) + .collect::>()?; + let g_lagrange = load_points_from_file_parallelly(reader)?; + let g_lagrange: Vec<::G1Affine> = g_lagrange + .iter() + .map(|point| { + point.ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "invalid point encoding") + }) + }) + .collect::>()?; + (g, g_lagrange) + } + SerdeFormat::RawBytes => { + let g = (0..n) + .map(|_| ::read(reader, format)) + .collect::, _>>()?; + let g_lagrange = (0..n) + .map(|_| ::read(reader, format)) + .collect::, _>>()?; + (g, g_lagrange) + } + SerdeFormat::RawBytesUnchecked => { + // avoid try branching for performance + let g = (0..n) + .map(|_| ::read(reader, format).unwrap()) + .collect::>(); + let g_lagrange = (0..n) + .map(|_| ::read(reader, format).unwrap()) + .collect::>(); + (g, g_lagrange) + } + }; + + let g2 = E::G2Affine::read(reader, format)?; + let s_g2 = E::G2Affine::read(reader, format)?; + + Ok(Self { + k, + n: n as u64, + g, + 
g_lagrange, + g2, + s_g2, + }) + } +} + +// TODO: see the issue at https://github.com/appliedzkp/halo2/issues/45 +// So we probably need much smaller verifier key. However for new bases in g1 should be in verifier keys. +/// KZG multi-open verification parameters +pub type ParamsVerifierKZG = ParamsKZG; + +impl<'params, E: Engine + Debug> Params<'params, E::G1Affine> for ParamsKZG +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type MSM = MSMKZG; + + fn k(&self) -> u32 { + self.k + } + + fn n(&self) -> u64 { + self.n + } + + fn downsize(&mut self, k: u32) { + assert!(k <= self.k); + + self.k = k; + self.n = 1 << k; + + self.g.truncate(self.n as usize); + self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); + } + + fn empty_msm(&'params self) -> MSMKZG { + MSMKZG::new() + } + + fn commit_lagrange(&self, poly: &Polynomial, _: Blind) -> E::G1 { + let mut scalars = Vec::with_capacity(poly.len()); + scalars.extend(poly.iter()); + let bases = &self.g_lagrange; + let size = scalars.len(); + assert!(bases.len() >= size); + best_multiexp(&scalars, &bases[0..size]) + } + + /// Writes params to a buffer. + fn write(&self, writer: &mut W) -> io::Result<()> { + self.write_custom(writer, SerdeFormat::RawBytes) + } + + /// Reads params from a buffer. 
+ fn read(reader: &mut R) -> io::Result { + Self::read_custom(reader, SerdeFormat::RawBytes) + } +} + +impl<'params, E: Engine + Debug> ParamsVerifier<'params, E::G1Affine> for ParamsKZG +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ +} + +impl<'params, E: Engine + Debug> ParamsProver<'params, E::G1Affine> for ParamsKZG +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type ParamsVerifier = ParamsVerifierKZG; + + fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { + self + } + + fn new(k: u32) -> Self { + Self::setup(k, OsRng) + } + + fn commit(&self, poly: &Polynomial, _: Blind) -> E::G1 { + let mut scalars = Vec::with_capacity(poly.len()); + scalars.extend(poly.iter()); + let bases = &self.g; + let size = scalars.len(); + assert!(bases.len() >= size); + best_multiexp(&scalars, &bases[0..size]) + } + + fn get_g(&self) -> &[E::G1Affine] { + &self.g + } +} + +#[cfg(test)] +mod test { + use crate::poly::commitment::ParamsProver; + use crate::poly::commitment::{Blind, Params}; + use crate::poly::kzg::commitment::ParamsKZG; + use ff::Field; + + #[test] + fn test_commit_lagrange() { + const K: u32 = 6; + + use rand_core::OsRng; + + use crate::poly::EvaluationDomain; + use halo2curves::bn256::{Bn256, Fr}; + + let params = ParamsKZG::::new(K); + let domain = EvaluationDomain::new(1, K); + + let mut a = domain.empty_lagrange(); + + for (i, a) in a.iter_mut().enumerate() { + *a = Fr::from(i as u64); + } + + let b = domain.lagrange_to_coeff(a.clone()); + + let alpha = Blind(Fr::random(OsRng)); + + assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + } + + #[test] + fn test_parameter_serialisation_roundtrip() { + const K: u32 = 4; + + use super::super::commitment::Params; + use crate::halo2curves::bn256::Bn256; + + let params0 = ParamsKZG::::new(K); + let mut data = vec![]; + as 
Params<_>>::write(¶ms0, &mut data).unwrap(); + let params1: ParamsKZG = Params::read::<_>(&mut &data[..]).unwrap(); + + assert_eq!(params0.k, params1.k); + assert_eq!(params0.n, params1.n); + assert_eq!(params0.g.len(), params1.g.len()); + assert_eq!(params0.g_lagrange.len(), params1.g_lagrange.len()); + + assert_eq!(params0.g, params1.g); + assert_eq!(params0.g_lagrange, params1.g_lagrange); + assert_eq!(params0.g2, params1.g2); + assert_eq!(params0.s_g2, params1.s_g2); + } +} diff --git a/halo2_backend/src/poly/kzg/mod.rs b/halo2_backend/src/poly/kzg/mod.rs new file mode 100644 index 0000000000..0c99a20c34 --- /dev/null +++ b/halo2_backend/src/poly/kzg/mod.rs @@ -0,0 +1,8 @@ +/// KZG commitment scheme +pub mod commitment; +/// Multiscalar multiplication engines +pub mod msm; +/// KZG multi-open scheme +pub mod multiopen; +/// Strategies used with KZG scheme +pub mod strategy; diff --git a/halo2_backend/src/poly/kzg/msm.rs b/halo2_backend/src/poly/kzg/msm.rs new file mode 100644 index 0000000000..f9b8c284bd --- /dev/null +++ b/halo2_backend/src/poly/kzg/msm.rs @@ -0,0 +1,203 @@ +use std::fmt::Debug; + +use super::commitment::ParamsKZG; +use crate::{ + arithmetic::{best_multiexp, parallelize}, + poly::commitment::MSM, +}; +use group::{Curve, Group}; +use halo2curves::{ + pairing::{Engine, MillerLoopResult, MultiMillerLoop}, + CurveAffine, CurveExt, +}; + +/// A multiscalar multiplication in the polynomial commitment scheme +#[derive(Clone, Default, Debug)] +pub struct MSMKZG +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) scalars: Vec, + pub(crate) bases: Vec, +} + +impl MSMKZG +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + /// Create an empty MSM instance + pub fn new() -> Self { + MSMKZG { + scalars: vec![], + bases: vec![], + } + } + + /// Prepares all scalars in the MSM to linear combination + pub fn combine_with_base(&mut self, base: E::Fr) { + use ff::Field; + let mut acc = 
E::Fr::ONE; + if !self.scalars.is_empty() { + for scalar in self.scalars.iter_mut().rev() { + *scalar *= &acc; + acc *= base; + } + } + } +} + +impl MSM for MSMKZG +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + fn append_term(&mut self, scalar: E::Fr, point: E::G1) { + self.scalars.push(scalar); + self.bases.push(point); + } + + fn add_msm(&mut self, other: &Self) { + self.scalars.extend(other.scalars().iter()); + self.bases.extend(other.bases().iter()); + } + + fn scale(&mut self, factor: E::Fr) { + if !self.scalars.is_empty() { + parallelize(&mut self.scalars, |scalars, _| { + for other_scalar in scalars { + *other_scalar *= &factor; + } + }) + } + } + + fn check(&self) -> bool { + bool::from(self.eval().is_identity()) + } + + fn eval(&self) -> E::G1 { + use group::prime::PrimeCurveAffine; + let mut bases = vec![E::G1Affine::identity(); self.scalars.len()]; + E::G1::batch_normalize(&self.bases, &mut bases); + best_multiexp(&self.scalars, &bases) + } + + fn bases(&self) -> Vec { + self.bases.clone() + } + + fn scalars(&self) -> Vec { + self.scalars.clone() + } +} + +/// A projective point collector +#[derive(Debug, Clone)] +pub(crate) struct PreMSM +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + projectives_msms: Vec>, +} + +impl PreMSM +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) fn new() -> Self { + PreMSM { + projectives_msms: vec![], + } + } + + pub(crate) fn normalize(self) -> MSMKZG { + let (scalars, bases) = self + .projectives_msms + .into_iter() + .map(|msm| (msm.scalars, msm.bases)) + .unzip::<_, _, Vec<_>, Vec<_>>(); + + MSMKZG { + scalars: scalars.into_iter().flatten().collect(), + bases: bases.into_iter().flatten().collect(), + } + } + + pub(crate) fn add_msm(&mut self, other: MSMKZG) { + self.projectives_msms.push(other); + } +} + +impl<'params, E: MultiMillerLoop + Debug> From<&'params ParamsKZG> for DualMSM<'params, E> +where + 
E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + fn from(params: &'params ParamsKZG) -> Self { + DualMSM::new(params) + } +} + +/// Two channel MSM accumulator +#[derive(Debug, Clone)] +pub struct DualMSM<'a, E: Engine> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) params: &'a ParamsKZG, + pub(crate) left: MSMKZG, + pub(crate) right: MSMKZG, +} + +impl<'a, E: MultiMillerLoop + Debug> DualMSM<'a, E> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + /// Create a new two channel MSM accumulator instance + pub fn new(params: &'a ParamsKZG) -> Self { + Self { + params, + left: MSMKZG::new(), + right: MSMKZG::new(), + } + } + + /// Scale all scalars in the MSM by some scaling factor + pub fn scale(&mut self, e: E::Fr) { + self.left.scale(e); + self.right.scale(e); + } + + /// Add another multiexp into this one + pub fn add_msm(&mut self, other: Self) { + self.left.add_msm(&other.left); + self.right.add_msm(&other.right); + } + + /// Performs final pairing check with given verifier params and two channel linear combination + pub fn check(self) -> bool { + let s_g2_prepared = E::G2Prepared::from(self.params.s_g2); + let n_g2_prepared = E::G2Prepared::from(-self.params.g2); + + let left = self.left.eval(); + let right = self.right.eval(); + + let (term_1, term_2) = ( + (&left.into(), &s_g2_prepared), + (&right.into(), &n_g2_prepared), + ); + let terms = &[term_1, term_2]; + + bool::from( + E::multi_miller_loop(&terms[..]) + .final_exponentiation() + .is_identity(), + ) + } +} diff --git a/halo2_backend/src/poly/kzg/multiopen.rs b/halo2_backend/src/poly/kzg/multiopen.rs new file mode 100644 index 0000000000..97b7e2b777 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen.rs @@ -0,0 +1,5 @@ +mod gwc; +mod shplonk; + +pub use gwc::*; +pub use shplonk::*; diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc.rs b/halo2_backend/src/poly/kzg/multiopen/gwc.rs new file mode 
100644 index 0000000000..3fd28dd00a --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc.rs @@ -0,0 +1,50 @@ +mod prover; +mod verifier; + +pub use prover::ProverGWC; +pub use verifier::VerifierGWC; + +use crate::{poly::query::Query, transcript::ChallengeScalar}; +use ff::Field; +use std::marker::PhantomData; + +#[derive(Clone, Copy, Debug)] +struct U {} +type ChallengeU = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct V {} +type ChallengeV = ChallengeScalar; + +struct CommitmentData> { + queries: Vec, + point: F, + _marker: PhantomData, +} + +fn construct_intermediate_sets>(queries: I) -> Vec> +where + I: IntoIterator + Clone, +{ + let mut point_query_map: Vec<(F, Vec)> = Vec::new(); + for query in queries { + if let Some(pos) = point_query_map + .iter() + .position(|(point, _)| *point == query.get_point()) + { + let (_, queries) = &mut point_query_map[pos]; + queries.push(query); + } else { + point_query_map.push((query.get_point(), vec![query])); + } + } + + point_query_map + .into_iter() + .map(|(point, queries)| CommitmentData { + queries, + point, + _marker: PhantomData, + }) + .collect() +} diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs b/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs new file mode 100644 index 0000000000..ecea01cb01 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs @@ -0,0 +1,89 @@ +use super::{construct_intermediate_sets, ChallengeV, Query}; +use crate::arithmetic::{kate_division, powers}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::ParamsProver; +use crate::poly::commitment::Prover; +use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::query::ProverQuery; +use crate::poly::{commitment::Blind, Polynomial}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; + +use group::Curve; +use halo2curves::pairing::Engine; +use halo2curves::CurveExt; +use rand_core::RngCore; +use std::fmt::Debug; +use std::io; +use 
std::marker::PhantomData; + +/// Concrete KZG prover with GWC variant +#[derive(Debug)] +pub struct ProverGWC<'params, E: Engine> { + params: &'params ParamsKZG, +} + +/// Create a multi-opening proof +impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> for ProverGWC<'params, E> +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + const QUERY_INSTANCE: bool = false; + + fn new(params: &'params ParamsKZG) -> Self { + Self { params } + } + + /// Create a multi-opening proof + fn create_proof< + 'com, + Ch: EncodedChallenge, + T: TranscriptWrite, + R, + I, + >( + &self, + _: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore, + { + let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); + let commitment_data = construct_intermediate_sets(queries); + + for commitment_at_a_point in commitment_data.iter() { + let z = commitment_at_a_point.point; + let (poly_batch, eval_batch) = commitment_at_a_point + .queries + .iter() + .zip(powers(*v)) + .map(|(query, power_of_v)| { + assert_eq!(query.get_point(), z); + + let poly = query.get_commitment().poly; + let eval = query.get_eval(); + + (poly.clone() * power_of_v, eval * power_of_v) + }) + .reduce(|(poly_acc, eval_acc), (poly, eval)| (poly_acc + &poly, eval_acc + eval)) + .unwrap(); + + let poly_batch = &poly_batch - eval_batch; + let witness_poly = Polynomial { + values: kate_division(&poly_batch.values, z), + _marker: PhantomData, + }; + let w = self + .params + .commit(&witness_poly, Blind::default()) + .to_affine(); + + transcript.write_point(w)?; + } + Ok(()) + } +} diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs new file mode 100644 index 0000000000..fcfda6941f --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs @@ -0,0 +1,124 @@ +use std::fmt::Debug; + +use 
super::{construct_intermediate_sets, ChallengeU, ChallengeV}; +use crate::arithmetic::powers; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::Verifier; +use crate::poly::commitment::MSM; +use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::kzg::msm::{DualMSM, MSMKZG}; +use crate::poly::kzg::strategy::GuardKZG; +use crate::poly::query::Query; +use crate::poly::query::{CommitmentReference, VerifierQuery}; +use crate::poly::Error; +use crate::transcript::{EncodedChallenge, TranscriptRead}; + +use ff::Field; +use halo2curves::pairing::{Engine, MultiMillerLoop}; +use halo2curves::CurveExt; + +#[derive(Debug)] +/// Concrete KZG verifier with GWC variant +pub struct VerifierGWC<'params, E: Engine> { + params: &'params ParamsKZG, +} + +impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierGWC<'params, E> +where + E: MultiMillerLoop + Debug, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type Guard = GuardKZG<'params, E>; + type MSMAccumulator = DualMSM<'params, E>; + + const QUERY_INSTANCE: bool = false; + + fn new(params: &'params ParamsKZG) -> Self { + Self { params } + } + + fn verify_proof< + 'com, + Ch: EncodedChallenge, + T: TranscriptRead, + I, + >( + &self, + transcript: &mut T, + queries: I, + mut msm_accumulator: DualMSM<'params, E>, + ) -> Result + where + I: IntoIterator>> + Clone, + { + let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); + + let commitment_data = construct_intermediate_sets(queries); + + let w: Vec = (0..commitment_data.len()) + .map(|_| transcript.read_point().map_err(|_| Error::SamplingError)) + .collect::, Error>>()?; + + let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); + + let mut commitment_multi = MSMKZG::::new(); + let mut eval_multi = E::Fr::ZERO; + + let mut witness = MSMKZG::::new(); + let mut witness_with_aux = MSMKZG::::new(); + + for ((commitment_at_a_point, wi), power_of_u) 
in + commitment_data.iter().zip(w.into_iter()).zip(powers(*u)) + { + assert!(!commitment_at_a_point.queries.is_empty()); + let z = commitment_at_a_point.point; + + let (mut commitment_batch, eval_batch) = commitment_at_a_point + .queries + .iter() + .zip(powers(*v)) + .map(|(query, power_of_v)| { + assert_eq!(query.get_point(), z); + + let commitment = match query.get_commitment() { + CommitmentReference::Commitment(c) => { + let mut msm = MSMKZG::::new(); + msm.append_term(power_of_v, (*c).into()); + msm + } + CommitmentReference::MSM(msm) => { + let mut msm = msm.clone(); + msm.scale(power_of_v); + msm + } + }; + let eval = power_of_v * query.get_eval(); + + (commitment, eval) + }) + .reduce(|(mut commitment_acc, eval_acc), (commitment, eval)| { + commitment_acc.add_msm(&commitment); + (commitment_acc, eval_acc + eval) + }) + .unwrap(); + + commitment_batch.scale(power_of_u); + commitment_multi.add_msm(&commitment_batch); + eval_multi += power_of_u * eval_batch; + + witness_with_aux.append_term(power_of_u * z, wi.into()); + witness.append_term(power_of_u, wi.into()); + } + + msm_accumulator.left.add_msm(&witness); + + msm_accumulator.right.add_msm(&witness_with_aux); + msm_accumulator.right.add_msm(&commitment_multi); + let g0: E::G1 = self.params.g[0].into(); + msm_accumulator.right.append_term(eval_multi, -g0); + + Ok(Self::Guard::new(msm_accumulator)) + } +} diff --git a/halo2_backend/src/poly/kzg/multiopen/shplonk.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk.rs new file mode 100644 index 0000000000..d0814e83e3 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk.rs @@ -0,0 +1,247 @@ +mod prover; +mod verifier; + +use crate::multicore::{IntoParallelIterator, ParallelIterator}; +use crate::{poly::query::Query, transcript::ChallengeScalar}; +use ff::Field; +pub use prover::ProverSHPLONK; +use std::collections::BTreeSet; +pub use verifier::VerifierSHPLONK; + +#[derive(Clone, Copy, Debug)] +struct U {} +type ChallengeU = ChallengeScalar; + 
+#[derive(Clone, Copy, Debug)] +struct V {} +type ChallengeV = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Y {} +type ChallengeY = ChallengeScalar; + +#[derive(Debug, Clone, PartialEq)] +struct Commitment((T, Vec)); + +impl Commitment { + fn get(&self) -> T { + self.0 .0.clone() + } + + fn evals(&self) -> Vec { + self.0 .1.clone() + } +} + +#[derive(Debug, Clone, PartialEq)] +struct RotationSet { + commitments: Vec>, + points: Vec, +} + +#[derive(Debug, PartialEq)] +struct IntermediateSets> { + rotation_sets: Vec>, + super_point_set: BTreeSet, +} + +fn construct_intermediate_sets>( + queries: I, +) -> IntermediateSets +where + I: IntoIterator + Clone, +{ + let queries = queries.into_iter().collect::>(); + + // Find evaluation of a commitment at a rotation + let get_eval = |commitment: Q::Commitment, rotation: F| -> F { + queries + .iter() + .find(|query| query.get_commitment() == commitment && query.get_point() == rotation) + .unwrap() + .get_eval() + }; + + // All points that appear in queries + let mut super_point_set = BTreeSet::new(); + + // Collect rotation sets for each commitment + // Example elements in the vector: + // (C_0, {r_5}), + // (C_1, {r_1, r_2, r_3}), + // (C_2, {r_2, r_3, r_4}), + // (C_3, {r_2, r_3, r_4}), + // ... 
+ let mut commitment_rotation_set_map: Vec<(Q::Commitment, BTreeSet)> = vec![]; + for query in queries.iter() { + let rotation = query.get_point(); + super_point_set.insert(rotation); + if let Some(commitment_rotation_set) = commitment_rotation_set_map + .iter_mut() + .find(|(commitment, _)| *commitment == query.get_commitment()) + { + let (_, rotation_set) = commitment_rotation_set; + rotation_set.insert(rotation); + } else { + commitment_rotation_set_map.push(( + query.get_commitment(), + BTreeSet::from_iter(std::iter::once(rotation)), + )); + }; + } + + // Flatten rotation sets and collect commitments that opens against each commitment set + // Example elements in the vector: + // {r_5}: [C_0], + // {r_1, r_2, r_3} : [C_1] + // {r_2, r_3, r_4} : [C_2, C_3], + // ... + // NOTE: we want to make the order of the collection of rotation sets independent of the opening points, to ease the verifier computation + let mut rotation_set_commitment_map: Vec<(BTreeSet, Vec)> = vec![]; + for (commitment, rotation_set) in commitment_rotation_set_map.into_iter() { + if let Some(rotation_set_commitment) = rotation_set_commitment_map + .iter_mut() + .find(|(set, _)| set == &rotation_set) + { + let (_, commitments) = rotation_set_commitment; + commitments.push(commitment); + } else { + rotation_set_commitment_map.push((rotation_set, vec![commitment])); + }; + } + + let rotation_sets = rotation_set_commitment_map + .into_par_iter() + .map(|(rotations, commitments)| { + let rotations_vec = rotations.iter().collect::>(); + let commitments: Vec> = commitments + .into_par_iter() + .map(|commitment| { + let evals: Vec = rotations_vec + .as_slice() + .into_par_iter() + .map(|&&rotation| get_eval(commitment, rotation)) + .collect(); + Commitment((commitment, evals)) + }) + .collect(); + + RotationSet { + commitments, + points: rotations.into_iter().collect(), + } + }) + .collect::>>(); + + IntermediateSets { + rotation_sets, + super_point_set, + } +} + +#[cfg(test)] +mod proptests { + use 
super::{construct_intermediate_sets, Commitment, IntermediateSets}; + use ff::FromUniformBytes; + use halo2curves::pasta::Fp; + use proptest::{collection::vec, prelude::*, sample::select}; + use std::convert::TryFrom; + + #[derive(Debug, Clone)] + struct MyQuery { + point: F, + eval: F, + commitment: usize, + } + + impl super::Query for MyQuery { + type Commitment = usize; + type Eval = Fp; + + fn get_point(&self) -> Fp { + self.point + } + + fn get_eval(&self) -> Self::Eval { + self.eval + } + + fn get_commitment(&self) -> Self::Commitment { + self.commitment + } + } + + prop_compose! { + fn arb_point()( + bytes in vec(any::(), 64) + ) -> Fp { + Fp::from_uniform_bytes(&<[u8; 64]>::try_from(bytes).unwrap()) + } + } + + prop_compose! { + fn arb_query(commitment: usize, point: Fp)( + eval in arb_point() + ) -> MyQuery { + MyQuery { + point, + eval, + commitment + } + } + } + + prop_compose! { + // Mapping from column index to point index. + fn arb_queries_inner(num_points: usize, num_cols: usize, num_queries: usize)( + col_indices in vec(select((0..num_cols).collect::>()), num_queries), + point_indices in vec(select((0..num_points).collect::>()), num_queries) + ) -> Vec<(usize, usize)> { + col_indices.into_iter().zip(point_indices.into_iter()).collect() + } + } + + prop_compose! { + fn compare_queries( + num_points: usize, + num_cols: usize, + num_queries: usize, + )( + points_1 in vec(arb_point(), num_points), + points_2 in vec(arb_point(), num_points), + mapping in arb_queries_inner(num_points, num_cols, num_queries) + )( + queries_1 in mapping.iter().map(|(commitment, point_idx)| arb_query(*commitment, points_1[*point_idx])).collect::>(), + queries_2 in mapping.iter().map(|(commitment, point_idx)| arb_query(*commitment, points_2[*point_idx])).collect::>(), + ) -> ( + Vec>, + Vec> + ) { + ( + queries_1, + queries_2, + ) + } + } + + proptest! 
{ + #[test] + fn test_intermediate_sets( + (queries_1, queries_2) in compare_queries(8, 8, 16) + ) { + let IntermediateSets { rotation_sets, .. } = construct_intermediate_sets(queries_1); + let commitment_sets = rotation_sets.iter().map(|data| + data.commitments.iter().map(Commitment::get).collect::>() + ).collect::>(); + + // It shouldn't matter what the point or eval values are; we should get + // the same exact point set indices and point indices again. + let IntermediateSets { rotation_sets: new_rotation_sets, .. } = construct_intermediate_sets(queries_2); + let new_commitment_sets = new_rotation_sets.iter().map(|data| + data.commitments.iter().map(Commitment::get).collect::>() + ).collect::>(); + + assert_eq!(commitment_sets, new_commitment_sets); + } + } +} diff --git a/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs new file mode 100644 index 0000000000..5001d69094 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs @@ -0,0 +1,298 @@ +use super::{ + construct_intermediate_sets, ChallengeU, ChallengeV, ChallengeY, Commitment, RotationSet, +}; +use crate::arithmetic::{ + eval_polynomial, evaluate_vanishing_polynomial, kate_division, lagrange_interpolate, + parallelize, powers, CurveAffine, +}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::{Blind, ParamsProver, Prover}; +use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::query::{PolynomialPointer, ProverQuery}; +use crate::poly::{Coeff, Polynomial}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; + +use crate::multicore::{IntoParallelIterator, ParallelIterator}; +use ff::Field; +use group::Curve; +use halo2curves::pairing::Engine; +use halo2curves::CurveExt; +use rand_core::RngCore; +use std::fmt::Debug; +use std::io; +use std::marker::PhantomData; +use std::ops::MulAssign; + +fn div_by_vanishing(poly: Polynomial, roots: &[F]) -> Vec { + let poly = roots + 
.iter() + .fold(poly.values, |poly, point| kate_division(&poly, *point)); + + poly +} + +struct CommitmentExtension<'a, C: CurveAffine> { + commitment: Commitment>, + low_degree_equivalent: Polynomial, +} + +impl<'a, C: CurveAffine> Commitment> { + fn extend(&self, points: &[C::Scalar]) -> CommitmentExtension<'a, C> { + let poly = lagrange_interpolate(points, &self.evals()[..]); + + let low_degree_equivalent = Polynomial { + values: poly, + _marker: PhantomData, + }; + + CommitmentExtension { + commitment: self.clone(), + low_degree_equivalent, + } + } +} + +impl<'a, C: CurveAffine> CommitmentExtension<'a, C> { + fn linearisation_contribution(&self, u: C::Scalar) -> Polynomial { + let p_x = self.commitment.get().poly; + let r_eval = eval_polynomial(&self.low_degree_equivalent.values[..], u); + p_x - r_eval + } + + fn quotient_contribution(&self) -> Polynomial { + let len = self.low_degree_equivalent.len(); + let mut p_x = self.commitment.get().poly.clone(); + parallelize(&mut p_x.values[0..len], |lhs, start| { + for (lhs, rhs) in lhs + .iter_mut() + .zip(self.low_degree_equivalent.values[start..].iter()) + { + *lhs -= *rhs; + } + }); + p_x + } +} + +struct RotationSetExtension<'a, C: CurveAffine> { + commitments: Vec>, + points: Vec, +} + +impl<'a, C: CurveAffine> RotationSet> { + fn extend(self, commitments: Vec>) -> RotationSetExtension<'a, C> { + RotationSetExtension { + commitments, + points: self.points, + } + } +} + +/// Concrete KZG prover with SHPLONK variant +#[derive(Debug)] +pub struct ProverSHPLONK<'a, E: Engine> { + params: &'a ParamsKZG, +} + +impl<'a, E: Engine> ProverSHPLONK<'a, E> { + /// Given parameters creates new prover instance + pub fn new(params: &'a ParamsKZG) -> Self { + Self { params } + } +} + +/// Create a multi-opening proof +impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> + for ProverSHPLONK<'params, E> +where + E::Fr: Ord, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: 
SerdeCurveAffine, +{ + const QUERY_INSTANCE: bool = false; + + fn new(params: &'params ParamsKZG) -> Self { + Self { params } + } + + /// Create a multi-opening proof + fn create_proof< + 'com, + Ch: EncodedChallenge, + T: TranscriptWrite, + R, + I, + >( + &self, + _: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore, + { + // TODO: explore if it is safe to use same challenge + // for different sets that are already combined with another challenge + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + + let quotient_contribution = |rotation_set: &RotationSetExtension| { + // [P_i_0(X) - R_i_0(X), P_i_1(X) - R_i_1(X), ... ] + #[allow(clippy::needless_collect)] + let numerators = rotation_set + .commitments + .as_slice() + .into_par_iter() + .map(|commitment| commitment.quotient_contribution()) + .collect::>(); + + // define numerator polynomial as + // N_i_j(X) = (P_i_j(X) - R_i_j(X)) + // and combine polynomials with same evaluation point set + // N_i(X) = linear_combination(y, N_i_j(X)) + // where y is random scalar to combine numerator polynomials + let n_x = numerators + .into_iter() + .zip(powers(*y)) + .map(|(numerator, power_of_y)| numerator * power_of_y) + .reduce(|acc, numerator| acc + &numerator) + .unwrap(); + + let points = &rotation_set.points[..]; + + // quotient contribution of this evaluation set is + // Q_i(X) = N_i(X) / Z_i(X) where + // Z_i(X) = (x - r_i_0) * (x - r_i_1) * ... 
+ let mut poly = div_by_vanishing(n_x, points); + poly.resize(self.params.n as usize, E::Fr::ZERO); + + Polynomial { + values: poly, + _marker: PhantomData, + } + }; + + let intermediate_sets = construct_intermediate_sets(queries); + let (rotation_sets, super_point_set) = ( + intermediate_sets.rotation_sets, + intermediate_sets.super_point_set, + ); + + let rotation_sets: Vec> = rotation_sets + .into_par_iter() + .map(|rotation_set| { + let commitments: Vec> = rotation_set + .commitments + .as_slice() + .into_par_iter() + .map(|commitment_data| commitment_data.extend(&rotation_set.points)) + .collect(); + rotation_set.extend(commitments) + }) + .collect(); + + let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); + + #[allow(clippy::needless_collect)] + let quotient_polynomials = rotation_sets + .as_slice() + .into_par_iter() + .map(quotient_contribution) + .collect::>(); + + let h_x: Polynomial = quotient_polynomials + .into_iter() + .zip(powers(*v)) + .map(|(poly, power_of_v)| poly * power_of_v) + .reduce(|acc, poly| acc + &poly) + .unwrap(); + + let h = self.params.commit(&h_x, Blind::default()).to_affine(); + transcript.write_point(h)?; + let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); + + let linearisation_contribution = |rotation_set: RotationSetExtension| { + let mut diffs = super_point_set.clone(); + for point in rotation_set.points.iter() { + diffs.remove(point); + } + let diffs = diffs.into_iter().collect::>(); + + // calculate difference vanishing polynomial evaluation + let z_i = evaluate_vanishing_polynomial(&diffs[..], *u); + + // inner linearisation contributions are + // [P_i_0(X) - r_i_0, P_i_1(X) - r_i_1, ... 
] where + // r_i_j = R_i_j(u) is the evaluation of low degree equivalent polynomial + // where u is random evaluation point + #[allow(clippy::needless_collect)] + let inner_contributions = rotation_set + .commitments + .as_slice() + .into_par_iter() + .map(|commitment| commitment.linearisation_contribution(*u)) + .collect::>(); + + // define inner contributor polynomial as + // L_i_j(X) = (P_i_j(X) - r_i_j) + // and combine polynomials with same evaluation point set + // L_i(X) = linear_combination(y, L_i_j(X)) + // where y is random scalar to combine inner contributors + let l_x: Polynomial = inner_contributions + .into_iter() + .zip(powers(*y)) + .map(|(poly, power_of_y)| poly * power_of_y) + .reduce(|acc, poly| acc + &poly) + .unwrap(); + + // finally scale l_x by difference vanishing polynomial evaluation z_i + (l_x * z_i, z_i) + }; + + #[allow(clippy::type_complexity)] + let (linearisation_contributions, z_diffs): ( + Vec>, + Vec, + ) = rotation_sets + .into_par_iter() + .map(linearisation_contribution) + .unzip(); + + let l_x: Polynomial = linearisation_contributions + .into_iter() + .zip(powers(*v)) + .map(|(poly, power_of_v)| poly * power_of_v) + .reduce(|acc, poly| acc + &poly) + .unwrap(); + + let super_point_set = super_point_set.into_iter().collect::>(); + let zt_eval = evaluate_vanishing_polynomial(&super_point_set[..], *u); + let l_x = l_x - &(h_x * zt_eval); + + // sanity check + #[cfg(debug_assertions)] + { + let must_be_zero = eval_polynomial(&l_x.values[..], *u); + assert_eq!(must_be_zero, E::Fr::ZERO); + } + + let mut h_x = div_by_vanishing(l_x, &[*u]); + + // normalize coefficients by the coefficient of the first polynomial + let z_0_diff_inv = z_diffs[0].invert().unwrap(); + for h_i in h_x.iter_mut() { + h_i.mul_assign(z_0_diff_inv) + } + + let h_x = Polynomial { + values: h_x, + _marker: PhantomData, + }; + + let h = self.params.commit(&h_x, Blind::default()).to_affine(); + transcript.write_point(h)?; + + Ok(()) + } +} diff --git 
a/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs new file mode 100644 index 0000000000..5d03940177 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -0,0 +1,140 @@ +use std::fmt::Debug; + +use super::ChallengeY; +use super::{construct_intermediate_sets, ChallengeU, ChallengeV}; +use crate::arithmetic::{ + eval_polynomial, evaluate_vanishing_polynomial, lagrange_interpolate, powers, +}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::Verifier; +use crate::poly::commitment::MSM; +use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::kzg::msm::DualMSM; +use crate::poly::kzg::msm::{PreMSM, MSMKZG}; +use crate::poly::kzg::strategy::GuardKZG; +use crate::poly::query::{CommitmentReference, VerifierQuery}; +use crate::poly::Error; +use crate::transcript::{EncodedChallenge, TranscriptRead}; +use ff::Field; +use halo2curves::pairing::{Engine, MultiMillerLoop}; +use halo2curves::CurveExt; +use std::ops::MulAssign; + +/// Concrete KZG multiopen verifier with SHPLONK variant +#[derive(Debug)] +pub struct VerifierSHPLONK<'params, E: Engine> { + params: &'params ParamsKZG, +} + +impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierSHPLONK<'params, E> +where + E: MultiMillerLoop + Debug, + E::Fr: Ord, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type Guard = GuardKZG<'params, E>; + type MSMAccumulator = DualMSM<'params, E>; + + const QUERY_INSTANCE: bool = false; + + fn new(params: &'params ParamsKZG) -> Self { + Self { params } + } + + /// Verify a multi-opening proof + fn verify_proof< + 'com, + Ch: EncodedChallenge, + T: TranscriptRead, + I, + >( + &self, + transcript: &mut T, + queries: I, + mut msm_accumulator: DualMSM<'params, E>, + ) -> Result + where + I: IntoIterator>> + Clone, + { + let intermediate_sets = 
construct_intermediate_sets(queries); + let (rotation_sets, super_point_set) = ( + intermediate_sets.rotation_sets, + intermediate_sets.super_point_set, + ); + + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); + + let h1 = transcript.read_point().map_err(|_| Error::SamplingError)?; + let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); + let h2 = transcript.read_point().map_err(|_| Error::SamplingError)?; + + let (mut z_0_diff_inverse, mut z_0) = (E::Fr::ZERO, E::Fr::ZERO); + let (mut outer_msm, mut r_outer_acc) = (PreMSM::::new(), E::Fr::ZERO); + for (i, (rotation_set, power_of_v)) in rotation_sets.iter().zip(powers(*v)).enumerate() { + let diffs: Vec = super_point_set + .iter() + .filter(|point| !rotation_set.points.contains(point)) + .copied() + .collect(); + let mut z_diff_i = evaluate_vanishing_polynomial(&diffs[..], *u); + + // normalize coefficients by the coefficient of the first commitment + if i == 0 { + z_0 = evaluate_vanishing_polynomial(&rotation_set.points[..], *u); + z_0_diff_inverse = z_diff_i.invert().unwrap(); + z_diff_i = E::Fr::ONE; + } else { + z_diff_i.mul_assign(z_0_diff_inverse); + } + + let (mut inner_msm, r_inner_acc) = rotation_set + .commitments + .iter() + .zip(powers(*y)) + .map(|(commitment_data, power_of_y)| { + // calculate low degree equivalent + let r_x = lagrange_interpolate( + &rotation_set.points[..], + &commitment_data.evals()[..], + ); + let r_eval = power_of_y * eval_polynomial(&r_x[..], *u); + let msm = match commitment_data.get() { + CommitmentReference::Commitment(c) => { + let mut msm = MSMKZG::::new(); + msm.append_term(power_of_y, (*c).into()); + msm + } + CommitmentReference::MSM(msm) => { + let mut msm = msm.clone(); + msm.scale(power_of_y); + msm + } + }; + (msm, r_eval) + }) + .reduce(|(mut msm_acc, r_eval_acc), (msm, r_eval)| { + msm_acc.add_msm(&msm); + (msm_acc, r_eval_acc + r_eval) + }) + .unwrap(); + + 
inner_msm.scale(power_of_v * z_diff_i); + outer_msm.add_msm(inner_msm); + r_outer_acc += power_of_v * r_inner_acc * z_diff_i; + } + let mut outer_msm = outer_msm.normalize(); + let g1: E::G1 = self.params.g[0].into(); + outer_msm.append_term(-r_outer_acc, g1); + outer_msm.append_term(-z_0, h1.into()); + outer_msm.append_term(*u, h2.into()); + + msm_accumulator.left.append_term(E::Fr::ONE, h2.into()); + + msm_accumulator.right.add_msm(&outer_msm); + + Ok(Self::Guard::new(msm_accumulator)) + } +} diff --git a/halo2_backend/src/poly/kzg/strategy.rs b/halo2_backend/src/poly/kzg/strategy.rs new file mode 100644 index 0000000000..ee80d800ac --- /dev/null +++ b/halo2_backend/src/poly/kzg/strategy.rs @@ -0,0 +1,181 @@ +use super::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + msm::DualMSM, +}; +use crate::{ + helpers::SerdeCurveAffine, + plonk::Error, + poly::{ + commitment::Verifier, + strategy::{Guard, VerificationStrategy}, + }, +}; +use ff::Field; +use halo2curves::{ + pairing::{Engine, MultiMillerLoop}, + CurveAffine, CurveExt, +}; +use rand_core::OsRng; +use std::fmt::Debug; + +/// Wrapper for linear verification accumulator +#[derive(Debug, Clone)] +pub struct GuardKZG<'params, E: MultiMillerLoop + Debug> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) msm_accumulator: DualMSM<'params, E>, +} + +/// Define accumulator type as `DualMSM` +impl<'params, E> Guard> for GuardKZG<'params, E> +where + E: MultiMillerLoop + Debug, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type MSMAccumulator = DualMSM<'params, E>; +} + +/// KZG specific operations +impl<'params, E: MultiMillerLoop + Debug> GuardKZG<'params, E> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) fn new(msm_accumulator: DualMSM<'params, E>) -> Self { + Self { msm_accumulator } + } +} + +/// A verifier that checks multiple proofs in a batch 
+#[derive(Clone, Debug)] +pub struct AccumulatorStrategy<'params, E: Engine> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) msm_accumulator: DualMSM<'params, E>, +} + +impl<'params, E: MultiMillerLoop + Debug> AccumulatorStrategy<'params, E> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + /// Constructs an empty batch verifier + pub fn new(params: &'params ParamsKZG) -> Self { + AccumulatorStrategy { + msm_accumulator: DualMSM::new(params), + } + } + + /// Constructs and initialized new batch verifier + pub fn with(msm_accumulator: DualMSM<'params, E>) -> Self { + AccumulatorStrategy { msm_accumulator } + } +} + +/// A verifier that checks a single proof +#[derive(Clone, Debug)] +pub struct SingleStrategy<'params, E: Engine> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + pub(crate) msm: DualMSM<'params, E>, +} + +impl<'params, E: MultiMillerLoop + Debug> SingleStrategy<'params, E> +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + /// Constructs an empty batch verifier + pub fn new(params: &'params ParamsKZG) -> Self { + SingleStrategy { + msm: DualMSM::new(params), + } + } +} + +impl< + 'params, + E: MultiMillerLoop + Debug, + V: Verifier< + 'params, + KZGCommitmentScheme, + MSMAccumulator = DualMSM<'params, E>, + Guard = GuardKZG<'params, E>, + >, + > VerificationStrategy<'params, KZGCommitmentScheme, V> for AccumulatorStrategy<'params, E> +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type Output = Self; + + fn new(params: &'params ParamsKZG) -> Self { + AccumulatorStrategy::new(params) + } + + fn process( + mut self, + f: impl FnOnce(V::MSMAccumulator) -> Result, + ) -> Result { + self.msm_accumulator.scale(E::Fr::random(OsRng)); + + // Guard is updated with new msm contributions + let guard = f(self.msm_accumulator)?; + Ok(Self { + msm_accumulator: 
guard.msm_accumulator, + }) + } + + fn finalize(self) -> bool { + self.msm_accumulator.check() + } +} + +impl< + 'params, + E: MultiMillerLoop + Debug, + V: Verifier< + 'params, + KZGCommitmentScheme, + MSMAccumulator = DualMSM<'params, E>, + Guard = GuardKZG<'params, E>, + >, + > VerificationStrategy<'params, KZGCommitmentScheme, V> for SingleStrategy<'params, E> +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type Output = (); + + fn new(params: &'params ParamsKZG) -> Self { + Self::new(params) + } + + fn process( + self, + f: impl FnOnce(V::MSMAccumulator) -> Result, + ) -> Result { + // Guard is updated with new msm contributions + let guard = f(self.msm)?; + let msm = guard.msm_accumulator; + if msm.check() { + Ok(()) + } else { + Err(Error::ConstraintSystemFailure) + } + } + + fn finalize(self) -> bool { + unreachable!(); + } +} diff --git a/halo2_backend/src/poly/multiopen_test.rs b/halo2_backend/src/poly/multiopen_test.rs new file mode 100644 index 0000000000..47c6731167 --- /dev/null +++ b/halo2_backend/src/poly/multiopen_test.rs @@ -0,0 +1,298 @@ +#[cfg(test)] +mod test { + use crate::arithmetic::eval_polynomial; + use crate::plonk::Error; + use crate::poly::commitment::Blind; + use crate::poly::commitment::ParamsProver; + use crate::poly::{ + commitment::{CommitmentScheme, Params, Prover, Verifier}, + query::{ProverQuery, VerifierQuery}, + strategy::VerificationStrategy, + EvaluationDomain, + }; + use crate::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, + TranscriptReadBuffer, TranscriptWriterBuffer, + }; + use ff::WithSmallOrderMulGroup; + use group::Curve; + use rand_core::OsRng; + + #[test] + fn test_roundtrip_ipa() { + use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA}; + use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA}; + use crate::poly::ipa::strategy::AccumulatorStrategy; + use 
halo2curves::pasta::EqAffine; + + const K: u32 = 4; + + let params = ParamsIPA::::new(K); + + let proof = create_proof::< + IPACommitmentScheme, + ProverIPA<_>, + _, + Blake2bWrite<_, _, Challenge255<_>>, + >(¶ms); + + let verifier_params = params.verifier_params(); + + verify::< + IPACommitmentScheme, + VerifierIPA<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], false); + + verify::< + IPACommitmentScheme, + VerifierIPA<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], true); + } + + #[test] + fn test_roundtrip_ipa_keccak() { + use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA}; + use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA}; + use crate::poly::ipa::strategy::AccumulatorStrategy; + use halo2curves::pasta::EqAffine; + + const K: u32 = 4; + + let params = ParamsIPA::::new(K); + + let proof = create_proof::< + IPACommitmentScheme, + ProverIPA<_>, + _, + Keccak256Write<_, _, Challenge255<_>>, + >(¶ms); + + let verifier_params = params.verifier_params(); + + verify::< + IPACommitmentScheme, + VerifierIPA<_>, + _, + Keccak256Read<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], false); + + verify::< + IPACommitmentScheme, + VerifierIPA<_>, + _, + Keccak256Read<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], true); + } + + #[test] + fn test_roundtrip_gwc() { + use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; + use crate::poly::kzg::multiopen::{ProverGWC, VerifierGWC}; + use crate::poly::kzg::strategy::AccumulatorStrategy; + use halo2curves::bn256::Bn256; + + const K: u32 = 4; + + let params = ParamsKZG::::new(K); + + let proof = + create_proof::<_, ProverGWC<_>, _, Blake2bWrite<_, _, Challenge255<_>>>(¶ms); + + let verifier_params = params.verifier_params(); + + verify::<_, VerifierGWC<_>, _, Blake2bRead<_, _, Challenge255<_>>, 
AccumulatorStrategy<_>>( + verifier_params, + &proof[..], + false, + ); + + verify::< + KZGCommitmentScheme, + VerifierGWC<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], true); + } + + #[test] + fn test_roundtrip_shplonk() { + use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; + use crate::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; + use crate::poly::kzg::strategy::AccumulatorStrategy; + use halo2curves::bn256::Bn256; + + const K: u32 = 4; + + let params = ParamsKZG::::new(K); + + let proof = create_proof::< + KZGCommitmentScheme, + ProverSHPLONK<_>, + _, + Blake2bWrite<_, _, Challenge255<_>>, + >(¶ms); + + let verifier_params = params.verifier_params(); + + verify::< + KZGCommitmentScheme, + VerifierSHPLONK<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], false); + + verify::< + KZGCommitmentScheme, + VerifierSHPLONK<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(verifier_params, &proof[..], true); + } + + fn verify< + 'a, + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptReadBuffer<&'a [u8], Scheme::Curve, E>, + Strategy: VerificationStrategy<'params, Scheme, V, Output = Strategy>, + >( + params: &'params Scheme::ParamsVerifier, + proof: &'a [u8], + should_fail: bool, + ) { + let verifier = V::new(params); + + let mut transcript = T::init(proof); + + let a = transcript.read_point().unwrap(); + let b = transcript.read_point().unwrap(); + let c = transcript.read_point().unwrap(); + + let x = transcript.squeeze_challenge(); + let y = transcript.squeeze_challenge(); + + let avx = transcript.read_scalar().unwrap(); + let bvx = transcript.read_scalar().unwrap(); + let cvy = transcript.read_scalar().unwrap(); + + let valid_queries = std::iter::empty() + .chain(Some(VerifierQuery::new_commitment(&a, x.get_scalar(), avx))) + 
.chain(Some(VerifierQuery::new_commitment(&b, x.get_scalar(), bvx))) + .chain(Some(VerifierQuery::new_commitment(&c, y.get_scalar(), cvy))); + + let invalid_queries = std::iter::empty() + .chain(Some(VerifierQuery::new_commitment(&a, x.get_scalar(), avx))) + .chain(Some(VerifierQuery::new_commitment(&b, x.get_scalar(), avx))) + .chain(Some(VerifierQuery::new_commitment(&c, y.get_scalar(), cvy))); + + let queries = if should_fail { + invalid_queries.clone() + } else { + valid_queries.clone() + }; + + { + let strategy = Strategy::new(params); + let strategy = strategy + .process(|msm_accumulator| { + verifier + .verify_proof(&mut transcript, queries.clone(), msm_accumulator) + .map_err(|_| Error::Opening) + }) + .unwrap(); + + assert_eq!(strategy.finalize(), !should_fail); + } + } + + fn create_proof< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptWriterBuffer, Scheme::Curve, E>, + >( + params: &'params Scheme::ParamsProver, + ) -> Vec + where + Scheme::Scalar: WithSmallOrderMulGroup<3>, + { + let domain = EvaluationDomain::new(1, params.k()); + + let mut ax = domain.empty_coeff(); + for (i, a) in ax.iter_mut().enumerate() { + *a = <::Scalar>::from(10 + i as u64); + } + + let mut bx = domain.empty_coeff(); + for (i, a) in bx.iter_mut().enumerate() { + *a = <::Scalar>::from(100 + i as u64); + } + + let mut cx = domain.empty_coeff(); + for (i, a) in cx.iter_mut().enumerate() { + *a = <::Scalar>::from(100 + i as u64); + } + + let mut transcript = T::init(vec![]); + + let blind = Blind::new(&mut OsRng); + let a = params.commit(&ax, blind).to_affine(); + let b = params.commit(&bx, blind).to_affine(); + let c = params.commit(&cx, blind).to_affine(); + + transcript.write_point(a).unwrap(); + transcript.write_point(b).unwrap(); + transcript.write_point(c).unwrap(); + + let x = transcript.squeeze_challenge(); + let y = transcript.squeeze_challenge(); + + let avx = eval_polynomial(&ax, x.get_scalar()); + let bvx = 
eval_polynomial(&bx, x.get_scalar()); + let cvy = eval_polynomial(&cx, y.get_scalar()); + + transcript.write_scalar(avx).unwrap(); + transcript.write_scalar(bvx).unwrap(); + transcript.write_scalar(cvy).unwrap(); + + let queries = [ + ProverQuery { + point: x.get_scalar(), + poly: &ax, + blind, + }, + ProverQuery { + point: x.get_scalar(), + poly: &bx, + blind, + }, + ProverQuery { + point: y.get_scalar(), + poly: &cx, + blind, + }, + ] + .to_vec(); + + let prover = P::new(params); + prover + .create_proof(&mut OsRng, &mut transcript, queries) + .unwrap(); + + transcript.finalize() + } +} diff --git a/halo2_backend/src/poly/query.rs b/halo2_backend/src/poly/query.rs new file mode 100644 index 0000000000..bc7a20c240 --- /dev/null +++ b/halo2_backend/src/poly/query.rs @@ -0,0 +1,160 @@ +use std::fmt::Debug; + +use super::commitment::{Blind, MSM}; +use crate::{ + arithmetic::eval_polynomial, + poly::{Coeff, Polynomial}, +}; +use halo2curves::CurveAffine; + +pub trait Query: Sized + Clone + Send + Sync { + type Commitment: PartialEq + Copy + Send + Sync; + type Eval: Clone + Default + Debug; + + fn get_point(&self) -> F; + fn get_eval(&self) -> Self::Eval; + fn get_commitment(&self) -> Self::Commitment; +} + +/// A polynomial query at a point +#[derive(Debug, Clone, Copy)] +pub struct ProverQuery<'com, C: CurveAffine> { + /// Point at which polynomial is queried + pub(crate) point: C::Scalar, + /// Coefficients of polynomial + pub(crate) poly: &'com Polynomial, + /// Blinding factor of polynomial + pub(crate) blind: Blind, +} + +impl<'com, C> ProverQuery<'com, C> +where + C: CurveAffine, +{ + /// Create a new prover query based on a polynomial + pub fn new( + point: C::Scalar, + poly: &'com Polynomial, + blind: Blind, + ) -> Self { + ProverQuery { point, poly, blind } + } +} + +#[doc(hidden)] +#[derive(Copy, Clone)] +pub struct PolynomialPointer<'com, C: CurveAffine> { + pub(crate) poly: &'com Polynomial, + pub(crate) blind: Blind, +} + +impl<'com, C: CurveAffine> 
PartialEq for PolynomialPointer<'com, C> { + fn eq(&self, other: &Self) -> bool { + std::ptr::eq(self.poly, other.poly) + } +} + +impl<'com, C: CurveAffine> Query for ProverQuery<'com, C> { + type Commitment = PolynomialPointer<'com, C>; + type Eval = C::Scalar; + + fn get_point(&self) -> C::Scalar { + self.point + } + fn get_eval(&self) -> Self::Eval { + eval_polynomial(&self.poly[..], self.get_point()) + } + fn get_commitment(&self) -> Self::Commitment { + PolynomialPointer { + poly: self.poly, + blind: self.blind, + } + } +} + +impl<'com, C: CurveAffine, M: MSM> VerifierQuery<'com, C, M> { + /// Create a new verifier query based on a commitment + pub fn new_commitment(commitment: &'com C, point: C::Scalar, eval: C::Scalar) -> Self { + VerifierQuery { + point, + eval, + commitment: CommitmentReference::Commitment(commitment), + } + } + + /// Create a new verifier query based on a linear combination of commitments + pub fn new_msm(msm: &'com M, point: C::Scalar, eval: C::Scalar) -> VerifierQuery<'com, C, M> { + VerifierQuery { + point, + eval, + commitment: CommitmentReference::MSM(msm), + } + } +} + +/// A polynomial query at a point +#[derive(Debug, Clone, Copy)] +pub struct VerifierQuery<'com, C: CurveAffine, M: MSM> { + /// Point at which polynomial is queried + pub(crate) point: C::Scalar, + /// Commitment to polynomial + pub(crate) commitment: CommitmentReference<'com, C, M>, + /// Evaluation of polynomial at query point + pub(crate) eval: C::Scalar, +} + +impl<'com, C, M> VerifierQuery<'com, C, M> +where + C: CurveAffine, + M: MSM, +{ + /// Create a new verifier query based on a commitment + pub fn new( + point: C::Scalar, + commitment: CommitmentReference<'com, C, M>, + eval: C::Scalar, + ) -> Self { + VerifierQuery { + point, + commitment, + eval, + } + } +} + +#[allow(clippy::upper_case_acronyms)] +#[derive(Clone, Debug)] +pub enum CommitmentReference<'r, C: CurveAffine, M: MSM> { + Commitment(&'r C), + MSM(&'r M), +} + +impl<'r, C: CurveAffine, M: MSM> 
Copy for CommitmentReference<'r, C, M> {} + +impl<'r, C: CurveAffine, M: MSM> PartialEq for CommitmentReference<'r, C, M> { + #![allow(clippy::vtable_address_comparisons)] + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (&CommitmentReference::Commitment(a), &CommitmentReference::Commitment(b)) => { + std::ptr::eq(a, b) + } + (&CommitmentReference::MSM(a), &CommitmentReference::MSM(b)) => std::ptr::eq(a, b), + _ => false, + } + } +} + +impl<'com, C: CurveAffine, M: MSM> Query for VerifierQuery<'com, C, M> { + type Eval = C::Scalar; + type Commitment = CommitmentReference<'com, C, M>; + + fn get_point(&self) -> C::Scalar { + self.point + } + fn get_eval(&self) -> C::Scalar { + self.eval + } + fn get_commitment(&self) -> Self::Commitment { + self.commitment + } +} diff --git a/halo2_backend/src/poly/strategy.rs b/halo2_backend/src/poly/strategy.rs new file mode 100644 index 0000000000..850f95e6c9 --- /dev/null +++ b/halo2_backend/src/poly/strategy.rs @@ -0,0 +1,31 @@ +use super::commitment::{CommitmentScheme, Verifier}; +use crate::plonk::Error; + +/// Guards is unfinished verification result. Implement this to construct various +/// verification strategies such as aggregation and recursion. +pub trait Guard { + /// Multi scalar engine which is not evaluated yet. + type MSMAccumulator; +} + +/// Trait representing a strategy for verifying Halo 2 proofs. +pub trait VerificationStrategy<'params, Scheme: CommitmentScheme, V: Verifier<'params, Scheme>> { + /// The output type of this verification strategy after processing a proof. + type Output; + + /// Creates new verification strategy instance + fn new(params: &'params Scheme::ParamsVerifier) -> Self; + + /// Obtains an MSM from the verifier strategy and yields back the strategy's + /// output. + fn process( + self, + f: impl FnOnce(V::MSMAccumulator) -> Result, + ) -> Result; + + /// Finalizes the batch and checks its validity. + /// + /// Returns `false` if *some* proof was invalid. 
If the caller needs to identify + /// specific failing proofs, it must re-process the proofs separately. + fn finalize(self) -> bool; +} diff --git a/halo2_backend/src/transcript.rs b/halo2_backend/src/transcript.rs new file mode 100644 index 0000000000..6e4f812bdf --- /dev/null +++ b/halo2_backend/src/transcript.rs @@ -0,0 +1,554 @@ +//! This module contains utilities and traits for dealing with Fiat-Shamir +//! transcripts. + +use blake2b_simd::{Params as Blake2bParams, State as Blake2bState}; +use group::ff::{FromUniformBytes, PrimeField}; +use sha3::{Digest, Keccak256}; +use std::convert::TryInto; + +use halo2curves::{Coordinates, CurveAffine}; + +use std::io::{self, Read, Write}; +use std::marker::PhantomData; + +/// Prefix to a prover's message soliciting a challenge +const BLAKE2B_PREFIX_CHALLENGE: u8 = 0; + +/// Prefix to a prover's message containing a curve point +const BLAKE2B_PREFIX_POINT: u8 = 1; + +/// Prefix to a prover's message containing a scalar +const BLAKE2B_PREFIX_SCALAR: u8 = 2; + +/// Prefix to a prover's message soliciting a challenge +const KECCAK256_PREFIX_CHALLENGE: u8 = 0; + +/// First prefix to a prover's message soliciting a challenge +/// Not included in the growing state! +const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10; + +/// Second prefix to a prover's message soliciting a challenge +/// Not included in the growing state! +const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11; + +/// Prefix to a prover's message containing a curve point +const KECCAK256_PREFIX_POINT: u8 = 1; + +/// Prefix to a prover's message containing a scalar +const KECCAK256_PREFIX_SCALAR: u8 = 2; + +/// Generic transcript view (from either the prover or verifier's perspective) +pub trait Transcript> { + /// Squeeze an encoded verifier challenge from the transcript. + fn squeeze_challenge(&mut self) -> E; + + /// Squeeze a typed challenge (in the scalar field) from the transcript. 
+ fn squeeze_challenge_scalar(&mut self) -> ChallengeScalar { + ChallengeScalar { + inner: self.squeeze_challenge().get_scalar(), + _marker: PhantomData, + } + } + + /// Writing the point to the transcript without writing it to the proof, + /// treating it as a common input. + fn common_point(&mut self, point: C) -> io::Result<()>; + + /// Writing the scalar to the transcript without writing it to the proof, + /// treating it as a common input. + fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>; +} + +/// Transcript view from the perspective of a verifier that has access to an +/// input stream of data from the prover to the verifier. +pub trait TranscriptRead>: Transcript { + /// Read a curve point from the prover. + fn read_point(&mut self) -> io::Result; + + /// Read a curve scalar from the prover. + fn read_scalar(&mut self) -> io::Result; +} + +/// Transcript view from the perspective of a prover that has access to an +/// output stream of messages from the prover to the verifier. +pub trait TranscriptWrite>: Transcript { + /// Write a curve point to the proof and the transcript. + fn write_point(&mut self, point: C) -> io::Result<()>; + + /// Write a scalar to the proof and the transcript. + fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>; +} + +/// Initializes transcript at verifier side. +pub trait TranscriptReadBuffer>: + TranscriptRead +{ + /// Initialize a transcript given an input buffer. + fn init(reader: R) -> Self; +} + +/// Manages beginning and finishing of transcript pipeline. +pub trait TranscriptWriterBuffer>: + TranscriptWrite +{ + /// Initialize a transcript given an output buffer. + fn init(writer: W) -> Self; + + /// Conclude the interaction and return the output buffer (writer). + fn finalize(self) -> W; +} + +/// We will replace BLAKE2b with an algebraic hash function in a later version. 
+#[derive(Debug, Clone)] +pub struct Blake2bRead> { + state: Blake2bState, + reader: R, + _marker: PhantomData<(C, E)>, +} + +/// Keccak256 hash function reader for EVM compatibility +#[derive(Debug, Clone)] +pub struct Keccak256Read> { + state: Keccak256, + reader: R, + _marker: PhantomData<(C, E)>, +} + +impl TranscriptReadBuffer> + for Blake2bRead> +where + C::Scalar: FromUniformBytes<64>, +{ + /// Initialize a transcript given an input buffer. + fn init(reader: R) -> Self { + Blake2bRead { + state: Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Transcript") + .to_state(), + reader, + _marker: PhantomData, + } + } +} + +impl TranscriptReadBuffer> + for Keccak256Read> +where + C::Scalar: FromUniformBytes<64>, +{ + /// Initialize a transcript given an input buffer. + fn init(reader: R) -> Self { + let mut state = Keccak256::new(); + state.update(b"Halo2-Transcript"); + Keccak256Read { + state, + reader, + _marker: PhantomData, + } + } +} + +impl TranscriptRead> + for Blake2bRead> +where + C::Scalar: FromUniformBytes<64>, +{ + fn read_point(&mut self) -> io::Result { + let mut compressed = C::Repr::default(); + self.reader.read_exact(compressed.as_mut())?; + let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof") + })?; + self.common_point(point)?; + + Ok(point) + } + + fn read_scalar(&mut self) -> io::Result { + let mut data = ::Repr::default(); + self.reader.read_exact(data.as_mut())?; + let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "invalid field element encoding in proof", + ) + })?; + self.common_scalar(scalar)?; + + Ok(scalar) + } +} + +impl TranscriptRead> + for Keccak256Read> +where + C::Scalar: FromUniformBytes<64>, +{ + fn read_point(&mut self) -> io::Result { + let mut compressed = C::Repr::default(); + self.reader.read_exact(compressed.as_mut())?; + let point: C = 
Option::from(C::from_bytes(&compressed)).ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof") + })?; + self.common_point(point)?; + + Ok(point) + } + + fn read_scalar(&mut self) -> io::Result { + let mut data = ::Repr::default(); + self.reader.read_exact(data.as_mut())?; + let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "invalid field element encoding in proof", + ) + })?; + self.common_scalar(scalar)?; + + Ok(scalar) + } +} + +impl Transcript> for Blake2bRead> +where + C::Scalar: FromUniformBytes<64>, +{ + fn squeeze_challenge(&mut self) -> Challenge255 { + self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]); + let hasher = self.state.clone(); + let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap(); + Challenge255::::new(&result) + } + + fn common_point(&mut self, point: C) -> io::Result<()> { + self.state.update(&[BLAKE2B_PREFIX_POINT]); + let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "cannot write points at infinity to the transcript", + ) + })?; + self.state.update(coords.x().to_repr().as_ref()); + self.state.update(coords.y().to_repr().as_ref()); + + Ok(()) + } + + fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { + self.state.update(&[BLAKE2B_PREFIX_SCALAR]); + self.state.update(scalar.to_repr().as_ref()); + + Ok(()) + } +} + +impl Transcript> + for Keccak256Read> +where + C::Scalar: FromUniformBytes<64>, +{ + fn squeeze_challenge(&mut self) -> Challenge255 { + self.state.update([KECCAK256_PREFIX_CHALLENGE]); + + let mut state_lo = self.state.clone(); + let mut state_hi = self.state.clone(); + state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]); + state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]); + let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap(); + let result_hi: [u8; 32] = 
state_hi.finalize().as_slice().try_into().unwrap(); + + let mut t = result_lo.to_vec(); + t.extend_from_slice(&result_hi[..]); + let result: [u8; 64] = t.as_slice().try_into().unwrap(); + + Challenge255::::new(&result) + } + + fn common_point(&mut self, point: C) -> io::Result<()> { + self.state.update([KECCAK256_PREFIX_POINT]); + let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "cannot write points at infinity to the transcript", + ) + })?; + self.state.update(coords.x().to_repr().as_ref()); + self.state.update(coords.y().to_repr().as_ref()); + + Ok(()) + } + + fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { + self.state.update([KECCAK256_PREFIX_SCALAR]); + self.state.update(scalar.to_repr().as_ref()); + + Ok(()) + } +} + +/// We will replace BLAKE2b with an algebraic hash function in a later version. +#[derive(Debug, Clone)] +pub struct Blake2bWrite> { + state: Blake2bState, + writer: W, + _marker: PhantomData<(C, E)>, +} + +/// Keccak256 hash function writer for EVM compatibility +#[derive(Debug, Clone)] +pub struct Keccak256Write> { + state: Keccak256, + writer: W, + _marker: PhantomData<(C, E)>, +} + +impl TranscriptWriterBuffer> + for Blake2bWrite> +where + C::Scalar: FromUniformBytes<64>, +{ + /// Initialize a transcript given an output buffer. + fn init(writer: W) -> Self { + Blake2bWrite { + state: Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Transcript") + .to_state(), + writer, + _marker: PhantomData, + } + } + + fn finalize(self) -> W { + // TODO: handle outstanding scalars? see issue #138 + self.writer + } +} + +impl TranscriptWriterBuffer> + for Keccak256Write> +where + C::Scalar: FromUniformBytes<64>, +{ + /// Initialize a transcript given an output buffer. 
+ fn init(writer: W) -> Self { + let mut state = Keccak256::new(); + state.update(b"Halo2-Transcript"); + Keccak256Write { + state, + writer, + _marker: PhantomData, + } + } + + /// Conclude the interaction and return the output buffer (writer). + fn finalize(self) -> W { + // TODO: handle outstanding scalars? see issue #138 + self.writer + } +} + +impl TranscriptWrite> + for Blake2bWrite> +where + C::Scalar: FromUniformBytes<64>, +{ + fn write_point(&mut self, point: C) -> io::Result<()> { + self.common_point(point)?; + let compressed = point.to_bytes(); + self.writer.write_all(compressed.as_ref()) + } + fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { + self.common_scalar(scalar)?; + let data = scalar.to_repr(); + self.writer.write_all(data.as_ref()) + } +} + +impl TranscriptWrite> + for Keccak256Write> +where + C::Scalar: FromUniformBytes<64>, +{ + fn write_point(&mut self, point: C) -> io::Result<()> { + self.common_point(point)?; + let compressed = point.to_bytes(); + self.writer.write_all(compressed.as_ref()) + } + fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { + self.common_scalar(scalar)?; + let data = scalar.to_repr(); + self.writer.write_all(data.as_ref()) + } +} + +impl Transcript> + for Blake2bWrite> +where + C::Scalar: FromUniformBytes<64>, +{ + fn squeeze_challenge(&mut self) -> Challenge255 { + self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]); + let hasher = self.state.clone(); + let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap(); + Challenge255::::new(&result) + } + + fn common_point(&mut self, point: C) -> io::Result<()> { + self.state.update(&[BLAKE2B_PREFIX_POINT]); + let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "cannot write points at infinity to the transcript", + ) + })?; + self.state.update(coords.x().to_repr().as_ref()); + self.state.update(coords.y().to_repr().as_ref()); + + Ok(()) + } + + fn common_scalar(&mut 
self, scalar: C::Scalar) -> io::Result<()> { + self.state.update(&[BLAKE2B_PREFIX_SCALAR]); + self.state.update(scalar.to_repr().as_ref()); + + Ok(()) + } +} + +impl Transcript> + for Keccak256Write> +where + C::Scalar: FromUniformBytes<64>, +{ + fn squeeze_challenge(&mut self) -> Challenge255 { + self.state.update([KECCAK256_PREFIX_CHALLENGE]); + + let mut state_lo = self.state.clone(); + let mut state_hi = self.state.clone(); + state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]); + state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]); + let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap(); + let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap(); + + let mut t = result_lo.to_vec(); + t.extend_from_slice(&result_hi[..]); + let result: [u8; 64] = t.as_slice().try_into().unwrap(); + + Challenge255::::new(&result) + } + + fn common_point(&mut self, point: C) -> io::Result<()> { + self.state.update([KECCAK256_PREFIX_POINT]); + let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { + io::Error::new( + io::ErrorKind::Other, + "cannot write points at infinity to the transcript", + ) + })?; + self.state.update(coords.x().to_repr().as_ref()); + self.state.update(coords.y().to_repr().as_ref()); + + Ok(()) + } + + fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { + self.state.update([KECCAK256_PREFIX_SCALAR]); + self.state.update(scalar.to_repr().as_ref()); + + Ok(()) + } +} + +/// The scalar representation of a verifier challenge. +/// +/// The `Type` type can be used to scope the challenge to a specific context, or +/// set to `()` if no context is required. 
+#[derive(Copy, Clone, Debug)] +pub struct ChallengeScalar { + inner: C::Scalar, + _marker: PhantomData, +} + +impl std::ops::Deref for ChallengeScalar { + type Target = C::Scalar; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +/// `EncodedChallenge` defines a challenge encoding with a [`Self::Input`] +/// that is used to derive the challenge encoding and `get_challenge` obtains +/// the _real_ `C::Scalar` that the challenge encoding represents. +pub trait EncodedChallenge { + /// The Input type used to derive the challenge encoding. For example, + /// an input from the Poseidon hash would be a base field element; + /// an input from the Blake2b hash would be a [u8; 64]. + type Input; + + /// Get an encoded challenge from a given input challenge. + fn new(challenge_input: &Self::Input) -> Self; + + /// Get a scalar field element from an encoded challenge. + fn get_scalar(&self) -> C::Scalar; + + /// Cast an encoded challenge as a typed `ChallengeScalar`. + fn as_challenge_scalar(&self) -> ChallengeScalar { + ChallengeScalar { + inner: self.get_scalar(), + _marker: PhantomData, + } + } +} + +/// A 255-bit challenge. 
+#[derive(Copy, Clone, Debug)] +pub struct Challenge255([u8; 32], PhantomData); + +impl std::ops::Deref for Challenge255 { + type Target = [u8; 32]; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl EncodedChallenge for Challenge255 +where + C::Scalar: FromUniformBytes<64>, +{ + type Input = [u8; 64]; + + fn new(challenge_input: &[u8; 64]) -> Self { + Challenge255( + C::Scalar::from_uniform_bytes(challenge_input) + .to_repr() + .as_ref() + .try_into() + .expect("Scalar fits into 256 bits"), + PhantomData, + ) + } + fn get_scalar(&self) -> C::Scalar { + let mut repr = ::Repr::default(); + repr.as_mut().copy_from_slice(&self.0); + C::Scalar::from_repr(repr).unwrap() + } +} + +pub(crate) fn read_n_points, T: TranscriptRead>( + transcript: &mut T, + n: usize, +) -> io::Result> { + (0..n).map(|_| transcript.read_point()).collect() +} + +pub(crate) fn read_n_scalars, T: TranscriptRead>( + transcript: &mut T, + n: usize, +) -> io::Result> { + (0..n).map(|_| transcript.read_scalar()).collect() +} diff --git a/halo2_common/Cargo.toml b/halo2_common/Cargo.toml new file mode 100644 index 0000000000..c7ea0882a0 --- /dev/null +++ b/halo2_common/Cargo.toml @@ -0,0 +1,80 @@ +[package] +name = "halo2_common" +version = "0.3.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team" +] +edition = "2021" +rust-version = "1.66.0" +description = """ +TODO +""" +license = "MIT OR Apache-2.0" +repository = "TODO" +documentation = "TODO" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", default-features = false } +tracing = "0.1" 
+blake2b_simd = "1" # MSRV 1.66.0 +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +# Legacy circuit compatibility +halo2_legacy_pdqsort = { version = "0.1.0", optional = true } + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] +bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] +thread-safe-region = [] +sanity-checks = [] +batch = ["rand_core/getrandom"] +circuit-params = [] +heap-profiling = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false diff --git a/halo2_common/src/lib.rs b/halo2_common/src/lib.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/halo2_frontend/Cargo.toml b/halo2_frontend/Cargo.toml new file mode 100644 index 0000000000..bd71de0373 --- /dev/null +++ b/halo2_frontend/Cargo.toml @@ -0,0 +1,80 @@ +[package] +name = "halo2_frontend" +version = "0.3.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.66.0" +description = """ +TODO +""" +license = "MIT OR Apache-2.0" +repository = "TODO" +documentation = "TODO" +readme = "README.md" +categories = 
["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", default-features = false } +tracing = "0.1" +blake2b_simd = "1" # MSRV 1.66.0 +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +# Legacy circuit compatibility +halo2_legacy_pdqsort = { version = "0.1.0", optional = true } + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] +bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] +thread-safe-region = [] +sanity-checks = [] +batch = ["rand_core/getrandom"] +circuit-params = [] +heap-profiling = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false diff --git a/halo2_frontend/src/lib.rs b/halo2_frontend/src/lib.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/halo2_middleware/Cargo.toml b/halo2_middleware/Cargo.toml new file mode 100644 index 
0000000000..55df462101 --- /dev/null +++ b/halo2_middleware/Cargo.toml @@ -0,0 +1,80 @@ +[package] +name = "halo2_middleware" +version = "0.3.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.66.0" +description = """ +TODO +""" +license = "MIT OR Apache-2.0" +repository = "TODO" +documentation = "TODO" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", default-features = false } +tracing = "0.1" +blake2b_simd = "1" # MSRV 1.66.0 +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +# Legacy circuit compatibility +halo2_legacy_pdqsort = { version = "0.1.0", optional = true } + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] +bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] 
+thread-safe-region = [] +sanity-checks = [] +batch = ["rand_core/getrandom"] +circuit-params = [] +heap-profiling = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 9ee1ed931f..6b1cc59a83 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -6,6 +6,7 @@ authors = [ "Ying Tong Lai ", "Daira Hopwood ", "Jack Grigg ", + "Privacy Scaling Explorations team", ] edition = "2021" rust-version = "1.66.0" From 1fc555b9504e33e3eec374dba1c46ea474fba031 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 16 Jan 2024 17:13:40 +0000 Subject: [PATCH 32/79] Checkpoint --- halo2_backend/src/circuit/value.rs | 703 ---------------------------- halo2_backend/src/plonk/circuit.rs | 21 - halo2_middleware/src/circuit.rs | 162 +++++++ halo2_middleware/src/lib.rs | 5 + halo2_middleware/src/lookup.rs | 10 + halo2_middleware/src/permutation.rs | 8 + halo2_middleware/src/poly.rs | 22 + halo2_middleware/src/shuffle.rs | 10 + 8 files changed, 217 insertions(+), 724 deletions(-) delete mode 100644 halo2_backend/src/circuit/value.rs create mode 100644 halo2_middleware/src/circuit.rs create mode 100644 halo2_middleware/src/lookup.rs create mode 100644 halo2_middleware/src/permutation.rs create mode 100644 halo2_middleware/src/poly.rs create mode 100644 halo2_middleware/src/shuffle.rs diff --git a/halo2_backend/src/circuit/value.rs b/halo2_backend/src/circuit/value.rs deleted file mode 100644 index db16a727c1..0000000000 --- a/halo2_backend/src/circuit/value.rs +++ /dev/null @@ -1,703 +0,0 @@ -use std::borrow::Borrow; -use std::ops::{Add, Mul, Neg, Sub}; - -use group::ff::Field; - -use crate::plonk::{Assigned, Error}; - -/// A value that might exist within a circuit. 
-/// -/// This behaves like `Option` but differs in two key ways: -/// - It does not expose the enum cases, or provide an `Option::unwrap` equivalent. This -/// helps to ensure that unwitnessed values correctly propagate. -/// - It provides pass-through implementations of common traits such as `Add` and `Mul`, -/// for improved usability. -#[derive(Clone, Copy, Debug)] -pub struct Value { - inner: Option, -} - -impl Default for Value { - fn default() -> Self { - Self::unknown() - } -} - -impl Value { - /// Constructs an unwitnessed value. - pub const fn unknown() -> Self { - Self { inner: None } - } - - /// Constructs a known value. - /// - /// # Examples - /// - /// ``` - /// use halo2_backend::circuit::Value; - /// - /// let v = Value::known(37); - /// ``` - pub const fn known(value: V) -> Self { - Self { inner: Some(value) } - } - - /// Obtains the inner value for assigning into the circuit. - /// - /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. - pub(crate) fn assign(self) -> Result { - self.inner.ok_or(Error::Synthesis) - } - - /// Converts from `&Value` to `Value<&V>`. - pub fn as_ref(&self) -> Value<&V> { - Value { - inner: self.inner.as_ref(), - } - } - - /// Converts from `&mut Value` to `Value<&mut V>`. - pub fn as_mut(&mut self) -> Value<&mut V> { - Value { - inner: self.inner.as_mut(), - } - } - - /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! - pub(crate) fn into_option(self) -> Option { - self.inner - } - - /// Enforces an assertion on the contained value, if known. - /// - /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce - /// circuit constraints with this method! - /// - /// # Panics - /// - /// Panics if `f` returns `false`. - pub fn assert_if_known bool>(&self, f: F) { - if let Some(value) = self.inner.as_ref() { - assert!(f(value)); - } - } - - /// Checks the contained value for an error condition, if known. - /// - /// The error check is ignored if `self` is [`Value::unknown()`]. 
Do not try to - /// enforce circuit constraints with this method! - pub fn error_if_known_and bool>(&self, f: F) -> Result<(), Error> { - match self.inner.as_ref() { - Some(value) if f(value) => Err(Error::Synthesis), - _ => Ok(()), - } - } - - /// Maps a `Value` to `Value` by applying a function to the contained value. - pub fn map W>(self, f: F) -> Value { - Value { - inner: self.inner.map(f), - } - } - - /// Returns [`Value::unknown()`] if the value is [`Value::unknown()`], otherwise calls - /// `f` with the wrapped value and returns the result. - pub fn and_then Value>(self, f: F) -> Value { - match self.inner { - Some(v) => f(v), - None => Value::unknown(), - } - } - - /// Zips `self` with another `Value`. - /// - /// If `self` is `Value::known(s)` and `other` is `Value::known(o)`, this method - /// returns `Value::known((s, o))`. Otherwise, [`Value::unknown()`] is returned. - pub fn zip(self, other: Value) -> Value<(V, W)> { - Value { - inner: self.inner.zip(other.inner), - } - } -} - -impl Value<(V, W)> { - /// Unzips a value containing a tuple of two values. - /// - /// If `self` is `Value::known((a, b)), this method returns - /// `(Value::known(a), Value::known(b))`. Otherwise, - /// `(Value::unknown(), Value::unknown())` is returned. - pub fn unzip(self) -> (Value, Value) { - match self.inner { - Some((a, b)) => (Value::known(a), Value::known(b)), - None => (Value::unknown(), Value::unknown()), - } - } -} - -impl Value<&V> { - /// Maps a `Value<&V>` to a `Value` by copying the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn copied(self) -> Value - where - V: Copy, - { - Value { - inner: self.inner.copied(), - } - } - - /// Maps a `Value<&V>` to a `Value` by cloning the contents of the value. 
- #[must_use = "`self` will be dropped if the result is not used"] - pub fn cloned(self) -> Value - where - V: Clone, - { - Value { - inner: self.inner.cloned(), - } - } -} - -impl Value<&mut V> { - /// Maps a `Value<&mut V>` to a `Value` by copying the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn copied(self) -> Value - where - V: Copy, - { - Value { - inner: self.inner.copied(), - } - } - - /// Maps a `Value<&mut V>` to a `Value` by cloning the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn cloned(self) -> Value - where - V: Clone, - { - Value { - inner: self.inner.cloned(), - } - } -} - -impl Value<[V; LEN]> { - /// Transposes a `Value<[V; LEN]>` into a `[Value; LEN]`. - /// - /// [`Value::unknown()`] will be mapped to `[Value::unknown(); LEN]`. - pub fn transpose_array(self) -> [Value; LEN] { - let mut ret = [Value::unknown(); LEN]; - if let Some(arr) = self.inner { - for (entry, value) in ret.iter_mut().zip(arr) { - *entry = Value::known(value); - } - } - ret - } -} - -impl Value -where - I: IntoIterator, - I::IntoIter: ExactSizeIterator, -{ - /// Transposes a `Value>` into a `Vec>`. - /// - /// [`Value::unknown()`] will be mapped to `vec![Value::unknown(); length]`. - /// - /// # Panics - /// - /// Panics if `self` is `Value::known(values)` and `values.len() != length`. - pub fn transpose_vec(self, length: usize) -> Vec> { - match self.inner { - Some(values) => { - let values = values.into_iter(); - assert_eq!(values.len(), length); - values.map(Value::known).collect() - } - None => (0..length).map(|_| Value::unknown()).collect(), - } - } -} - -// -// FromIterator -// - -impl> FromIterator> for Value { - /// Takes each element in the [`Iterator`]: if it is [`Value::unknown()`], no further - /// elements are taken, and the [`Value::unknown()`] is returned. 
Should no - /// [`Value::unknown()`] occur, a container of type `V` containing the values of each - /// [`Value`] is returned. - fn from_iter>>(iter: I) -> Self { - Self { - inner: iter.into_iter().map(|v| v.inner).collect(), - } - } -} - -// -// Neg -// - -impl Neg for Value { - type Output = Value; - - fn neg(self) -> Self::Output { - Value { - inner: self.inner.map(|v| -v), - } - } -} - -// -// Add -// - -impl Add for Value -where - V: Add, -{ - type Output = Value; - - fn add(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for &Value -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, b)| a + b), - } - } -} - -impl Add> for Value -where - for<'v> V: Add<&'v V, Output = O>, -{ - type Output = Value; - - fn add(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add> for Value<&V> -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add<&Value> for Value -where - for<'v> V: Add<&'v V, Output = O>, -{ - type Output = Value; - - fn add(self, rhs: &Self) -> Self::Output { - self + rhs.as_ref() - } -} - -impl Add> for &Value -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Value) -> Self::Output { - self.as_ref() + rhs - } -} - -// -// Sub -// - -impl Sub for Value -where - V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for &Value -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, 
b)| a - b), - } - } -} - -impl Sub> for Value -where - for<'v> V: Sub<&'v V, Output = O>, -{ - type Output = Value; - - fn sub(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub> for Value<&V> -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub<&Value> for Value -where - for<'v> V: Sub<&'v V, Output = O>, -{ - type Output = Value; - - fn sub(self, rhs: &Self) -> Self::Output { - self - rhs.as_ref() - } -} - -impl Sub> for &Value -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Value) -> Self::Output { - self.as_ref() - rhs - } -} - -// -// Mul -// - -impl Mul for Value -where - V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for &Value -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, b)| a * b), - } - } -} - -impl Mul> for Value -where - for<'v> V: Mul<&'v V, Output = O>, -{ - type Output = Value; - - fn mul(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul> for Value<&V> -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul<&Value> for Value -where - for<'v> V: Mul<&'v V, Output = O>, -{ - type Output = Value; - - fn mul(self, rhs: &Self) -> Self::Output { - self * rhs.as_ref() - } -} - -impl Mul> for &Value -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Value) -> Self::Output { - self.as_ref() * rhs - } -} - -// -// Assigned -// 
- -impl From> for Value> { - fn from(value: Value) -> Self { - Self { - inner: value.inner.map(Assigned::from), - } - } -} - -impl Add> for Value> { - type Output = Value>; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for Value> { - type Output = Value>; - - fn add(self, rhs: F) -> Self::Output { - self + Value::known(rhs) - } -} - -impl Add> for Value<&Assigned> { - type Output = Value>; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for Value<&Assigned> { - type Output = Value>; - - fn add(self, rhs: F) -> Self::Output { - self + Value::known(rhs) - } -} - -impl Sub> for Value> { - type Output = Value>; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for Value> { - type Output = Value>; - - fn sub(self, rhs: F) -> Self::Output { - self - Value::known(rhs) - } -} - -impl Sub> for Value<&Assigned> { - type Output = Value>; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for Value<&Assigned> { - type Output = Value>; - - fn sub(self, rhs: F) -> Self::Output { - self - Value::known(rhs) - } -} - -impl Mul> for Value> { - type Output = Value>; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for Value> { - type Output = Value>; - - fn mul(self, rhs: F) -> Self::Output { - self * Value::known(rhs) - } -} - -impl Mul> for Value<&Assigned> { - type Output = Value>; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for Value<&Assigned> { - type Output = Value>; - - fn mul(self, rhs: F) -> Self::Output { - self * Value::known(rhs) - } -} - -impl Value { 
- /// Returns the field element corresponding to this value. - pub fn to_field(&self) -> Value> - where - for<'v> Assigned: From<&'v V>, - { - Value { - inner: self.inner.as_ref().map(|v| v.into()), - } - } - - /// Returns the field element corresponding to this value. - pub fn into_field(self) -> Value> - where - V: Into>, - { - Value { - inner: self.inner.map(|v| v.into()), - } - } - - /// Doubles this field element. - /// - /// # Examples - /// - /// If you have a `Value`, convert it to `Value>` first: - /// ``` - /// # use halo2curves::pasta::pallas::Base as F; - /// use halo2_backend::{circuit::Value, plonk::Assigned}; - /// - /// let v = Value::known(F::from(2)); - /// let v: Value> = v.into(); - /// v.double(); - /// ``` - pub fn double(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().double()), - } - } - - /// Squares this field element. - pub fn square(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().square()), - } - } - - /// Cubes this field element. - pub fn cube(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().cube()), - } - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().invert()), - } - } -} - -impl Value> { - /// Evaluates this value directly, performing an unbatched inversion if necessary. - /// - /// If the denominator is zero, the returned value is zero. 
- pub fn evaluate(self) -> Value { - Value { - inner: self.inner.map(|v| v.evaluate()), - } - } -} diff --git a/halo2_backend/src/plonk/circuit.rs b/halo2_backend/src/plonk/circuit.rs index 5357fc8016..9a0b6cd867 100644 --- a/halo2_backend/src/plonk/circuit.rs +++ b/halo2_backend/src/plonk/circuit.rs @@ -1059,23 +1059,6 @@ impl Product for Expression { #[derive(Copy, Clone, Debug)] pub(crate) struct PointIndex(pub usize); -/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset -/// within a custom gate. -#[derive(Clone, Debug)] -pub struct VirtualCell { - pub(crate) column: Column, - pub(crate) rotation: Rotation, -} - -impl>> From<(Col, Rotation)> for VirtualCell { - fn from((column, rotation): (Col, Rotation)) -> Self { - VirtualCell { - column: column.into(), - rotation, - } - } -} - /// An individual polynomial constraint. /// /// These are returned by the closures passed to `ConstraintSystem::create_gate`. @@ -1220,9 +1203,6 @@ pub struct Gate { name: String, constraint_names: Vec, polys: Vec>, - /// We track queried selectors separately from other cells, so that we can use them to - /// trigger debug checks on gates. - queried_cells: Vec, } impl Gate { @@ -1453,7 +1433,6 @@ impl ConstraintSystemV2Backend { name: gate.name.clone(), constraint_names: Vec::new(), polys: vec![queries.as_expression(gate.polynomial())], - queried_cells: Vec::new(), // Unused? 
}) .collect() } diff --git a/halo2_middleware/src/circuit.rs b/halo2_middleware/src/circuit.rs new file mode 100644 index 0000000000..2cc0f1c993 --- /dev/null +++ b/halo2_middleware/src/circuit.rs @@ -0,0 +1,162 @@ +use crate::poly::Rotation; +use crate::{lookup, permutation, shuffle}; +use core::cmp::max; +use ff::Field; + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, + /// Phase of this advice column + pub phase: u8, +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Challenge { + index: usize, + pub(crate) phase: u8, +} + +impl Challenge { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. + pub fn phase(&self) -> u8 { + self.phase + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ExpressionMid { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQueryMid), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQueryMid), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQueryMid), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl ExpressionMid { + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + use ExpressionMid::*; + match self { + Constant(_) => 0, + Fixed(_) => 1, + Advice(_) => 1, + Instance(_) => 1, + Challenge(_) => 0, + Negated(poly) => poly.degree(), + Sum(a, b) => max(a.degree(), b.degree()), + Product(a, b) => a.degree() + b.degree(), + Scaled(poly, _) => poly.degree(), + } + } +} + +/// A Gate contains a single polynomial identity with a name as metadata. +#[derive(Clone, Debug)] +pub struct GateV2Backend { + name: String, + poly: ExpressionMid, +} + +impl GateV2Backend { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &ExpressionMid { + &self.poly + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemV2Backend { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. 
+ pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, +} + +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct PreprocessingV2 { + // TODO(Edu): Can we replace this by a simpler structure? + pub(crate) permutation: permutation::keygen::Assembly, + pub(crate) fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. 
+#[derive(Debug, Clone)] +pub struct CompiledCircuitV2 { + pub(crate) preprocessing: PreprocessingV2, + pub(crate) cs: ConstraintSystemV2Backend, +} diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs index e69de29bb2..82e819f636 100644 --- a/halo2_middleware/src/lib.rs +++ b/halo2_middleware/src/lib.rs @@ -0,0 +1,5 @@ +pub mod circuit; +pub mod lookup; +pub mod permutation; +pub mod poly; +pub mod shuffle; diff --git a/halo2_middleware/src/lookup.rs b/halo2_middleware/src/lookup.rs new file mode 100644 index 0000000000..e43ffb5dbf --- /dev/null +++ b/halo2_middleware/src/lookup.rs @@ -0,0 +1,10 @@ +use super::circuit::ExpressionMid; +use ff::Field; + +/// Expressions involved in a lookup argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) table_expressions: Vec>, +} diff --git a/halo2_middleware/src/permutation.rs b/halo2_middleware/src/permutation.rs new file mode 100644 index 0000000000..0eb27f3030 --- /dev/null +++ b/halo2_middleware/src/permutation.rs @@ -0,0 +1,8 @@ +use crate::circuit::{Any, Column}; + +/// A permutation argument. +#[derive(Debug, Clone)] +pub struct Argument { + /// A sequence of columns involved in the argument. + pub(super) columns: Vec>, +} diff --git a/halo2_middleware/src/poly.rs b/halo2_middleware/src/poly.rs new file mode 100644 index 0000000000..7f5f58eb8d --- /dev/null +++ b/halo2_middleware/src/poly.rs @@ -0,0 +1,22 @@ +/// Describes the relative rotation of a vector. Negative numbers represent +/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) +/// rotations. Zero represents no rotation. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Rotation(pub i32); + +impl Rotation { + /// The current location in the evaluation domain + pub fn cur() -> Rotation { + Rotation(0) + } + + /// The previous location in the evaluation domain + pub fn prev() -> Rotation { + Rotation(-1) + } + + /// The next location in the evaluation domain + pub fn next() -> Rotation { + Rotation(1) + } +} diff --git a/halo2_middleware/src/shuffle.rs b/halo2_middleware/src/shuffle.rs new file mode 100644 index 0000000000..990d507c0d --- /dev/null +++ b/halo2_middleware/src/shuffle.rs @@ -0,0 +1,10 @@ +use super::circuit::ExpressionMid; +use ff::Field; + +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} From 4f47ae693423e7b9fba4bbd9aca6e9234be4659b Mon Sep 17 00:00:00 2001 From: Eduard S Date: Thu, 18 Jan 2024 17:10:31 +0000 Subject: [PATCH 33/79] Checkpoint --- Cargo.toml | 8 +- {halo2_backend => backend}/Cargo.toml | 1 + {halo2_backend => backend}/src/arithmetic.rs | 0 {halo2_backend => backend}/src/dev.rs | 0 backend/src/dev/metadata.rs | 44 + {halo2_backend => backend}/src/helpers.rs | 0 {halo2_backend => backend}/src/lib.rs | 0 {halo2_backend => backend}/src/multicore.rs | 0 {halo2_backend => backend}/src/plonk.rs | 4 +- .../src/plonk/assigned.rs | 0 .../src/plonk/circuit.rs | 1319 +++++++++-------- {halo2_backend => backend}/src/plonk/error.rs | 2 +- .../src/plonk/evaluation.rs | 6 +- .../src/plonk/keygen.rs | 30 +- .../src/plonk/lookup.rs | 17 +- .../src/plonk/lookup/prover.rs | 3 +- .../src/plonk/lookup/verifier.rs | 3 +- .../src/plonk/permutation.rs | 14 +- .../src/plonk/permutation/keygen.rs | 18 +- .../src/plonk/permutation/prover.rs | 6 +- .../src/plonk/permutation/verifier.rs | 6 +- .../src/plonk/prover.rs | 0 .../src/plonk/shuffle.rs | 16 +- 
.../src/plonk/shuffle/prover.rs | 3 +- .../src/plonk/shuffle/verifier.rs | 3 +- .../src/plonk/vanishing.rs | 0 .../src/plonk/vanishing/prover.rs | 0 .../src/plonk/vanishing/verifier.rs | 0 .../src/plonk/verifier.rs | 0 .../src/plonk/verifier/batch.rs | 0 {halo2_backend => backend}/src/poly.rs | 45 +- .../src/poly/commitment.rs | 0 {halo2_backend => backend}/src/poly/domain.rs | 0 .../src/poly/ipa/commitment.rs | 0 .../src/poly/ipa/commitment/prover.rs | 0 .../src/poly/ipa/commitment/verifier.rs | 0 .../src/poly/ipa/mod.rs | 0 .../src/poly/ipa/msm.rs | 0 .../src/poly/ipa/multiopen.rs | 0 .../src/poly/ipa/multiopen/prover.rs | 0 .../src/poly/ipa/multiopen/verifier.rs | 0 .../src/poly/ipa/strategy.rs | 0 .../src/poly/kzg/commitment.rs | 0 .../src/poly/kzg/mod.rs | 0 .../src/poly/kzg/msm.rs | 0 .../src/poly/kzg/multiopen.rs | 0 .../src/poly/kzg/multiopen/gwc.rs | 0 .../src/poly/kzg/multiopen/gwc/prover.rs | 0 .../src/poly/kzg/multiopen/gwc/verifier.rs | 0 .../src/poly/kzg/multiopen/shplonk.rs | 0 .../src/poly/kzg/multiopen/shplonk/prover.rs | 0 .../poly/kzg/multiopen/shplonk/verifier.rs | 0 .../src/poly/kzg/strategy.rs | 0 .../src/poly/multiopen_test.rs | 0 {halo2_backend => backend}/src/poly/query.rs | 0 .../src/poly/strategy.rs | 0 {halo2_backend => backend}/src/transcript.rs | 0 {halo2_common => common}/Cargo.toml | 0 {halo2_common => common}/src/lib.rs | 0 {halo2_frontend => frontend}/Cargo.toml | 0 {halo2_frontend => frontend}/src/lib.rs | 0 halo2_middleware/src/circuit.rs | 162 -- halo2_middleware/src/lib.rs | 5 - halo2_middleware/src/permutation.rs | 8 - halo2_proofs/src/plonk/circuit.rs | 8 +- halo2_proofs/src/plonk/keygen.rs | 26 +- halo2_proofs/src/plonk/permutation/keygen.rs | 83 ++ {halo2_middleware => middleware}/Cargo.toml | 0 middleware/src/circuit.rs | 476 ++++++ middleware/src/lib.rs | 105 ++ .../src/lookup.rs | 6 +- .../src/dev => middleware/src}/metadata.rs | 10 +- middleware/src/permutation.rs | 76 + {halo2_middleware => middleware}/src/poly.rs | 0 
.../src/shuffle.rs | 6 +- 75 files changed, 1591 insertions(+), 928 deletions(-) rename {halo2_backend => backend}/Cargo.toml (97%) rename {halo2_backend => backend}/src/arithmetic.rs (100%) rename {halo2_backend => backend}/src/dev.rs (100%) create mode 100644 backend/src/dev/metadata.rs rename {halo2_backend => backend}/src/helpers.rs (100%) rename {halo2_backend => backend}/src/lib.rs (100%) rename {halo2_backend => backend}/src/multicore.rs (100%) rename {halo2_backend => backend}/src/plonk.rs (99%) rename {halo2_backend => backend}/src/plonk/assigned.rs (100%) rename {halo2_backend => backend}/src/plonk/circuit.rs (66%) rename {halo2_backend => backend}/src/plonk/error.rs (98%) rename {halo2_backend => backend}/src/plonk/evaluation.rs (99%) rename {halo2_backend => backend}/src/plonk/keygen.rs (86%) rename {halo2_backend => backend}/src/plonk/lookup.rs (90%) rename {halo2_backend => backend}/src/plonk/lookup/prover.rs (99%) rename {halo2_backend => backend}/src/plonk/lookup/verifier.rs (98%) rename {halo2_backend => backend}/src/plonk/permutation.rs (90%) rename {halo2_backend => backend}/src/plonk/permutation/keygen.rs (96%) rename {halo2_backend => backend}/src/plonk/permutation/prover.rs (98%) rename {halo2_backend => backend}/src/plonk/permutation/verifier.rs (98%) rename {halo2_backend => backend}/src/plonk/prover.rs (100%) rename {halo2_backend => backend}/src/plonk/shuffle.rs (86%) rename {halo2_backend => backend}/src/plonk/shuffle/prover.rs (99%) rename {halo2_backend => backend}/src/plonk/shuffle/verifier.rs (98%) rename {halo2_backend => backend}/src/plonk/vanishing.rs (100%) rename {halo2_backend => backend}/src/plonk/vanishing/prover.rs (100%) rename {halo2_backend => backend}/src/plonk/vanishing/verifier.rs (100%) rename {halo2_backend => backend}/src/plonk/verifier.rs (100%) rename {halo2_backend => backend}/src/plonk/verifier/batch.rs (100%) rename {halo2_backend => backend}/src/poly.rs (92%) rename {halo2_backend => 
backend}/src/poly/commitment.rs (100%) rename {halo2_backend => backend}/src/poly/domain.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/commitment.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/commitment/prover.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/commitment/verifier.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/mod.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/msm.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/multiopen.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/multiopen/prover.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/multiopen/verifier.rs (100%) rename {halo2_backend => backend}/src/poly/ipa/strategy.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/commitment.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/mod.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/msm.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/gwc.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/gwc/prover.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/gwc/verifier.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/shplonk.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/shplonk/prover.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/multiopen/shplonk/verifier.rs (100%) rename {halo2_backend => backend}/src/poly/kzg/strategy.rs (100%) rename {halo2_backend => backend}/src/poly/multiopen_test.rs (100%) rename {halo2_backend => backend}/src/poly/query.rs (100%) rename {halo2_backend => backend}/src/poly/strategy.rs (100%) rename {halo2_backend => backend}/src/transcript.rs (100%) rename {halo2_common => common}/Cargo.toml (100%) rename {halo2_common => common}/src/lib.rs (100%) rename {halo2_frontend => frontend}/Cargo.toml (100%) rename {halo2_frontend => frontend}/src/lib.rs (100%) delete mode 100644 
halo2_middleware/src/circuit.rs delete mode 100644 halo2_middleware/src/lib.rs delete mode 100644 halo2_middleware/src/permutation.rs rename {halo2_middleware => middleware}/Cargo.toml (100%) create mode 100644 middleware/src/circuit.rs create mode 100644 middleware/src/lib.rs rename {halo2_middleware => middleware}/src/lookup.rs (56%) rename {halo2_backend/src/dev => middleware/src}/metadata.rs (81%) create mode 100644 middleware/src/permutation.rs rename {halo2_middleware => middleware}/src/poly.rs (100%) rename {halo2_middleware => middleware}/src/shuffle.rs (55%) diff --git a/Cargo.toml b/Cargo.toml index 458f57fbe9..0b5d9a1ccf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,8 +2,8 @@ members = [ "halo2", "halo2_proofs", - "halo2_frontend", - "halo2_middleware", - "halo2_backend", - "halo2_common", + "frontend", + "middleware", + "backend", + "common", ] diff --git a/halo2_backend/Cargo.toml b/backend/Cargo.toml similarity index 97% rename from halo2_backend/Cargo.toml rename to backend/Cargo.toml index 79b609d9b7..cf1987c3dc 100644 --- a/halo2_backend/Cargo.toml +++ b/backend/Cargo.toml @@ -37,6 +37,7 @@ rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" +halo2_middleware = { path = "../middleware" } # Developer tooling dependencies plotters = { version = "0.3.0", default-features = false, optional = true } diff --git a/halo2_backend/src/arithmetic.rs b/backend/src/arithmetic.rs similarity index 100% rename from halo2_backend/src/arithmetic.rs rename to backend/src/arithmetic.rs diff --git a/halo2_backend/src/dev.rs b/backend/src/dev.rs similarity index 100% rename from halo2_backend/src/dev.rs rename to backend/src/dev.rs diff --git a/backend/src/dev/metadata.rs b/backend/src/dev/metadata.rs new file mode 100644 index 0000000000..eeea0cb3d1 --- /dev/null +++ b/backend/src/dev/metadata.rs @@ -0,0 +1,44 @@ +//! Metadata about circuits. 
+ +// use crate::plonk::{self, Any}; +// use std::fmt::{self, Debug}; +// /// Metadata about a column within a circuit. +// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +// pub struct Column { +// /// The type of the column. +// pub(super) column_type: Any, +// /// The index of the column. +// pub(super) index: usize, +// } +// +// impl Column { +// /// Return the column type. +// pub fn column_type(&self) -> Any { +// self.column_type +// } +// /// Return the column index. +// pub fn index(&self) -> usize { +// self.index +// } +// } +// +// impl fmt::Display for Column { +// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +// write!(f, "Column('{:?}', {})", self.column_type, self.index) +// } +// } +// +// impl From<(Any, usize)> for Column { +// fn from((column_type, index): (Any, usize)) -> Self { +// Column { column_type, index } +// } +// } +// +// impl From> for Column { +// fn from(column: plonk::Column) -> Self { +// Column { +// column_type: *column.column_type(), +// index: column.index(), +// } +// } +// } diff --git a/halo2_backend/src/helpers.rs b/backend/src/helpers.rs similarity index 100% rename from halo2_backend/src/helpers.rs rename to backend/src/helpers.rs diff --git a/halo2_backend/src/lib.rs b/backend/src/lib.rs similarity index 100% rename from halo2_backend/src/lib.rs rename to backend/src/lib.rs diff --git a/halo2_backend/src/multicore.rs b/backend/src/multicore.rs similarity index 100% rename from halo2_backend/src/multicore.rs rename to backend/src/multicore.rs diff --git a/halo2_backend/src/plonk.rs b/backend/src/plonk.rs similarity index 99% rename from halo2_backend/src/plonk.rs rename to backend/src/plonk.rs index 4bd8366c58..1e0f8ed4d0 100644 --- a/halo2_backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -14,10 +14,12 @@ use crate::helpers::{ }; use crate::poly::{ Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, Rotation, + Polynomial, }; use 
crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; use crate::SerdeFormat; +use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; +use halo2_middleware::poly::Rotation; mod assigned; mod circuit; diff --git a/halo2_backend/src/plonk/assigned.rs b/backend/src/plonk/assigned.rs similarity index 100% rename from halo2_backend/src/plonk/assigned.rs rename to backend/src/plonk/assigned.rs diff --git a/halo2_backend/src/plonk/circuit.rs b/backend/src/plonk/circuit.rs similarity index 66% rename from halo2_backend/src/plonk/circuit.rs rename to backend/src/plonk/circuit.rs index 9a0b6cd867..ad1f224517 100644 --- a/halo2_backend/src/plonk/circuit.rs +++ b/backend/src/plonk/circuit.rs @@ -1,9 +1,14 @@ use super::{lookup, permutation, shuffle, Queries}; -use crate::dev::metadata; -use crate::poly::Rotation; +// use crate::dev::metadata; use core::cmp::max; use core::ops::{Add, Mul}; use ff::Field; +use halo2_middleware::circuit::{ + Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, + Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, +}; +use halo2_middleware::metadata; +use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::HashMap; use std::fmt::Debug; @@ -13,80 +18,81 @@ use std::{ ops::{Neg, Sub}, }; -/// A column type -pub trait ColumnType: - 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into -{ - /// Return expression from cell - fn query_cell(&self, index: usize, at: Rotation) -> Expression; -} - -/// A column with an index and type -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Column { - index: usize, - column_type: C, -} - -impl Column { - pub(crate) fn new(index: usize, column_type: C) -> Self { - Column { index, column_type } - } - - /// Index of this column. - pub fn index(&self) -> usize { - self.index - } - - /// Type of this column. 
- pub fn column_type(&self) -> &C { - &self.column_type - } - - /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> Expression { - self.column_type.query_cell(self.index, at) - } - - /// Return expression from column at the current row - pub fn cur(&self) -> Expression { - self.query_cell(Rotation::cur()) - } - - /// Return expression from column at the next row - pub fn next(&self) -> Expression { - self.query_cell(Rotation::next()) - } - - /// Return expression from column at the previous row - pub fn prev(&self) -> Expression { - self.query_cell(Rotation::prev()) - } - - /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> Expression { - self.query_cell(Rotation(rotation)) - } -} - -impl Ord for Column { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match self.column_type.into().cmp(&other.column_type.into()) { - // Indices are assigned within column types. - std::cmp::Ordering::Equal => self.index.cmp(&other.index), - order => order, - } - } -} - -impl PartialOrd for Column { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - +// /// A column type +// pub trait ColumnType: +// 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into +// { +// /// Return expression from cell +// fn query_cell(&self, index: usize, at: Rotation) -> Expression; +// } +// +// /// A column with an index and type +// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +// pub struct Column { +// index: usize, +// column_type: C, +// } +// +// impl Column { +// pub(crate) fn new(index: usize, column_type: C) -> Self { +// Column { index, column_type } +// } +// +// /// Index of this column. +// pub fn index(&self) -> usize { +// self.index +// } +// +// /// Type of this column. 
+// pub fn column_type(&self) -> &C { +// &self.column_type +// } +// +// /// Return expression from column at a relative position +// pub fn query_cell(&self, at: Rotation) -> Expression { +// self.column_type.query_cell(self.index, at) +// } +// +// /// Return expression from column at the current row +// pub fn cur(&self) -> Expression { +// self.query_cell(Rotation::cur()) +// } +// +// /// Return expression from column at the next row +// pub fn next(&self) -> Expression { +// self.query_cell(Rotation::next()) +// } +// +// /// Return expression from column at the previous row +// pub fn prev(&self) -> Expression { +// self.query_cell(Rotation::prev()) +// } +// +// /// Return expression from column at the specified rotation +// pub fn rot(&self, rotation: i32) -> Expression { +// self.query_cell(Rotation(rotation)) +// } +// } +// +// impl Ord for Column { +// fn cmp(&self, other: &Self) -> std::cmp::Ordering { +// // This ordering is consensus-critical! The layouters rely on deterministic column +// // orderings. +// match self.column_type.into().cmp(&other.column_type.into()) { +// // Indices are assigned within column types. +// std::cmp::Ordering::Equal => self.index.cmp(&other.index), +// order => order, +// } +// } +// } +// +// impl PartialOrd for Column { +// fn partial_cmp(&self, other: &Self) -> Option { +// Some(self.cmp(other)) +// } +// } + +// TODO: No sealed Phase on the backend, only in the frontend! 
pub(crate) mod sealed { /// Phase of advice column #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] @@ -145,263 +151,263 @@ impl SealedPhase for super::ThirdPhase { } } -/// An advice column -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Advice { - pub(crate) phase: sealed::Phase, -} - -impl Default for Advice { - fn default() -> Advice { - Advice { - phase: FirstPhase.to_sealed(), - } - } -} - -impl Advice { - /// Returns `Advice` in given `Phase` - pub fn new(phase: P) -> Advice { - Advice { - phase: phase.to_sealed(), - } - } - - /// Phase of this column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -impl std::fmt::Debug for Advice { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. - if self.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &self.phase); - } - debug_struct.finish() - } -} - -/// A fixed column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Fixed; - -/// An instance column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Instance; - -/// An enum over the Advice, Fixed, Instance structs -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub enum Any { - /// An Advice variant - Advice(Advice), - /// A Fixed variant - Fixed, - /// An Instance variant - Instance, -} - -impl Any { - /// Returns Advice variant in `FirstPhase` - pub fn advice() -> Any { - Any::Advice(Advice::default()) - } - - /// Returns Advice variant in given `Phase` - pub fn advice_in(phase: P) -> Any { - Any::Advice(Advice::new(phase)) - } -} - -impl std::fmt::Debug for Any { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Any::Advice(advice) => { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. 
- if advice.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &advice.phase); - } - debug_struct.finish() - } - Any::Fixed => f.debug_struct("Fixed").finish(), - Any::Instance => f.debug_struct("Instance").finish(), - } - } -} - -impl Ord for Any { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match (self, other) { - (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, - (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), - // Across column types, sort Instance < Advice < Fixed. - (Any::Instance, Any::Advice(_)) - | (Any::Advice(_), Any::Fixed) - | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, - (Any::Fixed, Any::Instance) - | (Any::Fixed, Any::Advice(_)) - | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, - } - } -} - -impl PartialOrd for Any { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl ColumnType for Advice { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: self.phase, - }) - } -} -impl ColumnType for Fixed { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Instance { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Any { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - match self { - Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: *phase, - }), - Any::Fixed => Expression::Fixed(FixedQuery { - index: None, - column_index: index, - 
rotation: at, - }), - Any::Instance => Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }), - } - } -} - -impl From for Any { - fn from(advice: Advice) -> Any { - Any::Advice(advice) - } -} - -impl From for Any { - fn from(_: Fixed) -> Any { - Any::Fixed - } -} - -impl From for Any { - fn from(_: Instance) -> Any { - Any::Instance - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Advice(advice.column_type), - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Fixed, - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Instance, - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Advice(advice) => Ok(Column { - index: any.index(), - column_type: *advice, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Fixed => Ok(Column { - index: any.index(), - column_type: Fixed, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Instance => Ok(Column { - index: any.index(), - column_type: Instance, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} +// /// An advice column +// #[derive(Clone, Copy, Eq, PartialEq, Hash)] +// pub struct Advice { +// pub(crate) phase: sealed::Phase, +// } 
+// +// impl Default for Advice { +// fn default() -> Advice { +// Advice { +// phase: FirstPhase.to_sealed(), +// } +// } +// } +// +// impl Advice { +// /// Returns `Advice` in given `Phase` +// pub fn new(phase: P) -> Advice { +// Advice { +// phase: phase.to_sealed(), +// } +// } +// +// /// Phase of this column +// pub fn phase(&self) -> u8 { +// self.phase.0 +// } +// } +// +// impl std::fmt::Debug for Advice { +// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +// let mut debug_struct = f.debug_struct("Advice"); +// // Only show advice's phase if it's not in first phase. +// if self.phase != FirstPhase.to_sealed() { +// debug_struct.field("phase", &self.phase); +// } +// debug_struct.finish() +// } +// } +// +// /// A fixed column +// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +// pub struct Fixed; +// +// /// An instance column +// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +// pub struct Instance; +// +// /// An enum over the Advice, Fixed, Instance structs +// #[derive(Clone, Copy, Eq, PartialEq, Hash)] +// pub enum Any { +// /// An Advice variant +// Advice(Advice), +// /// A Fixed variant +// Fixed, +// /// An Instance variant +// Instance, +// } +// +// impl Any { +// /// Returns Advice variant in `FirstPhase` +// pub fn advice() -> Any { +// Any::Advice(Advice::default()) +// } +// +// /// Returns Advice variant in given `Phase` +// pub fn advice_in(phase: P) -> Any { +// Any::Advice(Advice::new(phase)) +// } +// } +// +// impl std::fmt::Debug for Any { +// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +// match self { +// Any::Advice(advice) => { +// let mut debug_struct = f.debug_struct("Advice"); +// // Only show advice's phase if it's not in first phase. 
+// if advice.phase != FirstPhase.to_sealed() { +// debug_struct.field("phase", &advice.phase); +// } +// debug_struct.finish() +// } +// Any::Fixed => f.debug_struct("Fixed").finish(), +// Any::Instance => f.debug_struct("Instance").finish(), +// } +// } +// } +// +// impl Ord for Any { +// fn cmp(&self, other: &Self) -> std::cmp::Ordering { +// // This ordering is consensus-critical! The layouters rely on deterministic column +// // orderings. +// match (self, other) { +// (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, +// (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), +// // Across column types, sort Instance < Advice < Fixed. +// (Any::Instance, Any::Advice(_)) +// | (Any::Advice(_), Any::Fixed) +// | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, +// (Any::Fixed, Any::Instance) +// | (Any::Fixed, Any::Advice(_)) +// | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, +// } +// } +// } +// +// impl PartialOrd for Any { +// fn partial_cmp(&self, other: &Self) -> Option { +// Some(self.cmp(other)) +// } +// } +// +// impl ColumnType for Advice { +// fn query_cell(&self, index: usize, at: Rotation) -> Expression { +// Expression::Advice(AdviceQuery { +// index: None, +// column_index: index, +// rotation: at, +// phase: self.phase, +// }) +// } +// } +// impl ColumnType for Fixed { +// fn query_cell(&self, index: usize, at: Rotation) -> Expression { +// Expression::Fixed(FixedQuery { +// index: None, +// column_index: index, +// rotation: at, +// }) +// } +// } +// impl ColumnType for Instance { +// fn query_cell(&self, index: usize, at: Rotation) -> Expression { +// Expression::Instance(InstanceQuery { +// index: None, +// column_index: index, +// rotation: at, +// }) +// } +// } +// impl ColumnType for Any { +// fn query_cell(&self, index: usize, at: Rotation) -> Expression { +// match self { +// Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { +// index: None, 
+// column_index: index, +// rotation: at, +// phase: *phase, +// }), +// Any::Fixed => Expression::Fixed(FixedQuery { +// index: None, +// column_index: index, +// rotation: at, +// }), +// Any::Instance => Expression::Instance(InstanceQuery { +// index: None, +// column_index: index, +// rotation: at, +// }), +// } +// } +// } +// +// impl From for Any { +// fn from(advice: Advice) -> Any { +// Any::Advice(advice) +// } +// } +// +// impl From for Any { +// fn from(_: Fixed) -> Any { +// Any::Fixed +// } +// } +// +// impl From for Any { +// fn from(_: Instance) -> Any { +// Any::Instance +// } +// } +// +// impl From> for Column { +// fn from(advice: Column) -> Column { +// Column { +// index: advice.index(), +// column_type: Any::Advice(advice.column_type), +// } +// } +// } +// +// impl From> for Column { +// fn from(advice: Column) -> Column { +// Column { +// index: advice.index(), +// column_type: Any::Fixed, +// } +// } +// } +// +// impl From> for Column { +// fn from(advice: Column) -> Column { +// Column { +// index: advice.index(), +// column_type: Any::Instance, +// } +// } +// } +// +// impl TryFrom> for Column { +// type Error = &'static str; +// +// fn try_from(any: Column) -> Result { +// match any.column_type() { +// Any::Advice(advice) => Ok(Column { +// index: any.index(), +// column_type: *advice, +// }), +// _ => Err("Cannot convert into Column"), +// } +// } +// } +// +// impl TryFrom> for Column { +// type Error = &'static str; +// +// fn try_from(any: Column) -> Result { +// match any.column_type() { +// Any::Fixed => Ok(Column { +// index: any.index(), +// column_type: Fixed, +// }), +// _ => Err("Cannot convert into Column"), +// } +// } +// } +// +// impl TryFrom> for Column { +// type Error = &'static str; +// +// fn try_from(any: Column) -> Result { +// match any.column_type() { +// Any::Instance => Ok(Column { +// index: any.index(), +// column_type: Instance, +// }), +// _ => Err("Cannot convert into Column"), +// } +// } +// } + 
+// /// Query of fixed column at a certain relative location +// #[derive(Copy, Clone, Debug, PartialEq, Eq)] +// pub struct FixedQueryMid { +// /// Column index +// pub column_index: usize, +// /// Rotation of this query +// pub rotation: Rotation, +// } /// Query of fixed column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -426,16 +432,16 @@ impl FixedQuery { } } -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, - /// Phase of this advice column - pub phase: sealed::Phase, -} +// /// Query of advice column at a certain relative location +// #[derive(Copy, Clone, Debug, PartialEq, Eq)] +// pub struct AdviceQueryMid { +// /// Column index +// pub column_index: usize, +// /// Rotation of this query +// pub rotation: Rotation, +// /// Phase of this advice column +// pub phase: sealed::Phase, +// } /// Query of advice column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -467,14 +473,14 @@ impl AdviceQuery { } } -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} +// /// Query of instance column at a certain relative location +// #[derive(Copy, Clone, Debug, PartialEq, Eq)] +// pub struct InstanceQueryMid { +// /// Column index +// pub column_index: usize, +// /// Rotation of this query +// pub rotation: Rotation, +// } /// Query of instance column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -499,70 +505,70 @@ impl InstanceQuery { } } -/// A challenge squeezed from transcript after advice columns at the phase have been committed. 
-#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Challenge { - index: usize, - pub(crate) phase: sealed::Phase, -} - -impl Challenge { - /// Index of this challenge. - pub fn index(&self) -> usize { - self.index - } - - /// Phase of this challenge. - pub fn phase(&self) -> u8 { - self.phase.0 - } - - /// Return Expression - pub fn expr(&self) -> Expression { - Expression::Challenge(*self) - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum ExpressionMid { - /// This is a constant polynomial - Constant(F), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQueryMid), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQueryMid), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQueryMid), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl ExpressionMid { - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - use ExpressionMid::*; - match self { - Constant(_) => 0, - Fixed(_) => 1, - Advice(_) => 1, - Instance(_) => 1, - Challenge(_) => 0, - Negated(poly) => poly.degree(), - Sum(a, b) => max(a.degree(), b.degree()), - Product(a, b) => a.degree() + b.degree(), - Scaled(poly, _) => poly.degree(), - } - } -} +// /// A challenge squeezed from transcript after advice columns at the phase have been committed. +// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +// pub struct Challenge { +// index: usize, +// pub(crate) phase: sealed::Phase, +// } +// +// impl Challenge { +// /// Index of this challenge. 
+// pub fn index(&self) -> usize { +// self.index +// } +// +// /// Phase of this challenge. +// pub fn phase(&self) -> u8 { +// self.phase.0 +// } +// +// /// Return Expression +// pub fn expr(&self) -> Expression { +// Expression::Challenge(*self) +// } +// } + +// /// Low-degree expression representing an identity that must hold over the committed columns. +// #[derive(Clone, Debug, PartialEq, Eq)] +// pub enum ExpressionMid { +// /// This is a constant polynomial +// Constant(F), +// /// This is a fixed column queried at a certain relative location +// Fixed(FixedQueryMid), +// /// This is an advice (witness) column queried at a certain relative location +// Advice(AdviceQueryMid), +// /// This is an instance (external) column queried at a certain relative location +// Instance(InstanceQueryMid), +// /// This is a challenge +// Challenge(Challenge), +// /// This is a negated polynomial +// Negated(Box>), +// /// This is the sum of two polynomials +// Sum(Box>, Box>), +// /// This is the product of two polynomials +// Product(Box>, Box>), +// /// This is a scaled polynomial +// Scaled(Box>, F), +// } + +// impl ExpressionMid { +// /// Compute the degree of this polynomial +// pub fn degree(&self) -> usize { +// use ExpressionMid::*; +// match self { +// Constant(_) => 0, +// Fixed(_) => 1, +// Advice(_) => 1, +// Instance(_) => 1, +// Challenge(_) => 0, +// Negated(poly) => poly.degree(), +// Sum(a, b) => max(a.degree(), b.degree()), +// Product(a, b) => a.degree() + b.degree(), +// Scaled(poly, _) => poly.degree(), +// } +// } +// } /// Low-degree expression representing an identity that must hold over the committed columns. 
#[derive(Clone, PartialEq, Eq)] @@ -607,7 +613,7 @@ impl Into> for Expression { }) => ExpressionMid::Advice(AdviceQueryMid { column_index, rotation, - phase, + phase: phase.0, }), Expression::Instance(InstanceQuery { column_index, @@ -1178,24 +1184,24 @@ impl>, Iter: IntoIterator> IntoIterato } } -/// A Gate contains a single polynomial identity with a name as metadata. -#[derive(Clone, Debug)] -pub struct GateV2Backend { - name: String, - poly: ExpressionMid, -} - -impl GateV2Backend { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the polynomial identity of this gate - pub fn polynomial(&self) -> &ExpressionMid { - &self.poly - } -} +// /// A Gate contains a single polynomial identity with a name as metadata. +// #[derive(Clone, Debug)] +// pub struct GateV2Backend { +// name: String, +// poly: ExpressionMid, +// } +// +// impl GateV2Backend { +// /// Returns the gate name. +// pub fn name(&self) -> &str { +// self.name.as_str() +// } +// +// /// Returns the polynomial identity of this gate +// pub fn polynomial(&self) -> &ExpressionMid { +// &self.poly +// } +// } /// Gate #[derive(Clone, Debug)] @@ -1222,21 +1228,21 @@ impl Gate { } } -/// Data that needs to be preprocessed from a circuit -#[derive(Debug, Clone)] -pub struct PreprocessingV2 { - // TODO(Edu): Can we replace this by a simpler structure? - pub(crate) permutation: permutation::keygen::Assembly, - pub(crate) fixed: Vec>, -} +// /// Data that needs to be preprocessed from a circuit +// #[derive(Debug, Clone)] +// pub struct PreprocessingV2 { +// // TODO(Edu): Can we replace this by a simpler structure? +// pub(crate) permutation: permutation::keygen::Assembly, +// pub(crate) fixed: Vec>, +// } -/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System -/// as well as the fixed columns and copy constraints information. 
-#[derive(Debug, Clone)] -pub struct CompiledCircuitV2 { - pub(crate) preprocessing: PreprocessingV2, - pub(crate) cs: ConstraintSystemV2Backend, -} +// /// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +// /// as well as the fixed columns and copy constraints information. +// #[derive(Debug, Clone)] +// pub struct CompiledCircuitV2 { +// pub(crate) preprocessing: PreprocessingV2, +// pub(crate) cs: ConstraintSystemV2Backend, +// } struct QueriesMap { advice_map: HashMap<(Column, Rotation), usize>, @@ -1291,7 +1297,7 @@ impl QueriesMap { index: Some(index), column_index: query.column_index, rotation: query.rotation, - phase: query.phase, + phase: sealed::Phase(query.phase), }) } ExpressionMid::Instance(query) => { @@ -1318,219 +1324,224 @@ impl QueriesMap { } } -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystemV2Backend { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. 
- pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, -} - -impl Into> for ConstraintSystem { - fn into(self) -> ConstraintSystemV2Backend { - ConstraintSystemV2Backend { - num_fixed_columns: self.num_fixed_columns, - num_advice_columns: self.num_advice_columns, - num_instance_columns: self.num_instance_columns, - num_challenges: self.num_challenges, - unblinded_advice_columns: self.unblinded_advice_columns.clone(), - advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), - challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), - gates: self - .gates +// /// This is a description of the circuit environment, such as the gate, column and +// /// permutation arrangements. +// #[derive(Debug, Clone)] +// pub struct ConstraintSystemV2Backend { +// pub(crate) num_fixed_columns: usize, +// pub(crate) num_advice_columns: usize, +// pub(crate) num_instance_columns: usize, +// pub(crate) num_challenges: usize, +// +// /// Contains the index of each advice column that is left unblinded. +// pub(crate) unblinded_advice_columns: Vec, +// +// /// Contains the phase for each advice column. Should have same length as num_advice_columns. +// pub(crate) advice_column_phase: Vec, +// /// Contains the phase for each challenge. Should have same length as num_challenges. +// pub(crate) challenge_phase: Vec, +// +// pub(crate) gates: Vec>, +// +// // Permutation argument for performing equality constraints +// pub(crate) permutation: permutation::Argument, +// +// // Vector of lookup arguments, where each corresponds to a sequence of +// // input expressions and a sequence of table expressions involved in the lookup. 
+// pub(crate) lookups: Vec>, +// +// // Vector of shuffle arguments, where each corresponds to a sequence of +// // input expressions and a sequence of shuffle expressions involved in the shuffle. +// pub(crate) shuffles: Vec>, +// +// // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. +// pub(crate) general_column_annotations: HashMap, +// } + +// impl Into> for ConstraintSystem { +// fn into(self) -> ConstraintSystemV2Backend { +// ConstraintSystemV2Backend { +// num_fixed_columns: self.num_fixed_columns, +// num_advice_columns: self.num_advice_columns, +// num_instance_columns: self.num_instance_columns, +// num_challenges: self.num_challenges, +// unblinded_advice_columns: self.unblinded_advice_columns.clone(), +// advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), +// challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), +// gates: self +// .gates +// .iter() +// .map(|g| { +// g.polys.clone().into_iter().enumerate().map(|(i, e)| { +// let name = match g.constraint_name(i) { +// "" => g.name.clone(), +// constraint_name => format!("{}:{}", g.name, constraint_name), +// }; +// GateV2Backend { +// name, +// poly: e.into(), +// } +// }) +// }) +// .flatten() +// .collect(), +// permutation: self.permutation.clone().into(), +// lookups: self +// .lookups +// .iter() +// .map(|l| halo2_middleware::lookup::ArgumentV2 { +// name: l.name.clone(), +// input_expressions: l +// .input_expressions +// .clone() +// .into_iter() +// .map(|e| e.into()) +// .collect(), +// table_expressions: l +// .table_expressions +// .clone() +// .into_iter() +// .map(|e| e.into()) +// .collect(), +// }) +// .collect(), +// shuffles: self +// .shuffles +// .iter() +// .map(|s| halo2_middleware::shuffle::ArgumentV2 { +// name: s.name.clone(), +// input_expressions: s +// .input_expressions +// .clone() +// .into_iter() +// .map(|e| e.into()) +// .collect(), +// shuffle_expressions: s +// 
.shuffle_expressions +// .clone() +// .into_iter() +// .map(|e| e.into()) +// .collect(), +// }) +// .collect(), +// general_column_annotations: self.general_column_annotations.clone(), +// } +// } +// } + +/// Collect queries used in gates while mapping those gates to equivalent ones with indexed +/// query references in the expressions. +fn collect_queries_gates( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], + }) + .collect() +} + +/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. +fn collect_queries_lookups( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions .iter() - .map(|g| { - g.polys.clone().into_iter().enumerate().map(|(i, e)| { - let name = match g.constraint_name(i) { - "" => g.name.clone(), - constraint_name => format!("{}:{}", g.name, constraint_name), - }; - GateV2Backend { - name, - poly: e.into(), - } - }) - }) - .flatten() + .map(|e| queries.as_expression(e)) .collect(), - permutation: self.permutation.clone(), - lookups: self - .lookups + table_expressions: lookup + .table_expressions .iter() - .map(|l| lookup::ArgumentV2 { - name: l.name.clone(), - input_expressions: l - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - table_expressions: l - .table_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) + .map(|e| queries.as_expression(e)) .collect(), - shuffles: self - .shuffles + }) + .collect() +} + +/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. 
+fn collect_queries_shuffles( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions .iter() - .map(|s| shuffle::ArgumentV2 { - name: s.name.clone(), - input_expressions: s - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - shuffle_expressions: s - .shuffle_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) + .map(|e| queries.as_expression(e)) .collect(), - general_column_annotations: self.general_column_annotations.clone(), - } - } -} - -impl ConstraintSystemV2Backend { - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { - self.gates - .iter() - .map(|gate| Gate { - name: gate.name.clone(), - constraint_names: Vec::new(), - polys: vec![queries.as_expression(gate.polynomial())], - }) - .collect() - } - - /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { - self.lookups - .iter() - .map(|lookup| lookup::Argument { - name: lookup.name.clone(), - input_expressions: lookup - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - table_expressions: lookup - .table_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} + +/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the +/// expressions of gates, lookups and shuffles into equivalent ones with indexed query +/// references. 
+pub(crate) fn collect_queries( + cs2: &ConstraintSystemV2Backend, +) -> ( + Queries, + Vec>, + Vec>, + Vec>, +) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = collect_queries_gates(cs2, &mut queries); + let lookups = collect_queries_lookups(cs2, &mut queries); + let shuffles = collect_queries_shuffles(cs2, &mut queries); + + // Each column used in a copy constraint involves a query at rotation current. + for column in cs2.permutation.get_columns() { + match column.column_type { + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } + }; } - /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { - self.shuffles - .iter() - .map(|shuffle| shuffle::Argument { - name: shuffle.name.clone(), - input_expressions: shuffle - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - shuffle_expressions: shuffle - .shuffle_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() + let mut num_advice_queries = vec![0; cs2.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; } - /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the - /// expressions of gates, lookups and shuffles into equivalent ones with indexed query - /// references. 
- pub(crate) fn collect_queries( - &self, - ) -> ( - Queries, - Vec>, - Vec>, - Vec>, - ) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; - - let gates = self.collect_queries_gates(&mut queries); - let lookups = self.collect_queries_lookups(&mut queries); - let shuffles = self.collect_queries_shuffles(&mut queries); - - // Each column used in a copy constraint involves a query at rotation current. - for column in self.permutation.get_columns() { - match column.column_type { - Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) - } - Any::Fixed => { - queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) - } - Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) - } - }; - } - - let mut num_advice_queries = vec![0; self.num_advice_columns]; - for (column, _) in queries.advice.iter() { - num_advice_queries[column.index()] += 1; - } - - let queries = Queries { - advice: queries.advice, - instance: queries.instance, - fixed: queries.fixed, - num_advice_queries, - }; - (queries, gates, lookups, shuffles) - } + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) } /// This is a description of the circuit environment, such as the gate, column and @@ -1583,7 +1594,7 @@ pub struct ConstraintSystem { impl From> for ConstraintSystem { fn from(cs2: ConstraintSystemV2Backend) -> Self { - let (queries, gates, lookups, shuffles) = cs2.collect_queries(); + let (queries, gates, lookups, shuffles) = collect_queries(&cs2); ConstraintSystem { num_fixed_columns: cs2.num_fixed_columns, num_advice_columns: cs2.num_advice_columns, @@ -1602,7 +1613,7 @@ impl From> for ConstraintSystem { num_advice_queries: 
queries.num_advice_queries, instance_queries: queries.instance, fixed_queries: queries.fixed, - permutation: cs2.permutation, + permutation: cs2.permutation.into(), lookups, shuffles, general_column_annotations: cs2.general_column_annotations, diff --git a/halo2_backend/src/plonk/error.rs b/backend/src/plonk/error.rs similarity index 98% rename from halo2_backend/src/plonk/error.rs rename to backend/src/plonk/error.rs index 93f21bf40d..50368bfc18 100644 --- a/halo2_backend/src/plonk/error.rs +++ b/backend/src/plonk/error.rs @@ -2,7 +2,7 @@ use std::error; use std::fmt; use std::io; -use super::{Any, Column}; +use halo2_middleware::circuit::{Any, Column}; /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up diff --git a/halo2_backend/src/plonk/evaluation.rs b/backend/src/plonk/evaluation.rs similarity index 99% rename from halo2_backend/src/plonk/evaluation.rs rename to backend/src/plonk/evaluation.rs index aeeb587a64..2cb9c5a9ae 100644 --- a/halo2_backend/src/plonk/evaluation.rs +++ b/backend/src/plonk/evaluation.rs @@ -1,11 +1,13 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; +use crate::plonk::{lookup, permutation, ProvingKey}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, - poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, + poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; use super::{shuffle, ConstraintSystem, Expression}; diff --git a/halo2_backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs similarity index 86% rename from halo2_backend/src/plonk/keygen.rs rename to backend/src/plonk/keygen.rs index f39db6c5de..f705c067e1 100644 --- a/halo2_backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -4,9 +4,8 @@ use ff::{Field, FromUniformBytes}; use group::Curve; use 
super::{ - circuit::{CompiledCircuitV2, ConstraintSystem}, - evaluation::Evaluator, - Error, Polynomial, ProvingKey, VerifyingKey, + circuit::ConstraintSystem, evaluation::Evaluator, permutation, Error, Polynomial, ProvingKey, + VerifyingKey, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -15,6 +14,7 @@ use crate::{ EvaluationDomain, }, }; +use halo2_middleware::circuit::CompiledCircuitV2; /// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. pub fn keygen_vk_v2<'params, C, P>( @@ -34,12 +34,12 @@ where return Err(Error::not_enough_rows_available(params.k())); } - let permutation_vk = - circuit - .preprocessing - .permutation - .clone() - .build_vk(params, &domain, &cs.permutation); + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs2.permutation, + &circuit.preprocessing.permutation, + )? + .build_vk(params, &domain, &cs.permutation); let fixed_commitments = circuit .preprocessing @@ -96,12 +96,12 @@ where .map(|poly| vk.domain.coeff_to_extended(poly.clone())) .collect(); - let permutation_pk = - circuit - .preprocessing - .permutation - .clone() - .build_pk(params, &vk.domain, &cs.permutation); + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? 
+ .build_pk(params, &vk.domain, &cs.permutation.clone().into()); // Compute l_0(X) // TODO: this can be done more efficiently diff --git a/halo2_backend/src/plonk/lookup.rs b/backend/src/plonk/lookup.rs similarity index 90% rename from halo2_backend/src/plonk/lookup.rs rename to backend/src/plonk/lookup.rs index 97be4b36e0..9f9c70773b 100644 --- a/halo2_backend/src/plonk/lookup.rs +++ b/backend/src/plonk/lookup.rs @@ -1,17 +1,18 @@ -use super::circuit::{Expression, ExpressionMid}; +use super::circuit::Expression; use ff::Field; +// use halo2_middleware::circuit::ExpressionMid; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; -/// Expressions involved in a lookup argument, with a name as metadata. -#[derive(Clone, Debug)] -pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, -} +// /// Expressions involved in a lookup argument, with a name as metadata. +// #[derive(Clone, Debug)] +// pub struct ArgumentV2 { +// pub(crate) name: String, +// pub(crate) input_expressions: Vec>, +// pub(crate) table_expressions: Vec>, +// } /// Expressions involved in a lookup argument, with a name as metadata. 
#[derive(Clone)] diff --git a/halo2_backend/src/plonk/lookup/prover.rs b/backend/src/plonk/lookup/prover.rs similarity index 99% rename from halo2_backend/src/plonk/lookup/prover.rs rename to backend/src/plonk/lookup/prover.rs index 028b298853..71066da9e6 100644 --- a/halo2_backend/src/plonk/lookup/prover.rs +++ b/backend/src/plonk/lookup/prover.rs @@ -8,7 +8,7 @@ use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, poly::{ commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; @@ -17,6 +17,7 @@ use group::{ ff::{BatchInvert, Field}, Curve, }; +use halo2_middleware::poly::Rotation; use rand_core::RngCore; use std::{ collections::BTreeMap, diff --git a/halo2_backend/src/plonk/lookup/verifier.rs b/backend/src/plonk/lookup/verifier.rs similarity index 98% rename from halo2_backend/src/plonk/lookup/verifier.rs rename to backend/src/plonk/lookup/verifier.rs index 598691ba8f..11b8d30b49 100644 --- a/halo2_backend/src/plonk/lookup/verifier.rs +++ b/backend/src/plonk/lookup/verifier.rs @@ -7,10 +7,11 @@ use super::Argument; use crate::{ arithmetic::CurveAffine, plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; use ff::Field; +use halo2_middleware::poly::Rotation; pub struct PermutationCommitments { permuted_input_commitment: C, diff --git a/halo2_backend/src/plonk/permutation.rs b/backend/src/plonk/permutation.rs similarity index 90% rename from halo2_backend/src/plonk/permutation.rs rename to backend/src/plonk/permutation.rs index f41a570554..31c49e8a06 100644 --- a/halo2_backend/src/plonk/permutation.rs +++ b/backend/src/plonk/permutation.rs @@ -1,6 +1,5 @@ //! Implementation of permutation argument. 
-use super::circuit::{Any, Column}; use crate::{ arithmetic::CurveAffine, helpers::{ @@ -10,6 +9,8 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; +use halo2_middleware::circuit::{Any, Column}; +// use halo2_middleware::permutation::Argument; pub(crate) mod keygen; pub(crate) mod prover; @@ -19,6 +20,9 @@ pub use keygen::Assembly; use std::io; +// TODO: Use https://docs.rs/ref-cast/latest/ref_cast/index.html here? This way we can have +// Argument(halo2_middleware::permutation::Argument) and easily translate from one type to the +// other while using references. /// A permutation argument. #[derive(Debug, Clone)] pub struct Argument { @@ -26,6 +30,14 @@ pub struct Argument { pub(super) columns: Vec>, } +impl From for Argument { + fn from(arg: halo2_middleware::permutation::Argument) -> Self { + Self { + columns: arg.columns, + } + } +} + impl Argument { pub(crate) fn new() -> Self { Argument { columns: vec![] } diff --git a/halo2_backend/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs similarity index 96% rename from halo2_backend/src/plonk/permutation/keygen.rs rename to backend/src/plonk/permutation/keygen.rs index 0d78f00ac5..d8e98562a4 100644 --- a/halo2_backend/src/plonk/permutation/keygen.rs +++ b/backend/src/plonk/permutation/keygen.rs @@ -4,12 +4,14 @@ use group::Curve; use super::{Argument, ProvingKey, VerifyingKey}; use crate::{ arithmetic::{parallelize, CurveAffine}, - plonk::{Any, Column, Error}, + plonk::Error, poly::{ commitment::{Blind, Params}, EvaluationDomain, }, }; +use halo2_middleware::circuit::{Any, Column}; +use halo2_middleware::permutation; #[cfg(feature = "thread-safe-region")] use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; @@ -36,7 +38,19 @@ pub struct Assembly { #[cfg(not(feature = "thread-safe-region"))] impl Assembly { - pub(crate) fn new(n: usize, p: &Argument) -> Self { + pub(crate) fn new_from_assembly_mid( + n: 
usize, + p: &permutation::Argument, + a: &permutation::AssemblyMid, + ) -> Result { + let mut assembly = Self::new(n, p); + for copy in &a.copies { + assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; + } + Ok(assembly) + } + + pub(crate) fn new(n: usize, p: &permutation::Argument) -> Self { // Initialize the copy vector to keep track of copy constraints in all // the permutation arguments. let mut columns = vec![]; diff --git a/halo2_backend/src/plonk/permutation/prover.rs b/backend/src/plonk/permutation/prover.rs similarity index 98% rename from halo2_backend/src/plonk/permutation/prover.rs rename to backend/src/plonk/permutation/prover.rs index d6b108554d..cd4ad43797 100644 --- a/halo2_backend/src/plonk/permutation/prover.rs +++ b/backend/src/plonk/permutation/prover.rs @@ -6,17 +6,19 @@ use group::{ use rand_core::RngCore; use std::iter::{self, ExactSizeIterator}; -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, ProvingKey}; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, plonk::{self, Error}, poly::{ commitment::{Blind, Params}, - Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; pub(crate) struct CommittedSet { pub(crate) permutation_product_poly: Polynomial, diff --git a/halo2_backend/src/plonk/permutation/verifier.rs b/backend/src/plonk/permutation/verifier.rs similarity index 98% rename from halo2_backend/src/plonk/permutation/verifier.rs rename to backend/src/plonk/permutation/verifier.rs index a4637422ae..96ec55ef41 100644 --- a/halo2_backend/src/plonk/permutation/verifier.rs +++ b/backend/src/plonk/permutation/verifier.rs @@ -1,14 +1,16 @@ use ff::{Field, PrimeField}; use 
std::iter; -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use crate::{ arithmetic::CurveAffine, plonk::{self, Error}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; pub struct Committed { permutation_product_commitments: Vec, diff --git a/halo2_backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs similarity index 100% rename from halo2_backend/src/plonk/prover.rs rename to backend/src/plonk/prover.rs diff --git a/halo2_backend/src/plonk/shuffle.rs b/backend/src/plonk/shuffle.rs similarity index 86% rename from halo2_backend/src/plonk/shuffle.rs rename to backend/src/plonk/shuffle.rs index 0779c2b451..a9d4f7faef 100644 --- a/halo2_backend/src/plonk/shuffle.rs +++ b/backend/src/plonk/shuffle.rs @@ -1,17 +1,17 @@ -use super::circuit::{Expression, ExpressionMid}; +use super::circuit::Expression; use ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; -/// Expressions involved in a shuffle argument, with a name as metadata. -#[derive(Clone, Debug)] -pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, -} +// /// Expressions involved in a shuffle argument, with a name as metadata. +// #[derive(Clone, Debug)] +// pub struct ArgumentV2 { +// pub(crate) name: String, +// pub(crate) input_expressions: Vec>, +// pub(crate) shuffle_expressions: Vec>, +// } /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] diff --git a/halo2_backend/src/plonk/shuffle/prover.rs b/backend/src/plonk/shuffle/prover.rs similarity index 99% rename from halo2_backend/src/plonk/shuffle/prover.rs rename to backend/src/plonk/shuffle/prover.rs index fd30436a47..f730a8ecf7 100644 --- a/halo2_backend/src/plonk/shuffle/prover.rs +++ b/backend/src/plonk/shuffle/prover.rs @@ -7,12 +7,13 @@ use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, poly::{ commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; use ff::WithSmallOrderMulGroup; use group::{ff::BatchInvert, Curve}; +use halo2_middleware::poly::Rotation; use rand_core::RngCore; use std::{ iter, diff --git a/halo2_backend/src/plonk/shuffle/verifier.rs b/backend/src/plonk/shuffle/verifier.rs similarity index 98% rename from halo2_backend/src/plonk/shuffle/verifier.rs rename to backend/src/plonk/shuffle/verifier.rs index 759e86b234..b591bcd654 100644 --- a/halo2_backend/src/plonk/shuffle/verifier.rs +++ b/backend/src/plonk/shuffle/verifier.rs @@ -5,10 +5,11 @@ use super::Argument; use crate::{ arithmetic::CurveAffine, plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; use ff::Field; +use halo2_middleware::poly::Rotation; pub struct Committed { product_commitment: C, diff --git a/halo2_backend/src/plonk/vanishing.rs b/backend/src/plonk/vanishing.rs similarity index 100% rename from halo2_backend/src/plonk/vanishing.rs rename to backend/src/plonk/vanishing.rs diff --git a/halo2_backend/src/plonk/vanishing/prover.rs b/backend/src/plonk/vanishing/prover.rs similarity index 100% rename from halo2_backend/src/plonk/vanishing/prover.rs rename to backend/src/plonk/vanishing/prover.rs diff --git 
a/halo2_backend/src/plonk/vanishing/verifier.rs b/backend/src/plonk/vanishing/verifier.rs similarity index 100% rename from halo2_backend/src/plonk/vanishing/verifier.rs rename to backend/src/plonk/vanishing/verifier.rs diff --git a/halo2_backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs similarity index 100% rename from halo2_backend/src/plonk/verifier.rs rename to backend/src/plonk/verifier.rs diff --git a/halo2_backend/src/plonk/verifier/batch.rs b/backend/src/plonk/verifier/batch.rs similarity index 100% rename from halo2_backend/src/plonk/verifier/batch.rs rename to backend/src/plonk/verifier/batch.rs diff --git a/halo2_backend/src/poly.rs b/backend/src/poly.rs similarity index 92% rename from halo2_backend/src/poly.rs rename to backend/src/poly.rs index b505d6b49b..529e157618 100644 --- a/halo2_backend/src/poly.rs +++ b/backend/src/poly.rs @@ -8,6 +8,7 @@ use crate::plonk::Assigned; use crate::SerdeFormat; use group::ff::{BatchInvert, Field}; +use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; use std::marker::PhantomData; @@ -321,25 +322,25 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { } } -/// Describes the relative rotation of a vector. Negative numbers represent -/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) -/// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Rotation(pub i32); - -impl Rotation { - /// The current location in the evaluation domain - pub fn cur() -> Rotation { - Rotation(0) - } - - /// The previous location in the evaluation domain - pub fn prev() -> Rotation { - Rotation(-1) - } - - /// The next location in the evaluation domain - pub fn next() -> Rotation { - Rotation(1) - } -} +// /// Describes the relative rotation of a vector. Negative numbers represent +// /// reverse (leftmost) rotations and positive numbers represent forward (rightmost) +// /// rotations. 
Zero represents no rotation. +// #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +// pub struct Rotation(pub i32); +// +// impl Rotation { +// /// The current location in the evaluation domain +// pub fn cur() -> Rotation { +// Rotation(0) +// } +// +// /// The previous location in the evaluation domain +// pub fn prev() -> Rotation { +// Rotation(-1) +// } +// +// /// The next location in the evaluation domain +// pub fn next() -> Rotation { +// Rotation(1) +// } +// } diff --git a/halo2_backend/src/poly/commitment.rs b/backend/src/poly/commitment.rs similarity index 100% rename from halo2_backend/src/poly/commitment.rs rename to backend/src/poly/commitment.rs diff --git a/halo2_backend/src/poly/domain.rs b/backend/src/poly/domain.rs similarity index 100% rename from halo2_backend/src/poly/domain.rs rename to backend/src/poly/domain.rs diff --git a/halo2_backend/src/poly/ipa/commitment.rs b/backend/src/poly/ipa/commitment.rs similarity index 100% rename from halo2_backend/src/poly/ipa/commitment.rs rename to backend/src/poly/ipa/commitment.rs diff --git a/halo2_backend/src/poly/ipa/commitment/prover.rs b/backend/src/poly/ipa/commitment/prover.rs similarity index 100% rename from halo2_backend/src/poly/ipa/commitment/prover.rs rename to backend/src/poly/ipa/commitment/prover.rs diff --git a/halo2_backend/src/poly/ipa/commitment/verifier.rs b/backend/src/poly/ipa/commitment/verifier.rs similarity index 100% rename from halo2_backend/src/poly/ipa/commitment/verifier.rs rename to backend/src/poly/ipa/commitment/verifier.rs diff --git a/halo2_backend/src/poly/ipa/mod.rs b/backend/src/poly/ipa/mod.rs similarity index 100% rename from halo2_backend/src/poly/ipa/mod.rs rename to backend/src/poly/ipa/mod.rs diff --git a/halo2_backend/src/poly/ipa/msm.rs b/backend/src/poly/ipa/msm.rs similarity index 100% rename from halo2_backend/src/poly/ipa/msm.rs rename to backend/src/poly/ipa/msm.rs diff --git a/halo2_backend/src/poly/ipa/multiopen.rs 
b/backend/src/poly/ipa/multiopen.rs similarity index 100% rename from halo2_backend/src/poly/ipa/multiopen.rs rename to backend/src/poly/ipa/multiopen.rs diff --git a/halo2_backend/src/poly/ipa/multiopen/prover.rs b/backend/src/poly/ipa/multiopen/prover.rs similarity index 100% rename from halo2_backend/src/poly/ipa/multiopen/prover.rs rename to backend/src/poly/ipa/multiopen/prover.rs diff --git a/halo2_backend/src/poly/ipa/multiopen/verifier.rs b/backend/src/poly/ipa/multiopen/verifier.rs similarity index 100% rename from halo2_backend/src/poly/ipa/multiopen/verifier.rs rename to backend/src/poly/ipa/multiopen/verifier.rs diff --git a/halo2_backend/src/poly/ipa/strategy.rs b/backend/src/poly/ipa/strategy.rs similarity index 100% rename from halo2_backend/src/poly/ipa/strategy.rs rename to backend/src/poly/ipa/strategy.rs diff --git a/halo2_backend/src/poly/kzg/commitment.rs b/backend/src/poly/kzg/commitment.rs similarity index 100% rename from halo2_backend/src/poly/kzg/commitment.rs rename to backend/src/poly/kzg/commitment.rs diff --git a/halo2_backend/src/poly/kzg/mod.rs b/backend/src/poly/kzg/mod.rs similarity index 100% rename from halo2_backend/src/poly/kzg/mod.rs rename to backend/src/poly/kzg/mod.rs diff --git a/halo2_backend/src/poly/kzg/msm.rs b/backend/src/poly/kzg/msm.rs similarity index 100% rename from halo2_backend/src/poly/kzg/msm.rs rename to backend/src/poly/kzg/msm.rs diff --git a/halo2_backend/src/poly/kzg/multiopen.rs b/backend/src/poly/kzg/multiopen.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen.rs rename to backend/src/poly/kzg/multiopen.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc.rs b/backend/src/poly/kzg/multiopen/gwc.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen/gwc.rs rename to backend/src/poly/kzg/multiopen/gwc.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs b/backend/src/poly/kzg/multiopen/gwc/prover.rs similarity index 100% rename from 
halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs rename to backend/src/poly/kzg/multiopen/gwc/prover.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs b/backend/src/poly/kzg/multiopen/gwc/verifier.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs rename to backend/src/poly/kzg/multiopen/gwc/verifier.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/shplonk.rs b/backend/src/poly/kzg/multiopen/shplonk.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen/shplonk.rs rename to backend/src/poly/kzg/multiopen/shplonk.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs b/backend/src/poly/kzg/multiopen/shplonk/prover.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs rename to backend/src/poly/kzg/multiopen/shplonk/prover.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs b/backend/src/poly/kzg/multiopen/shplonk/verifier.rs similarity index 100% rename from halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs rename to backend/src/poly/kzg/multiopen/shplonk/verifier.rs diff --git a/halo2_backend/src/poly/kzg/strategy.rs b/backend/src/poly/kzg/strategy.rs similarity index 100% rename from halo2_backend/src/poly/kzg/strategy.rs rename to backend/src/poly/kzg/strategy.rs diff --git a/halo2_backend/src/poly/multiopen_test.rs b/backend/src/poly/multiopen_test.rs similarity index 100% rename from halo2_backend/src/poly/multiopen_test.rs rename to backend/src/poly/multiopen_test.rs diff --git a/halo2_backend/src/poly/query.rs b/backend/src/poly/query.rs similarity index 100% rename from halo2_backend/src/poly/query.rs rename to backend/src/poly/query.rs diff --git a/halo2_backend/src/poly/strategy.rs b/backend/src/poly/strategy.rs similarity index 100% rename from halo2_backend/src/poly/strategy.rs rename to backend/src/poly/strategy.rs diff --git a/halo2_backend/src/transcript.rs b/backend/src/transcript.rs 
similarity index 100% rename from halo2_backend/src/transcript.rs rename to backend/src/transcript.rs diff --git a/halo2_common/Cargo.toml b/common/Cargo.toml similarity index 100% rename from halo2_common/Cargo.toml rename to common/Cargo.toml diff --git a/halo2_common/src/lib.rs b/common/src/lib.rs similarity index 100% rename from halo2_common/src/lib.rs rename to common/src/lib.rs diff --git a/halo2_frontend/Cargo.toml b/frontend/Cargo.toml similarity index 100% rename from halo2_frontend/Cargo.toml rename to frontend/Cargo.toml diff --git a/halo2_frontend/src/lib.rs b/frontend/src/lib.rs similarity index 100% rename from halo2_frontend/src/lib.rs rename to frontend/src/lib.rs diff --git a/halo2_middleware/src/circuit.rs b/halo2_middleware/src/circuit.rs deleted file mode 100644 index 2cc0f1c993..0000000000 --- a/halo2_middleware/src/circuit.rs +++ /dev/null @@ -1,162 +0,0 @@ -use crate::poly::Rotation; -use crate::{lookup, permutation, shuffle}; -use core::cmp::max; -use ff::Field; - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, - /// Phase of this advice column - pub phase: u8, -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} - -/// A challenge squeezed from transcript after advice columns at the phase have been committed. 
-#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Challenge { - index: usize, - pub(crate) phase: u8, -} - -impl Challenge { - /// Index of this challenge. - pub fn index(&self) -> usize { - self.index - } - - /// Phase of this challenge. - pub fn phase(&self) -> u8 { - self.phase - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum ExpressionMid { - /// This is a constant polynomial - Constant(F), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQueryMid), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQueryMid), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQueryMid), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl ExpressionMid { - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - use ExpressionMid::*; - match self { - Constant(_) => 0, - Fixed(_) => 1, - Advice(_) => 1, - Instance(_) => 1, - Challenge(_) => 0, - Negated(poly) => poly.degree(), - Sum(a, b) => max(a.degree(), b.degree()), - Product(a, b) => a.degree() + b.degree(), - Scaled(poly, _) => poly.degree(), - } - } -} - -/// A Gate contains a single polynomial identity with a name as metadata. -#[derive(Clone, Debug)] -pub struct GateV2Backend { - name: String, - poly: ExpressionMid, -} - -impl GateV2Backend { - /// Returns the gate name. 
- pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the polynomial identity of this gate - pub fn polynomial(&self) -> &ExpressionMid { - &self.poly - } -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystemV2Backend { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, -} - -/// Data that needs to be preprocessed from a circuit -#[derive(Debug, Clone)] -pub struct PreprocessingV2 { - // TODO(Edu): Can we replace this by a simpler structure? - pub(crate) permutation: permutation::keygen::Assembly, - pub(crate) fixed: Vec>, -} - -/// This is a description of a low level Plonkish compiled circuit. 
Contains the Constraint System -/// as well as the fixed columns and copy constraints information. -#[derive(Debug, Clone)] -pub struct CompiledCircuitV2 { - pub(crate) preprocessing: PreprocessingV2, - pub(crate) cs: ConstraintSystemV2Backend, -} diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs deleted file mode 100644 index 82e819f636..0000000000 --- a/halo2_middleware/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod circuit; -pub mod lookup; -pub mod permutation; -pub mod poly; -pub mod shuffle; diff --git a/halo2_middleware/src/permutation.rs b/halo2_middleware/src/permutation.rs deleted file mode 100644 index 0eb27f3030..0000000000 --- a/halo2_middleware/src/permutation.rs +++ /dev/null @@ -1,8 +0,0 @@ -use crate::circuit::{Any, Column}; - -/// A permutation argument. -#[derive(Debug, Clone)] -pub struct Argument { - /// A sequence of columns involved in the argument. - pub(super) columns: Vec>, -} diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index ad70c2ea8c..1ecf84b69c 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1683,7 +1683,7 @@ impl Gate { #[derive(Debug, Clone)] pub struct PreprocessingV2 { // TODO(Edu): Can we replace this by a simpler structure? 
- pub(crate) permutation: permutation::keygen::Assembly, + pub(crate) permutation: permutation::keygen::AssemblyMid, pub(crate) fixed: Vec>, } @@ -2020,7 +2020,7 @@ pub fn compile_circuit>( let mut assembly = crate::plonk::keygen::Assembly { k, fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::keygen::Assembly::new(n, &cs.permutation), + permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), selectors: vec![vec![false; n]; cs.num_selectors], usable_rows: 0..n - (cs.blinding_factors() + 1), _marker: std::marker::PhantomData, @@ -2046,7 +2046,9 @@ pub fn compile_circuit>( fixed.extend(selector_polys.into_iter()); let preprocessing = PreprocessingV2 { - permutation: assembly.permutation, + permutation: permutation::keygen::AssemblyMid { + copies: assembly.permutation.copies, + }, fixed, }; diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 23f5385628..81b890cf65 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -52,7 +52,7 @@ where pub(crate) struct Assembly { pub(crate) k: u32, pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::keygen::Assembly, + pub(crate) permutation: permutation::keygen::AssemblyFront, pub(crate) selectors: Vec>, // A range of available rows for assignment and copies. pub(crate) usable_rows: Range, @@ -219,12 +219,12 @@ where return Err(Error::not_enough_rows_available(params.k())); } - let permutation_vk = - circuit - .preprocessing - .permutation - .clone() - .build_vk(params, &domain, &cs.permutation); + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? 
+ .build_vk(params, &domain, &cs.permutation); let fixed_commitments = circuit .preprocessing @@ -316,12 +316,12 @@ where .map(|poly| vk.domain.coeff_to_extended(poly.clone())) .collect(); - let permutation_pk = - circuit - .preprocessing - .permutation - .clone() - .build_pk(params, &vk.domain, &cs.permutation); + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_pk(params, &vk.domain, &cs.permutation); // Compute l_0(X) // TODO: this can be done more efficiently diff --git a/halo2_proofs/src/plonk/permutation/keygen.rs b/halo2_proofs/src/plonk/permutation/keygen.rs index 0d78f00ac5..32ee0aa25e 100644 --- a/halo2_proofs/src/plonk/permutation/keygen.rs +++ b/halo2_proofs/src/plonk/permutation/keygen.rs @@ -34,8 +34,79 @@ pub struct Assembly { sizes: Vec>, } +// TODO: Dedup with other Cell definition +#[derive(Clone, Debug)] +pub struct Cell { + pub column: Column, + pub row: usize, +} + +#[derive(Clone, Debug)] +pub struct AssemblyMid { + pub copies: Vec<(Cell, Cell)>, +} + +#[derive(Clone, Debug)] +pub struct AssemblyFront { + n: usize, + columns: Vec>, + pub(crate) copies: Vec<(Cell, Cell)>, +} + +impl AssemblyFront { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + Self { + n, + columns: p.columns.clone(), + copies: Vec::new(), + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.columns.contains(&left_column) { + return Err(Error::ColumnNotInPermutation(left_column)); + } + if !self.columns.contains(&right_column) { + return Err(Error::ColumnNotInPermutation(right_column)); + } + // Check bounds + if left_row >= self.n || right_row >= self.n { + return Err(Error::BoundsFailure); + } + self.copies.push(( + Cell { + column: left_column, + row: left_row, + }, + Cell { + column: right_column, + row: right_row, + }, + )); + 
Ok(()) + } +} + #[cfg(not(feature = "thread-safe-region"))] impl Assembly { + pub(crate) fn new_from_assembly_mid( + n: usize, + p: &Argument, + a: &AssemblyMid, + ) -> Result { + let mut assembly = Self::new(n, p); + for copy in &a.copies { + assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; + } + Ok(assembly) + } + pub(crate) fn new(n: usize, p: &Argument) -> Self { // Initialize the copy vector to keep track of copy constraints in all // the permutation arguments. @@ -164,6 +235,18 @@ pub struct Assembly { #[cfg(feature = "thread-safe-region")] impl Assembly { + pub(crate) fn new_from_assembly_mid( + n: usize, + p: &Argument, + a: &AssemblyMid, + ) -> Result { + let mut assembly = Self::new(n, p); + for copy in &a.copies { + assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; + } + Ok(assembly) + } + pub(crate) fn new(n: usize, p: &Argument) -> Self { Assembly { columns: p.columns.clone(), diff --git a/halo2_middleware/Cargo.toml b/middleware/Cargo.toml similarity index 100% rename from halo2_middleware/Cargo.toml rename to middleware/Cargo.toml diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs new file mode 100644 index 0000000000..19d327a196 --- /dev/null +++ b/middleware/src/circuit.rs @@ -0,0 +1,476 @@ +use crate::poly::Rotation; +use crate::{lookup, metadata, permutation, shuffle}; +use core::cmp::max; +use ff::Field; +use std::collections::HashMap; + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, + /// Phase of this advice column + pub phase: u8, +} + +/// Query of instance column at 
a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQueryMid { + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Challenge { + index: usize, + pub(crate) phase: u8, +} + +impl Challenge { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. + pub fn phase(&self) -> u8 { + self.phase + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ExpressionMid { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQueryMid), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQueryMid), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQueryMid), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl ExpressionMid { + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + use ExpressionMid::*; + match self { + Constant(_) => 0, + Fixed(_) => 1, + Advice(_) => 1, + Instance(_) => 1, + Challenge(_) => 0, + Negated(poly) => poly.degree(), + Sum(a, b) => max(a.degree(), b.degree()), + Product(a, b) => a.degree() + b.degree(), + Scaled(poly, _) => poly.degree(), + } + } +} + +/// A Gate contains a single polynomial identity with a name as metadata. 
+#[derive(Clone, Debug)] +pub struct GateV2Backend { + pub name: String, + pub poly: ExpressionMid, +} + +impl GateV2Backend { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &ExpressionMid { + &self.poly + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemV2Backend { + pub num_fixed_columns: usize, + pub num_advice_columns: usize, + pub num_instance_columns: usize, + pub num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub challenge_phase: Vec, + + pub gates: Vec>, + + // Permutation argument for performing equality constraints + pub permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub general_column_annotations: HashMap, +} + +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct PreprocessingV2 { + pub permutation: permutation::AssemblyMid, + pub fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. 
+#[derive(Debug, Clone)] +pub struct CompiledCircuitV2 { + pub preprocessing: PreprocessingV2, + pub cs: ConstraintSystemV2Backend, +} + +/// A column type +pub trait ColumnType: + 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into +{ + /// Return expression from cell + fn query_cell(&self, index: usize, at: Rotation) -> ExpressionMid; +} + +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Column { + pub index: usize, + pub column_type: C, +} + +// TODO: Remove all these methods, and directly access the fields? +impl Column { + pub fn new(index: usize, column_type: C) -> Self { + Column { index, column_type } + } + + /// Index of this column. + pub fn index(&self) -> usize { + self.index + } + + /// Type of this column. + pub fn column_type(&self) -> &C { + &self.column_type + } + + /// Return expression from column at a relative position + pub fn query_cell(&self, at: Rotation) -> ExpressionMid { + self.column_type.query_cell(self.index, at) + } + + /// Return expression from column at the current row + pub fn cur(&self) -> ExpressionMid { + self.query_cell(Rotation::cur()) + } + + /// Return expression from column at the next row + pub fn next(&self) -> ExpressionMid { + self.query_cell(Rotation::next()) + } + + /// Return expression from column at the previous row + pub fn prev(&self) -> ExpressionMid { + self.query_cell(Rotation::prev()) + } + + /// Return expression from column at the specified rotation + pub fn rot(&self, rotation: i32) -> ExpressionMid { + self.query_cell(Rotation(rotation)) + } +} + +impl Ord for Column { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match self.column_type.into().cmp(&other.column_type.into()) { + // Indices are assigned within column types. 
+ std::cmp::Ordering::Equal => self.index.cmp(&other.index), + order => order, + } + } +} + +impl PartialOrd for Column { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// An advice column +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub struct Advice { + pub phase: u8, +} + +impl Default for Advice { + fn default() -> Advice { + Advice { phase: 0 } + } +} + +impl Advice { + /// Returns `Advice` in given `Phase` + pub fn new(phase: u8) -> Advice { + Advice { phase } + } + + /// Phase of this column + pub fn phase(&self) -> u8 { + self.phase + } +} + +impl std::fmt::Debug for Advice { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. + if self.phase != 0 { + debug_struct.field("phase", &self.phase); + } + debug_struct.finish() + } +} + +/// A fixed column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Fixed; + +/// An instance column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Instance; + +/// An enum over the Advice, Fixed, Instance structs +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub enum Any { + /// An Advice variant + Advice(Advice), + /// A Fixed variant + Fixed, + /// An Instance variant + Instance, +} + +impl Any { + /// Returns Advice variant in `FirstPhase` + pub fn advice() -> Any { + Any::Advice(Advice::default()) + } + + /// Returns Advice variant in given `Phase` + pub fn advice_in(phase: u8) -> Any { + Any::Advice(Advice::new(phase)) + } +} + +impl std::fmt::Debug for Any { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Any::Advice(advice) => { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. 
+ if advice.phase != 0 { + debug_struct.field("phase", &advice.phase); + } + debug_struct.finish() + } + Any::Fixed => f.debug_struct("Fixed").finish(), + Any::Instance => f.debug_struct("Instance").finish(), + } + } +} + +impl Ord for Any { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match (self, other) { + (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, + (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), + // Across column types, sort Instance < Advice < Fixed. + (Any::Instance, Any::Advice(_)) + | (Any::Advice(_), Any::Fixed) + | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, + (Any::Fixed, Any::Instance) + | (Any::Fixed, Any::Advice(_)) + | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, + } + } +} + +impl PartialOrd for Any { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl ColumnType for Advice { + fn query_cell(&self, index: usize, at: Rotation) -> ExpressionMid { + ExpressionMid::Advice(AdviceQueryMid { + column_index: index, + rotation: at, + phase: self.phase, + }) + } +} +impl ColumnType for Fixed { + fn query_cell(&self, index: usize, at: Rotation) -> ExpressionMid { + ExpressionMid::Fixed(FixedQueryMid { + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Instance { + fn query_cell(&self, index: usize, at: Rotation) -> ExpressionMid { + ExpressionMid::Instance(InstanceQueryMid { + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Any { + fn query_cell(&self, index: usize, at: Rotation) -> ExpressionMid { + match self { + Any::Advice(Advice { phase }) => ExpressionMid::Advice(AdviceQueryMid { + column_index: index, + rotation: at, + phase: *phase, + }), + Any::Fixed => ExpressionMid::Fixed(FixedQueryMid { + column_index: index, + rotation: at, + }), + Any::Instance => 
ExpressionMid::Instance(InstanceQueryMid { + column_index: index, + rotation: at, + }), + } + } +} + +impl From for Any { + fn from(advice: Advice) -> Any { + Any::Advice(advice) + } +} + +impl From for Any { + fn from(_: Fixed) -> Any { + Any::Fixed + } +} + +impl From for Any { + fn from(_: Instance) -> Any { + Any::Instance + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Advice(advice.column_type), + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Fixed, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Instance, + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Advice(advice) => Ok(Column { + index: any.index(), + column_type: *advice, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Fixed => Ok(Column { + index: any.index(), + column_type: Fixed, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Instance => Ok(Column { + index: any.index(), + column_type: Instance, + }), + _ => Err("Cannot convert into Column"), + } + } +} diff --git a/middleware/src/lib.rs b/middleware/src/lib.rs new file mode 100644 index 0000000000..5a6047b469 --- /dev/null +++ b/middleware/src/lib.rs @@ -0,0 +1,105 @@ +pub mod circuit; +pub mod lookup; +pub mod metadata; +pub mod permutation; +pub mod poly; +pub mod shuffle; + +// TODO: Remove with permutation::Argument simplification +pub mod multicore { + pub use rayon::{ + current_num_threads, + 
iter::{IndexedParallelIterator, IntoParallelRefIterator}, + iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, + join, scope, + slice::ParallelSliceMut, + Scope, + }; + + pub trait TryFoldAndReduce { + /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, + /// falling back on `Iterator::try_fold` when the `multicore` feature flag is + /// disabled. + /// The `try_fold_and_reduce` function can only be called by a iter with + /// `Result` item type because the `fold_op` must meet the trait + /// bounds of both `try_fold` and `try_reduce` from rayon. + fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result; + } + + impl TryFoldAndReduce for I + where + T: Send + Sync, + E: Send + Sync, + I: rayon::iter::ParallelIterator>, + { + fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result { + self.try_fold(&identity, &fold_op) + .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) + } + } +} + +// TODO: Remove with permutation::Argument simplification +pub mod arithmetic { + use super::multicore; + + /// This utility function will parallelize an operation that is to be + /// performed over a mutable slice. + pub fn parallelize(v: &mut [T], f: F) { + // Algorithm rationale: + // + // Using the stdlib `chunks_mut` will lead to severe load imbalance. + // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 + // if the division is not exact, the last chunk will be the remainder. + // + // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, + // There will be a 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. + // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, + // a load imbalance of 2x. 
+ // + // Instead we can divide work into chunks of size + // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 + // + // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut + // + // See also OpenMP spec (page 60) + // http://www.openmp.org/mp-documents/openmp-4.5.pdf + // "When no chunk_size is specified, the iteration space is divided into chunks + // that are approximately equal in size, and at most one chunk is distributed to + // each thread. The size of the chunks is unspecified in this case." + // This implies chunks are the same size ±1 + + let f = &f; + let total_iters = v.len(); + let num_threads = multicore::current_num_threads(); + let base_chunk_size = total_iters / num_threads; + let cutoff_chunk_id = total_iters % num_threads; + let split_pos = cutoff_chunk_id * (base_chunk_size + 1); + let (v_hi, v_lo) = v.split_at_mut(split_pos); + + multicore::scope(|scope| { + // Skip special-case: number of iterations is cleanly divided by number of threads. + if cutoff_chunk_id != 0 { + for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { + let offset = chunk_id * (base_chunk_size + 1); + scope.spawn(move |_| f(chunk, offset)); + } + } + // Skip special-case: less iterations than number of threads. + if base_chunk_size != 0 { + for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { + let offset = split_pos + (chunk_id * base_chunk_size); + scope.spawn(move |_| f(chunk, offset)); + } + } + }); + } +} diff --git a/halo2_middleware/src/lookup.rs b/middleware/src/lookup.rs similarity index 56% rename from halo2_middleware/src/lookup.rs rename to middleware/src/lookup.rs index e43ffb5dbf..4ef8dfea8f 100644 --- a/halo2_middleware/src/lookup.rs +++ b/middleware/src/lookup.rs @@ -4,7 +4,7 @@ use ff::Field; /// Expressions involved in a lookup argument, with a name as metadata. 
#[derive(Clone, Debug)] pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, + pub name: String, + pub input_expressions: Vec>, + pub table_expressions: Vec>, } diff --git a/halo2_backend/src/dev/metadata.rs b/middleware/src/metadata.rs similarity index 81% rename from halo2_backend/src/dev/metadata.rs rename to middleware/src/metadata.rs index 9442acd2f1..61ff5a2ef7 100644 --- a/halo2_backend/src/dev/metadata.rs +++ b/middleware/src/metadata.rs @@ -1,7 +1,7 @@ -//! Metadata about circuits. - -use crate::plonk::{self, Any}; +use crate::circuit::{self, Any}; use std::fmt::{self, Debug}; + +// TODO: Could we replace this by circuit::Column? at least for the middleware? /// Metadata about a column within a circuit. #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Column { @@ -34,8 +34,8 @@ impl From<(Any, usize)> for Column { } } -impl From> for Column { - fn from(column: plonk::Column) -> Self { +impl From> for Column { + fn from(column: circuit::Column) -> Self { Column { column_type: *column.column_type(), index: column.index(), diff --git a/middleware/src/permutation.rs b/middleware/src/permutation.rs new file mode 100644 index 0000000000..bd6d848e93 --- /dev/null +++ b/middleware/src/permutation.rs @@ -0,0 +1,76 @@ +use crate::circuit::{Any, Column}; + +// TODO: Dedup with other Cell definition +#[derive(Clone, Debug)] +pub struct Cell { + pub column: Column, + pub row: usize, +} + +#[derive(Clone, Debug)] +pub struct AssemblyMid { + pub copies: Vec<(Cell, Cell)>, +} + +/// A permutation argument. +#[derive(Debug, Clone)] +pub struct Argument { + /// A sequence of columns involved in the argument. + pub columns: Vec>, +} + +// TODO: Remove all these methods, and directly access the fields? +impl Argument { + pub fn new() -> Self { + Argument { columns: vec![] } + } + + /// Returns the minimum circuit degree required by the permutation argument. 
+ /// The argument may use larger degree gates depending on the actual + /// circuit's degree and how many columns are involved in the permutation. + pub(crate) fn required_degree(&self) -> usize { + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // We will fit as many polynomials p_i(X) as possible + // into the required degree of the circuit, so the + // following will not affect the required degree of + // this middleware. + // + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + // + // On the first sets of columns, except the first + // set, we will do + // + // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 + // + // where z'(X) is the permutation for the previous set + // of columns. + // + // On the final set of columns, we will do + // + // degree 3: + // l_last(X) * (z'(X)^2 - z'(X)) = 0 + // + // which will allow the last value to be zero to + // ensure the argument is perfectly complete. + + // There are constraints of degree 3 regardless of the + // number of columns involved. + 3 + } + + pub(crate) fn add_column(&mut self, column: Column) { + if !self.columns.contains(&column) { + self.columns.push(column); + } + } + + /// Returns columns that participate on the permutation argument. + pub fn get_columns(&self) -> Vec> { + self.columns.clone() + } +} diff --git a/halo2_middleware/src/poly.rs b/middleware/src/poly.rs similarity index 100% rename from halo2_middleware/src/poly.rs rename to middleware/src/poly.rs diff --git a/halo2_middleware/src/shuffle.rs b/middleware/src/shuffle.rs similarity index 55% rename from halo2_middleware/src/shuffle.rs rename to middleware/src/shuffle.rs index 990d507c0d..293f7adc22 100644 --- a/halo2_middleware/src/shuffle.rs +++ b/middleware/src/shuffle.rs @@ -4,7 +4,7 @@ use ff::Field; /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone, Debug)] pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, + pub name: String, + pub input_expressions: Vec>, + pub shuffle_expressions: Vec>, } From fad6b04be06ac7294d82a42a9ab945b4e61186f2 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 19 Jan 2024 10:14:24 +0000 Subject: [PATCH 34/79] Checkpoint --- backend/src/plonk.rs | 8 +- backend/src/plonk/circuit.rs | 557 ---------------------------------- backend/src/plonk/lookup.rs | 9 - backend/src/plonk/shuffle.rs | 8 - backend/src/poly.rs | 23 -- frontend/src/plonk.rs | 73 +++++ frontend/src/plonk/circuit.rs | 0 middleware/src/lib.rs | 2 + 8 files changed, 79 insertions(+), 601 deletions(-) create mode 100644 frontend/src/plonk.rs create mode 100644 frontend/src/plonk/circuit.rs diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index 1e0f8ed4d0..53ff0efe68 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -162,7 +162,7 @@ where Ok(()) } - // TODO + // TODO: Adapt to CompiledCircuit /* /// Reads a verification key from a buffer. /// @@ -265,7 +265,7 @@ where bytes } - // TODO + // TODO: Adapt to CompiledCircuit /* /// Reads a verification key from a slice of bytes using [`Self::read`]. pub fn from_bytes>( @@ -463,7 +463,7 @@ where Ok(()) } - // TODO + // TODO: Adapt to CompiledCircuit /* /// Reads a proving key from a buffer. /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. @@ -516,7 +516,7 @@ where bytes } - // TODO + // TODO: Adapt to CompiledCircuit /* /// Reads a proving key from a slice of bytes using [`Self::read`]. 
pub fn from_bytes>( diff --git a/backend/src/plonk/circuit.rs b/backend/src/plonk/circuit.rs index ad1f224517..ee5e605282 100644 --- a/backend/src/plonk/circuit.rs +++ b/backend/src/plonk/circuit.rs @@ -18,80 +18,6 @@ use std::{ ops::{Neg, Sub}, }; -// /// A column type -// pub trait ColumnType: -// 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into -// { -// /// Return expression from cell -// fn query_cell(&self, index: usize, at: Rotation) -> Expression; -// } -// -// /// A column with an index and type -// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -// pub struct Column { -// index: usize, -// column_type: C, -// } -// -// impl Column { -// pub(crate) fn new(index: usize, column_type: C) -> Self { -// Column { index, column_type } -// } -// -// /// Index of this column. -// pub fn index(&self) -> usize { -// self.index -// } -// -// /// Type of this column. -// pub fn column_type(&self) -> &C { -// &self.column_type -// } -// -// /// Return expression from column at a relative position -// pub fn query_cell(&self, at: Rotation) -> Expression { -// self.column_type.query_cell(self.index, at) -// } -// -// /// Return expression from column at the current row -// pub fn cur(&self) -> Expression { -// self.query_cell(Rotation::cur()) -// } -// -// /// Return expression from column at the next row -// pub fn next(&self) -> Expression { -// self.query_cell(Rotation::next()) -// } -// -// /// Return expression from column at the previous row -// pub fn prev(&self) -> Expression { -// self.query_cell(Rotation::prev()) -// } -// -// /// Return expression from column at the specified rotation -// pub fn rot(&self, rotation: i32) -> Expression { -// self.query_cell(Rotation(rotation)) -// } -// } -// -// impl Ord for Column { -// fn cmp(&self, other: &Self) -> std::cmp::Ordering { -// // This ordering is consensus-critical! The layouters rely on deterministic column -// // orderings. 
-// match self.column_type.into().cmp(&other.column_type.into()) { -// // Indices are assigned within column types. -// std::cmp::Ordering::Equal => self.index.cmp(&other.index), -// order => order, -// } -// } -// } -// -// impl PartialOrd for Column { -// fn partial_cmp(&self, other: &Self) -> Option { -// Some(self.cmp(other)) -// } -// } - // TODO: No sealed Phase on the backend, only in the frontend! pub(crate) mod sealed { /// Phase of advice column @@ -151,264 +77,6 @@ impl SealedPhase for super::ThirdPhase { } } -// /// An advice column -// #[derive(Clone, Copy, Eq, PartialEq, Hash)] -// pub struct Advice { -// pub(crate) phase: sealed::Phase, -// } -// -// impl Default for Advice { -// fn default() -> Advice { -// Advice { -// phase: FirstPhase.to_sealed(), -// } -// } -// } -// -// impl Advice { -// /// Returns `Advice` in given `Phase` -// pub fn new(phase: P) -> Advice { -// Advice { -// phase: phase.to_sealed(), -// } -// } -// -// /// Phase of this column -// pub fn phase(&self) -> u8 { -// self.phase.0 -// } -// } -// -// impl std::fmt::Debug for Advice { -// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { -// let mut debug_struct = f.debug_struct("Advice"); -// // Only show advice's phase if it's not in first phase. 
-// if self.phase != FirstPhase.to_sealed() { -// debug_struct.field("phase", &self.phase); -// } -// debug_struct.finish() -// } -// } -// -// /// A fixed column -// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -// pub struct Fixed; -// -// /// An instance column -// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -// pub struct Instance; -// -// /// An enum over the Advice, Fixed, Instance structs -// #[derive(Clone, Copy, Eq, PartialEq, Hash)] -// pub enum Any { -// /// An Advice variant -// Advice(Advice), -// /// A Fixed variant -// Fixed, -// /// An Instance variant -// Instance, -// } -// -// impl Any { -// /// Returns Advice variant in `FirstPhase` -// pub fn advice() -> Any { -// Any::Advice(Advice::default()) -// } -// -// /// Returns Advice variant in given `Phase` -// pub fn advice_in(phase: P) -> Any { -// Any::Advice(Advice::new(phase)) -// } -// } -// -// impl std::fmt::Debug for Any { -// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { -// match self { -// Any::Advice(advice) => { -// let mut debug_struct = f.debug_struct("Advice"); -// // Only show advice's phase if it's not in first phase. -// if advice.phase != FirstPhase.to_sealed() { -// debug_struct.field("phase", &advice.phase); -// } -// debug_struct.finish() -// } -// Any::Fixed => f.debug_struct("Fixed").finish(), -// Any::Instance => f.debug_struct("Instance").finish(), -// } -// } -// } -// -// impl Ord for Any { -// fn cmp(&self, other: &Self) -> std::cmp::Ordering { -// // This ordering is consensus-critical! The layouters rely on deterministic column -// // orderings. -// match (self, other) { -// (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, -// (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), -// // Across column types, sort Instance < Advice < Fixed. 
-// (Any::Instance, Any::Advice(_)) -// | (Any::Advice(_), Any::Fixed) -// | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, -// (Any::Fixed, Any::Instance) -// | (Any::Fixed, Any::Advice(_)) -// | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, -// } -// } -// } -// -// impl PartialOrd for Any { -// fn partial_cmp(&self, other: &Self) -> Option { -// Some(self.cmp(other)) -// } -// } -// -// impl ColumnType for Advice { -// fn query_cell(&self, index: usize, at: Rotation) -> Expression { -// Expression::Advice(AdviceQuery { -// index: None, -// column_index: index, -// rotation: at, -// phase: self.phase, -// }) -// } -// } -// impl ColumnType for Fixed { -// fn query_cell(&self, index: usize, at: Rotation) -> Expression { -// Expression::Fixed(FixedQuery { -// index: None, -// column_index: index, -// rotation: at, -// }) -// } -// } -// impl ColumnType for Instance { -// fn query_cell(&self, index: usize, at: Rotation) -> Expression { -// Expression::Instance(InstanceQuery { -// index: None, -// column_index: index, -// rotation: at, -// }) -// } -// } -// impl ColumnType for Any { -// fn query_cell(&self, index: usize, at: Rotation) -> Expression { -// match self { -// Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { -// index: None, -// column_index: index, -// rotation: at, -// phase: *phase, -// }), -// Any::Fixed => Expression::Fixed(FixedQuery { -// index: None, -// column_index: index, -// rotation: at, -// }), -// Any::Instance => Expression::Instance(InstanceQuery { -// index: None, -// column_index: index, -// rotation: at, -// }), -// } -// } -// } -// -// impl From for Any { -// fn from(advice: Advice) -> Any { -// Any::Advice(advice) -// } -// } -// -// impl From for Any { -// fn from(_: Fixed) -> Any { -// Any::Fixed -// } -// } -// -// impl From for Any { -// fn from(_: Instance) -> Any { -// Any::Instance -// } -// } -// -// impl From> for Column { -// fn from(advice: Column) -> Column { -// Column { -// 
index: advice.index(), -// column_type: Any::Advice(advice.column_type), -// } -// } -// } -// -// impl From> for Column { -// fn from(advice: Column) -> Column { -// Column { -// index: advice.index(), -// column_type: Any::Fixed, -// } -// } -// } -// -// impl From> for Column { -// fn from(advice: Column) -> Column { -// Column { -// index: advice.index(), -// column_type: Any::Instance, -// } -// } -// } -// -// impl TryFrom> for Column { -// type Error = &'static str; -// -// fn try_from(any: Column) -> Result { -// match any.column_type() { -// Any::Advice(advice) => Ok(Column { -// index: any.index(), -// column_type: *advice, -// }), -// _ => Err("Cannot convert into Column"), -// } -// } -// } -// -// impl TryFrom> for Column { -// type Error = &'static str; -// -// fn try_from(any: Column) -> Result { -// match any.column_type() { -// Any::Fixed => Ok(Column { -// index: any.index(), -// column_type: Fixed, -// }), -// _ => Err("Cannot convert into Column"), -// } -// } -// } -// -// impl TryFrom> for Column { -// type Error = &'static str; -// -// fn try_from(any: Column) -> Result { -// match any.column_type() { -// Any::Instance => Ok(Column { -// index: any.index(), -// column_type: Instance, -// }), -// _ => Err("Cannot convert into Column"), -// } -// } -// } - -// /// Query of fixed column at a certain relative location -// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -// pub struct FixedQueryMid { -// /// Column index -// pub column_index: usize, -// /// Rotation of this query -// pub rotation: Rotation, -// } - /// Query of fixed column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct FixedQuery { @@ -432,17 +100,6 @@ impl FixedQuery { } } -// /// Query of advice column at a certain relative location -// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -// pub struct AdviceQueryMid { -// /// Column index -// pub column_index: usize, -// /// Rotation of this query -// pub rotation: Rotation, -// /// Phase of this 
advice column -// pub phase: sealed::Phase, -// } - /// Query of advice column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct AdviceQuery { @@ -473,15 +130,6 @@ impl AdviceQuery { } } -// /// Query of instance column at a certain relative location -// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -// pub struct InstanceQueryMid { -// /// Column index -// pub column_index: usize, -// /// Rotation of this query -// pub rotation: Rotation, -// } - /// Query of instance column at a certain relative location #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct InstanceQuery { @@ -505,71 +153,6 @@ impl InstanceQuery { } } -// /// A challenge squeezed from transcript after advice columns at the phase have been committed. -// #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -// pub struct Challenge { -// index: usize, -// pub(crate) phase: sealed::Phase, -// } -// -// impl Challenge { -// /// Index of this challenge. -// pub fn index(&self) -> usize { -// self.index -// } -// -// /// Phase of this challenge. -// pub fn phase(&self) -> u8 { -// self.phase.0 -// } -// -// /// Return Expression -// pub fn expr(&self) -> Expression { -// Expression::Challenge(*self) -// } -// } - -// /// Low-degree expression representing an identity that must hold over the committed columns. 
-// #[derive(Clone, Debug, PartialEq, Eq)] -// pub enum ExpressionMid { -// /// This is a constant polynomial -// Constant(F), -// /// This is a fixed column queried at a certain relative location -// Fixed(FixedQueryMid), -// /// This is an advice (witness) column queried at a certain relative location -// Advice(AdviceQueryMid), -// /// This is an instance (external) column queried at a certain relative location -// Instance(InstanceQueryMid), -// /// This is a challenge -// Challenge(Challenge), -// /// This is a negated polynomial -// Negated(Box>), -// /// This is the sum of two polynomials -// Sum(Box>, Box>), -// /// This is the product of two polynomials -// Product(Box>, Box>), -// /// This is a scaled polynomial -// Scaled(Box>, F), -// } - -// impl ExpressionMid { -// /// Compute the degree of this polynomial -// pub fn degree(&self) -> usize { -// use ExpressionMid::*; -// match self { -// Constant(_) => 0, -// Fixed(_) => 1, -// Advice(_) => 1, -// Instance(_) => 1, -// Challenge(_) => 0, -// Negated(poly) => poly.degree(), -// Sum(a, b) => max(a.degree(), b.degree()), -// Product(a, b) => a.degree() + b.degree(), -// Scaled(poly, _) => poly.degree(), -// } -// } -// } - /// Low-degree expression representing an identity that must hold over the committed columns. #[derive(Clone, PartialEq, Eq)] pub enum Expression { @@ -1184,25 +767,6 @@ impl>, Iter: IntoIterator> IntoIterato } } -// /// A Gate contains a single polynomial identity with a name as metadata. -// #[derive(Clone, Debug)] -// pub struct GateV2Backend { -// name: String, -// poly: ExpressionMid, -// } -// -// impl GateV2Backend { -// /// Returns the gate name. 
-// pub fn name(&self) -> &str { -// self.name.as_str() -// } -// -// /// Returns the polynomial identity of this gate -// pub fn polynomial(&self) -> &ExpressionMid { -// &self.poly -// } -// } - /// Gate #[derive(Clone, Debug)] pub struct Gate { @@ -1228,22 +792,6 @@ impl Gate { } } -// /// Data that needs to be preprocessed from a circuit -// #[derive(Debug, Clone)] -// pub struct PreprocessingV2 { -// // TODO(Edu): Can we replace this by a simpler structure? -// pub(crate) permutation: permutation::keygen::Assembly, -// pub(crate) fixed: Vec>, -// } - -// /// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System -// /// as well as the fixed columns and copy constraints information. -// #[derive(Debug, Clone)] -// pub struct CompiledCircuitV2 { -// pub(crate) preprocessing: PreprocessingV2, -// pub(crate) cs: ConstraintSystemV2Backend, -// } - struct QueriesMap { advice_map: HashMap<(Column, Rotation), usize>, instance_map: HashMap<(Column, Rotation), usize>, @@ -1324,111 +872,6 @@ impl QueriesMap { } } -// /// This is a description of the circuit environment, such as the gate, column and -// /// permutation arrangements. -// #[derive(Debug, Clone)] -// pub struct ConstraintSystemV2Backend { -// pub(crate) num_fixed_columns: usize, -// pub(crate) num_advice_columns: usize, -// pub(crate) num_instance_columns: usize, -// pub(crate) num_challenges: usize, -// -// /// Contains the index of each advice column that is left unblinded. -// pub(crate) unblinded_advice_columns: Vec, -// -// /// Contains the phase for each advice column. Should have same length as num_advice_columns. -// pub(crate) advice_column_phase: Vec, -// /// Contains the phase for each challenge. Should have same length as num_challenges. 
-// pub(crate) challenge_phase: Vec, -// -// pub(crate) gates: Vec>, -// -// // Permutation argument for performing equality constraints -// pub(crate) permutation: permutation::Argument, -// -// // Vector of lookup arguments, where each corresponds to a sequence of -// // input expressions and a sequence of table expressions involved in the lookup. -// pub(crate) lookups: Vec>, -// -// // Vector of shuffle arguments, where each corresponds to a sequence of -// // input expressions and a sequence of shuffle expressions involved in the shuffle. -// pub(crate) shuffles: Vec>, -// -// // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. -// pub(crate) general_column_annotations: HashMap, -// } - -// impl Into> for ConstraintSystem { -// fn into(self) -> ConstraintSystemV2Backend { -// ConstraintSystemV2Backend { -// num_fixed_columns: self.num_fixed_columns, -// num_advice_columns: self.num_advice_columns, -// num_instance_columns: self.num_instance_columns, -// num_challenges: self.num_challenges, -// unblinded_advice_columns: self.unblinded_advice_columns.clone(), -// advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), -// challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), -// gates: self -// .gates -// .iter() -// .map(|g| { -// g.polys.clone().into_iter().enumerate().map(|(i, e)| { -// let name = match g.constraint_name(i) { -// "" => g.name.clone(), -// constraint_name => format!("{}:{}", g.name, constraint_name), -// }; -// GateV2Backend { -// name, -// poly: e.into(), -// } -// }) -// }) -// .flatten() -// .collect(), -// permutation: self.permutation.clone().into(), -// lookups: self -// .lookups -// .iter() -// .map(|l| halo2_middleware::lookup::ArgumentV2 { -// name: l.name.clone(), -// input_expressions: l -// .input_expressions -// .clone() -// .into_iter() -// .map(|e| e.into()) -// .collect(), -// table_expressions: l -// .table_expressions -// .clone() 
-// .into_iter() -// .map(|e| e.into()) -// .collect(), -// }) -// .collect(), -// shuffles: self -// .shuffles -// .iter() -// .map(|s| halo2_middleware::shuffle::ArgumentV2 { -// name: s.name.clone(), -// input_expressions: s -// .input_expressions -// .clone() -// .into_iter() -// .map(|e| e.into()) -// .collect(), -// shuffle_expressions: s -// .shuffle_expressions -// .clone() -// .into_iter() -// .map(|e| e.into()) -// .collect(), -// }) -// .collect(), -// general_column_annotations: self.general_column_annotations.clone(), -// } -// } -// } - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed /// query references in the expressions. fn collect_queries_gates( diff --git a/backend/src/plonk/lookup.rs b/backend/src/plonk/lookup.rs index 9f9c70773b..0485ab7a60 100644 --- a/backend/src/plonk/lookup.rs +++ b/backend/src/plonk/lookup.rs @@ -1,19 +1,10 @@ use super::circuit::Expression; use ff::Field; -// use halo2_middleware::circuit::ExpressionMid; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; -// /// Expressions involved in a lookup argument, with a name as metadata. -// #[derive(Clone, Debug)] -// pub struct ArgumentV2 { -// pub(crate) name: String, -// pub(crate) input_expressions: Vec>, -// pub(crate) table_expressions: Vec>, -// } - /// Expressions involved in a lookup argument, with a name as metadata. #[derive(Clone)] pub struct Argument { diff --git a/backend/src/plonk/shuffle.rs b/backend/src/plonk/shuffle.rs index a9d4f7faef..77fc8a52d7 100644 --- a/backend/src/plonk/shuffle.rs +++ b/backend/src/plonk/shuffle.rs @@ -5,14 +5,6 @@ use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; -// /// Expressions involved in a shuffle argument, with a name as metadata. 
-// #[derive(Clone, Debug)] -// pub struct ArgumentV2 { -// pub(crate) name: String, -// pub(crate) input_expressions: Vec>, -// pub(crate) shuffle_expressions: Vec>, -// } - /// Expressions involved in a shuffle argument, with a name as metadata. #[derive(Clone)] pub struct Argument { diff --git a/backend/src/poly.rs b/backend/src/poly.rs index 529e157618..100ee10eb8 100644 --- a/backend/src/poly.rs +++ b/backend/src/poly.rs @@ -321,26 +321,3 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { res } } - -// /// Describes the relative rotation of a vector. Negative numbers represent -// /// reverse (leftmost) rotations and positive numbers represent forward (rightmost) -// /// rotations. Zero represents no rotation. -// #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -// pub struct Rotation(pub i32); -// -// impl Rotation { -// /// The current location in the evaluation domain -// pub fn cur() -> Rotation { -// Rotation(0) -// } -// -// /// The previous location in the evaluation domain -// pub fn prev() -> Rotation { -// Rotation(-1) -// } -// -// /// The next location in the evaluation domain -// pub fn next() -> Rotation { -// Rotation(1) -// } -// } diff --git a/frontend/src/plonk.rs b/frontend/src/plonk.rs new file mode 100644 index 0000000000..95b8fd0559 --- /dev/null +++ b/frontend/src/plonk.rs @@ -0,0 +1,73 @@ +/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the +/// circuit into its columns and the column configuration; as well as doing the fixed column and +/// copy constraints assignments. The output of this function can then be used for the key +/// generation, and proof generation. +/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
+pub fn compile_circuit>( + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result< + ( + CompiledCircuitV2, + ConcreteCircuit::Config, + ConstraintSystem, + ), + Error, +> { + let n = 2usize.pow(k); + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + if n < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(k)); + } + + let mut assembly = crate::plonk::keygen::Assembly { + k, + fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], + permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), + selectors: vec![vec![false; n]; cs.num_selectors], + usable_rows: 0..n - (cs.blinding_factors() + 1), + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain URS + ConcreteCircuit::FloorPlanner::synthesize( + &mut assembly, + circuit, + config.clone(), + cs.constants.clone(), + )?; + + let fixed = batch_invert_assigned(assembly.fixed); + let (cs, selector_polys) = if compress_selectors { + cs.compress_selectors(assembly.selectors.clone()) + } else { + // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
+ let selectors = std::mem::take(&mut assembly.selectors); + cs.directly_convert_selectors_to_fixed(selectors) + }; + let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); + fixed.extend(selector_polys.into_iter()); + + let preprocessing = PreprocessingV2 { + permutation: permutation::keygen::AssemblyMid { + copies: assembly.permutation.copies, + }, + fixed, + }; + + Ok(( + CompiledCircuitV2 { + cs: cs.clone().into(), + preprocessing, + }, + config, + cs, + )) +} diff --git a/frontend/src/plonk/circuit.rs b/frontend/src/plonk/circuit.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/middleware/src/lib.rs b/middleware/src/lib.rs index 5a6047b469..b523ace63d 100644 --- a/middleware/src/lib.rs +++ b/middleware/src/lib.rs @@ -5,6 +5,8 @@ pub mod permutation; pub mod poly; pub mod shuffle; +pub use ff; + // TODO: Remove with permutation::Argument simplification pub mod multicore { pub use rayon::{ From cd7cf537e254187b2562bb527439874293d6ee5f Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 11:33:35 +0000 Subject: [PATCH 35/79] Checkpoint --- backend/Cargo.toml | 1 + backend/src/arithmetic.rs | 57 +- backend/src/helpers.rs | 2 +- backend/src/multicore.rs | 38 - backend/src/plonk.rs | 58 - backend/src/plonk/assigned.rs | 1330 ++++++++-------- backend/src/plonk/circuit.rs | 1265 +--------------- backend/src/plonk/keygen.rs | 2 +- backend/src/plonk/prover.rs | 4 +- backend/src/plonk/vanishing/prover.rs | 2 +- backend/src/plonk/vanishing/verifier.rs | 2 +- backend/src/plonk/verifier.rs | 2 +- backend/src/plonk/verifier/batch.rs | 2 +- backend/src/poly.rs | 4 +- backend/src/poly/commitment.rs | 2 +- backend/src/poly/domain.rs | 8 +- backend/src/poly/ipa/commitment.rs | 4 +- backend/src/poly/ipa/commitment/prover.rs | 2 +- backend/src/poly/ipa/msm.rs | 2 +- backend/src/poly/ipa/multiopen.rs | 2 +- backend/src/poly/ipa/multiopen/prover.rs | 2 +- backend/src/poly/ipa/multiopen/verifier.rs | 2 +- backend/src/poly/ipa/strategy.rs | 2 
+- backend/src/poly/kzg/commitment.rs | 4 +- backend/src/poly/kzg/msm.rs | 2 +- backend/src/poly/kzg/multiopen/gwc.rs | 2 +- .../src/poly/kzg/multiopen/gwc/verifier.rs | 2 +- backend/src/poly/kzg/multiopen/shplonk.rs | 4 +- .../src/poly/kzg/multiopen/shplonk/prover.rs | 2 +- .../poly/kzg/multiopen/shplonk/verifier.rs | 2 +- backend/src/poly/kzg/strategy.rs | 2 +- backend/src/poly/multiopen_test.rs | 2 +- common/Cargo.toml | 1 + common/src/arithmetic.rs | 53 + common/src/helpers.rs | 0 common/src/lib.rs | 5 + common/src/multicore.rs | 38 + common/src/plonk.rs | 1340 +++++++++++++++++ {backend => common}/src/plonk/lookup.rs | 2 +- .../src/plonk/lookup/prover.rs | 2 +- .../src/plonk/lookup/verifier.rs | 2 +- {backend => common}/src/plonk/permutation.rs | 0 .../src/plonk/permutation/keygen.rs | 2 +- .../src/plonk/permutation/prover.rs | 2 +- .../src/plonk/permutation/verifier.rs | 2 +- {backend => common}/src/plonk/shuffle.rs | 2 +- .../src/plonk/shuffle/prover.rs | 2 +- .../src/plonk/shuffle/verifier.rs | 2 +- common/src/poly.rs | 323 ++++ frontend/Cargo.toml | 2 + frontend/src/circuit.rs | 594 ++++++++ frontend/src/circuit/floor_planner.rs | 6 + .../src/circuit/floor_planner/single_pass.rs | 434 ++++++ frontend/src/circuit/floor_planner/v1.rs | 550 +++++++ .../src/circuit/floor_planner/v1/strategy.rs | 283 ++++ frontend/src/circuit/layouter.rs | 318 ++++ frontend/src/circuit/table_layouter.rs | 415 +++++ frontend/src/circuit/value.rs | 704 +++++++++ frontend/src/error.rs | 136 ++ frontend/src/lib.rs | 6 + frontend/src/plonk.rs | 13 + frontend/src/plonk/circuit.rs | 1264 ++++++++++++++++ .../src/plonk/circuit/compress_selectors.rs | 352 +++++ frontend/src/plonk/lookup.rs | 10 + frontend/src/plonk/permutation.rs | 8 + frontend/src/plonk/shuffle.rs | 10 + frontend/src/poly.rs | 85 ++ middleware/src/lib.rs | 1 + middleware/src/plonk.rs | 665 ++++++++ 69 files changed, 8329 insertions(+), 2122 deletions(-) create mode 100644 common/src/arithmetic.rs create mode 100644 
common/src/helpers.rs create mode 100644 common/src/multicore.rs create mode 100644 common/src/plonk.rs rename {backend => common}/src/plonk/lookup.rs (99%) rename {backend => common}/src/plonk/lookup/prover.rs (99%) rename {backend => common}/src/plonk/lookup/verifier.rs (99%) rename {backend => common}/src/plonk/permutation.rs (100%) rename {backend => common}/src/plonk/permutation/keygen.rs (99%) rename {backend => common}/src/plonk/permutation/prover.rs (99%) rename {backend => common}/src/plonk/permutation/verifier.rs (99%) rename {backend => common}/src/plonk/shuffle.rs (98%) rename {backend => common}/src/plonk/shuffle/prover.rs (99%) rename {backend => common}/src/plonk/shuffle/verifier.rs (99%) create mode 100644 common/src/poly.rs create mode 100644 frontend/src/circuit.rs create mode 100644 frontend/src/circuit/floor_planner.rs create mode 100644 frontend/src/circuit/floor_planner/single_pass.rs create mode 100644 frontend/src/circuit/floor_planner/v1.rs create mode 100644 frontend/src/circuit/floor_planner/v1/strategy.rs create mode 100644 frontend/src/circuit/layouter.rs create mode 100644 frontend/src/circuit/table_layouter.rs create mode 100644 frontend/src/circuit/value.rs create mode 100644 frontend/src/error.rs create mode 100644 frontend/src/plonk/circuit/compress_selectors.rs create mode 100644 frontend/src/plonk/lookup.rs create mode 100644 frontend/src/plonk/permutation.rs create mode 100644 frontend/src/plonk/shuffle.rs create mode 100644 frontend/src/poly.rs create mode 100644 middleware/src/plonk.rs diff --git a/backend/Cargo.toml b/backend/Cargo.toml index cf1987c3dc..e97451c686 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -38,6 +38,7 @@ serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" halo2_middleware = { path = "../middleware" } +halo2_common = { path = "../common" } # Developer tooling dependencies plotters = { version = "0.3.0", 
default-features = false, optional = true } diff --git a/backend/src/arithmetic.rs b/backend/src/arithmetic.rs index 0163e355eb..063f2e3814 100644 --- a/backend/src/arithmetic.rs +++ b/backend/src/arithmetic.rs @@ -1,12 +1,13 @@ //! This module provides common utilities, traits and structures for group, //! field and polynomial arithmetic. -use super::multicore; -pub use ff::Field; use group::{ ff::{BatchInvert, PrimeField}, Curve, Group, GroupOpsOwned, ScalarMulOwned, }; +use halo2_common::arithmetic::parallelize; +use halo2_common::multicore; +pub use halo2_middleware::ff::Field; pub use halo2curves::{CurveAffine, CurveExt}; @@ -381,58 +382,6 @@ where q } -/// This utility function will parallelize an operation that is to be -/// performed over a mutable slice. -pub fn parallelize(v: &mut [T], f: F) { - // Algorithm rationale: - // - // Using the stdlib `chunks_mut` will lead to severe load imbalance. - // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 - // if the division is not exact, the last chunk will be the remainder. - // - // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, - // There will be a 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. - // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, - // a load imbalance of 2x. - // - // Instead we can divide work into chunks of size - // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 - // - // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut - // - // See also OpenMP spec (page 60) - // http://www.openmp.org/mp-documents/openmp-4.5.pdf - // "When no chunk_size is specified, the iteration space is divided into chunks - // that are approximately equal in size, and at most one chunk is distributed to - // each thread. The size of the chunks is unspecified in this case." 
- // This implies chunks are the same size ±1 - - let f = &f; - let total_iters = v.len(); - let num_threads = multicore::current_num_threads(); - let base_chunk_size = total_iters / num_threads; - let cutoff_chunk_id = total_iters % num_threads; - let split_pos = cutoff_chunk_id * (base_chunk_size + 1); - let (v_hi, v_lo) = v.split_at_mut(split_pos); - - multicore::scope(|scope| { - // Skip special-case: number of iterations is cleanly divided by number of threads. - if cutoff_chunk_id != 0 { - for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { - let offset = chunk_id * (base_chunk_size + 1); - scope.spawn(move |_| f(chunk, offset)); - } - } - // Skip special-case: less iterations than number of threads. - if base_chunk_size != 0 { - for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { - let offset = split_pos + (chunk_id * base_chunk_size); - scope.spawn(move |_| f(chunk, offset)); - } - } - }); -} - fn log2_floor(num: usize) -> u32 { assert!(num > 0); diff --git a/backend/src/helpers.rs b/backend/src/helpers.rs index faf7351a3e..3b1e5769f8 100644 --- a/backend/src/helpers.rs +++ b/backend/src/helpers.rs @@ -1,5 +1,5 @@ use crate::poly::Polynomial; -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; use halo2curves::{serde::SerdeObject, CurveAffine}; use std::io; diff --git a/backend/src/multicore.rs b/backend/src/multicore.rs index 4d30b91a8b..e69de29bb2 100644 --- a/backend/src/multicore.rs +++ b/backend/src/multicore.rs @@ -1,38 +0,0 @@ -pub use rayon::{ - current_num_threads, - iter::{IndexedParallelIterator, IntoParallelRefIterator}, - iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, - join, scope, - slice::ParallelSliceMut, - Scope, -}; - -pub trait TryFoldAndReduce { - /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, - /// falling back on `Iterator::try_fold` when the `multicore` feature flag is - /// disabled. 
- /// The `try_fold_and_reduce` function can only be called by a iter with - /// `Result` item type because the `fold_op` must meet the trait - /// bounds of both `try_fold` and `try_reduce` from rayon. - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result; -} - -impl TryFoldAndReduce for I -where - T: Send + Sync, - E: Send + Sync, - I: rayon::iter::ParallelIterator>, -{ - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result { - self.try_fold(&identity, &fold_op) - .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) - } -} diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index 53ff0efe68..749490637b 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -44,64 +44,6 @@ pub use verifier::*; use evaluation::Evaluator; use std::io; -/// List of queries (columns and rotations) used by a circuit -#[derive(Debug, Clone)] -pub struct Queries { - /// List of unique advice queries - pub advice: Vec<(Column, Rotation)>, - /// List of unique instance queries - pub instance: Vec<(Column, Rotation)>, - /// List of unique fixed queries - pub fixed: Vec<(Column, Rotation)>, - /// Contains an integer for each advice column - /// identifying how many distinct queries it has - /// so far; should be same length as cs.num_advice_columns. - pub num_advice_queries: Vec, -} - -impl Queries { - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) 
- + 1 // for at least one row - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } -} - /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. #[derive(Clone, Debug)] diff --git a/backend/src/plonk/assigned.rs b/backend/src/plonk/assigned.rs index 07de325678..907ab22650 100644 --- a/backend/src/plonk/assigned.rs +++ b/backend/src/plonk/assigned.rs @@ -1,665 +1,665 @@ -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; - -use group::ff::Field; - -/// A value assigned to a cell within a circuit. -/// -/// Stored as a fraction, so the backend can use batch inversion. -/// -/// A denominator of zero maps to an assigned value of zero. -#[derive(Clone, Copy, Debug)] -pub enum Assigned { - /// The field element zero. 
- Zero, - /// A value that does not require inversion to evaluate. - Trivial(F), - /// A value stored as a fraction to enable batch inversion. - Rational(F, F), -} - -impl From<&Assigned> for Assigned { - fn from(val: &Assigned) -> Self { - *val - } -} - -impl From<&F> for Assigned { - fn from(numerator: &F) -> Self { - Assigned::Trivial(*numerator) - } -} - -impl From for Assigned { - fn from(numerator: F) -> Self { - Assigned::Trivial(numerator) - } -} - -impl From<(F, F)> for Assigned { - fn from((numerator, denominator): (F, F)) -> Self { - Assigned::Rational(numerator, denominator) - } -} - -impl PartialEq for Assigned { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - // At least one side is directly zero. - (Self::Zero, Self::Zero) => true, - (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - x.is_zero_vartime() - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, - (Self::Trivial(x), Self::Rational(numerator, denominator)) - | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { - &(*x * denominator) == numerator - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, - } - } -} - -impl Eq for Assigned {} - -impl Neg for Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - match self { - Self::Zero => Self::Zero, - Self::Trivial(numerator) => Self::Trivial(-numerator), - Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), - } - } -} - -impl Neg for &Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - -*self - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - // One side is directly zero. - (Self::Zero, _) => rhs, - (_, Self::Zero) => self, - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - other - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator + denominator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - self + Self::Trivial(rhs) - } -} - -impl Add for &Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for Assigned { - type Output = Assigned; - fn add(self, rhs: &Self) -> Assigned { - self + *rhs - } -} - -impl Add> for &Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for &Assigned { - type Output = Assigned; - fn add(self, rhs: &Assigned) -> Assigned { - *self + *rhs - } -} - -impl AddAssign for Assigned { - fn add_assign(&mut self, rhs: Self) { - *self = *self + rhs; - } -} - -impl AddAssign<&Assigned> for Assigned { - fn add_assign(&mut self, rhs: &Self) { - *self = *self + rhs; - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - self + (-rhs) - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - self + (-rhs) - } -} - -impl Sub for &Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for Assigned { - type Output = Assigned; - fn sub(self, rhs: &Self) -> Assigned { - self - *rhs - } -} - -impl Sub> for &Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for &Assigned { - type 
Output = Assigned; - fn sub(self, rhs: &Assigned) -> Assigned { - *self - *rhs - } -} - -impl SubAssign for Assigned { - fn sub_assign(&mut self, rhs: Self) { - *self = *self - rhs; - } -} - -impl SubAssign<&Assigned> for Assigned { - fn sub_assign(&mut self, rhs: &Self) { - *self = *self - rhs; - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - (Self::Zero, _) | (_, Self::Zero) => Self::Zero, - (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - self * Self::Trivial(rhs) - } -} - -impl Mul for &Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - *self * rhs - } -} - -impl Mul<&Assigned> for Assigned { - type Output = Assigned; - fn mul(self, rhs: &Assigned) -> Assigned { - self * *rhs - } -} - -impl MulAssign for Assigned { - fn mul_assign(&mut self, rhs: Self) { - *self = *self * rhs; - } -} - -impl MulAssign<&Assigned> for Assigned { - fn mul_assign(&mut self, rhs: &Self) { - *self = *self * rhs; - } -} - -impl Assigned { - /// Returns the numerator. - pub fn numerator(&self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => *x, - Self::Rational(numerator, _) => *numerator, - } - } - - /// Returns the denominator, if non-trivial. - pub fn denominator(&self) -> Option { - match self { - Self::Zero => None, - Self::Trivial(_) => None, - Self::Rational(_, denominator) => Some(*denominator), - } - } - - /// Returns true iff this element is zero. 
- pub fn is_zero_vartime(&self) -> bool { - match self { - Self::Zero => true, - Self::Trivial(x) => x.is_zero_vartime(), - // Assigned maps x/0 -> 0. - Self::Rational(numerator, denominator) => { - numerator.is_zero_vartime() || denominator.is_zero_vartime() - } - } - } - - /// Doubles this element. - #[must_use] - pub fn double(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.double()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.double(), *denominator) - } - } - } - - /// Squares this element. - #[must_use] - pub fn square(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.square()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.square(), denominator.square()) - } - } - } - - /// Cubes this element. - #[must_use] - pub fn cube(&self) -> Self { - self.square() * self - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Rational(F::ONE, *x), - Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), - } - } - - /// Evaluates this assigned value directly, performing an unbatched inversion if - /// necessary. - /// - /// If the denominator is zero, this returns zero. - pub fn evaluate(self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => x, - Self::Rational(numerator, denominator) => { - if denominator == F::ONE { - numerator - } else { - numerator * denominator.invert().unwrap_or(F::ZERO) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::Assigned; - // We use (numerator, denominator) in the comments below to denote a rational. 
- #[test] - fn add_trivial_to_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // 2 + (1,0) = 2 + 0 = 2 - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn add_rational_to_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) + (1,0) = (1,2) + 0 = (1,2) - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn sub_trivial_from_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - 2 = 0 - 2 = -2 - // This fails if subtraction is implemented using normal rules for rationals. - assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // 2 - (1,0) = 2 - 0 = 2 - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn sub_rational_from_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - (1,2) = 0 - (1,2) = -(1,2) - // This fails if subtraction is implemented using normal rules for rationals. 
- assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // (1,2) - (1,0) = (1,2) - 0 = (1,2) - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn mul_rational_by_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) * (1,0) = (1,2) * 0 = 0 - assert_eq!((a * b).evaluate(), Fp::zero()); - - // (1,0) * (1,2) = 0 * (1,2) = 0 - assert_eq!((b * a).evaluate(), Fp::zero()); - } -} - -#[cfg(test)] -mod proptests { - use std::{ - cmp, - ops::{Add, Mul, Neg, Sub}, - }; - - use group::ff::Field; - use halo2curves::pasta::Fp; - use proptest::{collection::vec, prelude::*, sample::select}; - - use super::Assigned; - - trait UnaryOperand: Neg { - fn double(&self) -> Self; - fn square(&self) -> Self; - fn cube(&self) -> Self; - fn inv0(&self) -> Self; - } - - impl UnaryOperand for F { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert().unwrap_or(F::ZERO) - } - } - - impl UnaryOperand for Assigned { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert() - } - } - - #[derive(Clone, Debug)] - enum UnaryOperator { - Neg, - Double, - Square, - Cube, - Inv0, - } - - const UNARY_OPERATORS: &[UnaryOperator] = &[ - UnaryOperator::Neg, - UnaryOperator::Double, - UnaryOperator::Square, - UnaryOperator::Cube, - UnaryOperator::Inv0, - ]; - - impl UnaryOperator { - fn apply(&self, a: F) -> F { - match self { - Self::Neg => -a, - Self::Double => a.double(), - Self::Square => a.square(), - Self::Cube => a.cube(), - Self::Inv0 => a.inv0(), - } - } - } - - trait BinaryOperand: Sized + Add + Sub + Mul {} - impl BinaryOperand for F {} - impl BinaryOperand for Assigned {} - - #[derive(Clone, Debug)] - 
enum BinaryOperator { - Add, - Sub, - Mul, - } - - const BINARY_OPERATORS: &[BinaryOperator] = &[ - BinaryOperator::Add, - BinaryOperator::Sub, - BinaryOperator::Mul, - ]; - - impl BinaryOperator { - fn apply(&self, a: F, b: F) -> F { - match self { - Self::Add => a + b, - Self::Sub => a - b, - Self::Mul => a * b, - } - } - } - - #[derive(Clone, Debug)] - enum Operator { - Unary(UnaryOperator), - Binary(BinaryOperator), - } - - prop_compose! { - /// Use narrow that can be easily reduced. - fn arb_element()(val in any::()) -> Fp { - Fp::from(val) - } - } - - prop_compose! { - fn arb_trivial()(element in arb_element()) -> Assigned { - Assigned::Trivial(element) - } - } - - prop_compose! { - /// Generates half of the denominators as zero to represent a deferred inversion. - fn arb_rational()( - numerator in arb_element(), - denominator in prop_oneof![ - 1 => Just(Fp::zero()), - 2 => arb_element(), - ], - ) -> Assigned { - Assigned::Rational(numerator, denominator) - } - } - - prop_compose! { - fn arb_operators(num_unary: usize, num_binary: usize)( - unary in vec(select(UNARY_OPERATORS), num_unary), - binary in vec(select(BINARY_OPERATORS), num_binary), - ) -> Vec { - unary.into_iter() - .map(Operator::Unary) - .chain(binary.into_iter().map(Operator::Binary)) - .collect() - } - } - - prop_compose! { - fn arb_testcase()( - num_unary in 0usize..5, - num_binary in 0usize..5, - )( - values in vec( - prop_oneof![ - 1 => Just(Assigned::Zero), - 2 => arb_trivial(), - 2 => arb_rational(), - ], - // Ensure that: - // - we have at least one value to apply unary operators to. - // - we can apply every binary operator pairwise sequentially. - cmp::max(usize::from(num_unary > 0), num_binary + 1)), - operations in arb_operators(num_unary, num_binary).prop_shuffle(), - ) -> (Vec>, Vec) { - (values, operations) - } - } - - proptest! { - #[test] - fn operation_commutativity((values, operations) in arb_testcase()) { - // Evaluate the values at the start. 
- let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); - - // Apply the operations to both the deferred and evaluated values. - fn evaluate( - items: Vec, - operators: &[Operator], - ) -> F { - let mut ops = operators.iter(); - - // Process all binary operators. We are guaranteed to have exactly as many - // binary operators as we need calls to the reduction closure. - let mut res = items.into_iter().reduce(|mut a, b| loop { - match ops.next() { - Some(Operator::Unary(op)) => a = op.apply(a), - Some(Operator::Binary(op)) => break op.apply(a, b), - None => unreachable!(), - } - }).unwrap(); - - // Process any unary operators that weren't handled in the reduce() call - // above (either if we only had one item, or there were unary operators - // after the last binary operator). We are guaranteed to have no binary - // operators remaining at this point. - loop { - match ops.next() { - Some(Operator::Unary(op)) => res = op.apply(res), - Some(Operator::Binary(_)) => unreachable!(), - None => break res, - } - } - } - let deferred_result = evaluate(values, &operations); - let evaluated_result = evaluate(elements, &operations); - - // The two should be equal, i.e. deferred inversion should commute with the - // list of operations. - assert_eq!(deferred_result.evaluate(), evaluated_result); - } - } -} +// use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +// +// use group::ff::Field; +// +// /// A value assigned to a cell within a circuit. +// /// +// /// Stored as a fraction, so the backend can use batch inversion. +// /// +// /// A denominator of zero maps to an assigned value of zero. +// #[derive(Clone, Copy, Debug)] +// pub enum Assigned { +// /// The field element zero. +// Zero, +// /// A value that does not require inversion to evaluate. +// Trivial(F), +// /// A value stored as a fraction to enable batch inversion. 
+// Rational(F, F), +// } +// +// impl From<&Assigned> for Assigned { +// fn from(val: &Assigned) -> Self { +// *val +// } +// } +// +// impl From<&F> for Assigned { +// fn from(numerator: &F) -> Self { +// Assigned::Trivial(*numerator) +// } +// } +// +// impl From for Assigned { +// fn from(numerator: F) -> Self { +// Assigned::Trivial(numerator) +// } +// } +// +// impl From<(F, F)> for Assigned { +// fn from((numerator, denominator): (F, F)) -> Self { +// Assigned::Rational(numerator, denominator) +// } +// } +// +// impl PartialEq for Assigned { +// fn eq(&self, other: &Self) -> bool { +// match (self, other) { +// // At least one side is directly zero. +// (Self::Zero, Self::Zero) => true, +// (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), +// +// // One side is x/0 which maps to zero. +// (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) +// if denominator.is_zero_vartime() => +// { +// x.is_zero_vartime() +// } +// +// // Okay, we need to do some actual math... 
+// (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, +// (Self::Trivial(x), Self::Rational(numerator, denominator)) +// | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { +// &(*x * denominator) == numerator +// } +// ( +// Self::Rational(lhs_numerator, lhs_denominator), +// Self::Rational(rhs_numerator, rhs_denominator), +// ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, +// } +// } +// } +// +// impl Eq for Assigned {} +// +// impl Neg for Assigned { +// type Output = Assigned; +// fn neg(self) -> Self::Output { +// match self { +// Self::Zero => Self::Zero, +// Self::Trivial(numerator) => Self::Trivial(-numerator), +// Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), +// } +// } +// } +// +// impl Neg for &Assigned { +// type Output = Assigned; +// fn neg(self) -> Self::Output { +// -*self +// } +// } +// +// impl Add for Assigned { +// type Output = Assigned; +// fn add(self, rhs: Assigned) -> Assigned { +// match (self, rhs) { +// // One side is directly zero. +// (Self::Zero, _) => rhs, +// (_, Self::Zero) => self, +// +// // One side is x/0 which maps to zero. +// (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) +// if denominator.is_zero_vartime() => +// { +// other +// } +// +// // Okay, we need to do some actual math... 
+// (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), +// (Self::Rational(numerator, denominator), Self::Trivial(other)) +// | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { +// Self::Rational(numerator + denominator * other, denominator) +// } +// ( +// Self::Rational(lhs_numerator, lhs_denominator), +// Self::Rational(rhs_numerator, rhs_denominator), +// ) => Self::Rational( +// lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, +// lhs_denominator * rhs_denominator, +// ), +// } +// } +// } +// +// impl Add for Assigned { +// type Output = Assigned; +// fn add(self, rhs: F) -> Assigned { +// self + Self::Trivial(rhs) +// } +// } +// +// impl Add for &Assigned { +// type Output = Assigned; +// fn add(self, rhs: F) -> Assigned { +// *self + rhs +// } +// } +// +// impl Add<&Assigned> for Assigned { +// type Output = Assigned; +// fn add(self, rhs: &Self) -> Assigned { +// self + *rhs +// } +// } +// +// impl Add> for &Assigned { +// type Output = Assigned; +// fn add(self, rhs: Assigned) -> Assigned { +// *self + rhs +// } +// } +// +// impl Add<&Assigned> for &Assigned { +// type Output = Assigned; +// fn add(self, rhs: &Assigned) -> Assigned { +// *self + *rhs +// } +// } +// +// impl AddAssign for Assigned { +// fn add_assign(&mut self, rhs: Self) { +// *self = *self + rhs; +// } +// } +// +// impl AddAssign<&Assigned> for Assigned { +// fn add_assign(&mut self, rhs: &Self) { +// *self = *self + rhs; +// } +// } +// +// impl Sub for Assigned { +// type Output = Assigned; +// fn sub(self, rhs: Assigned) -> Assigned { +// self + (-rhs) +// } +// } +// +// impl Sub for Assigned { +// type Output = Assigned; +// fn sub(self, rhs: F) -> Assigned { +// self + (-rhs) +// } +// } +// +// impl Sub for &Assigned { +// type Output = Assigned; +// fn sub(self, rhs: F) -> Assigned { +// *self - rhs +// } +// } +// +// impl Sub<&Assigned> for Assigned { +// type Output = Assigned; +// fn sub(self, rhs: &Self) -> 
Assigned { +// self - *rhs +// } +// } +// +// impl Sub> for &Assigned { +// type Output = Assigned; +// fn sub(self, rhs: Assigned) -> Assigned { +// *self - rhs +// } +// } +// +// impl Sub<&Assigned> for &Assigned { +// type Output = Assigned; +// fn sub(self, rhs: &Assigned) -> Assigned { +// *self - *rhs +// } +// } +// +// impl SubAssign for Assigned { +// fn sub_assign(&mut self, rhs: Self) { +// *self = *self - rhs; +// } +// } +// +// impl SubAssign<&Assigned> for Assigned { +// fn sub_assign(&mut self, rhs: &Self) { +// *self = *self - rhs; +// } +// } +// +// impl Mul for Assigned { +// type Output = Assigned; +// fn mul(self, rhs: Assigned) -> Assigned { +// match (self, rhs) { +// (Self::Zero, _) | (_, Self::Zero) => Self::Zero, +// (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), +// (Self::Rational(numerator, denominator), Self::Trivial(other)) +// | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { +// Self::Rational(numerator * other, denominator) +// } +// ( +// Self::Rational(lhs_numerator, lhs_denominator), +// Self::Rational(rhs_numerator, rhs_denominator), +// ) => Self::Rational( +// lhs_numerator * rhs_numerator, +// lhs_denominator * rhs_denominator, +// ), +// } +// } +// } +// +// impl Mul for Assigned { +// type Output = Assigned; +// fn mul(self, rhs: F) -> Assigned { +// self * Self::Trivial(rhs) +// } +// } +// +// impl Mul for &Assigned { +// type Output = Assigned; +// fn mul(self, rhs: F) -> Assigned { +// *self * rhs +// } +// } +// +// impl Mul<&Assigned> for Assigned { +// type Output = Assigned; +// fn mul(self, rhs: &Assigned) -> Assigned { +// self * *rhs +// } +// } +// +// impl MulAssign for Assigned { +// fn mul_assign(&mut self, rhs: Self) { +// *self = *self * rhs; +// } +// } +// +// impl MulAssign<&Assigned> for Assigned { +// fn mul_assign(&mut self, rhs: &Self) { +// *self = *self * rhs; +// } +// } +// +// impl Assigned { +// /// Returns the numerator. 
+// pub fn numerator(&self) -> F { +// match self { +// Self::Zero => F::ZERO, +// Self::Trivial(x) => *x, +// Self::Rational(numerator, _) => *numerator, +// } +// } +// +// /// Returns the denominator, if non-trivial. +// pub fn denominator(&self) -> Option { +// match self { +// Self::Zero => None, +// Self::Trivial(_) => None, +// Self::Rational(_, denominator) => Some(*denominator), +// } +// } +// +// /// Returns true iff this element is zero. +// pub fn is_zero_vartime(&self) -> bool { +// match self { +// Self::Zero => true, +// Self::Trivial(x) => x.is_zero_vartime(), +// // Assigned maps x/0 -> 0. +// Self::Rational(numerator, denominator) => { +// numerator.is_zero_vartime() || denominator.is_zero_vartime() +// } +// } +// } +// +// /// Doubles this element. +// #[must_use] +// pub fn double(&self) -> Self { +// match self { +// Self::Zero => Self::Zero, +// Self::Trivial(x) => Self::Trivial(x.double()), +// Self::Rational(numerator, denominator) => { +// Self::Rational(numerator.double(), *denominator) +// } +// } +// } +// +// /// Squares this element. +// #[must_use] +// pub fn square(&self) -> Self { +// match self { +// Self::Zero => Self::Zero, +// Self::Trivial(x) => Self::Trivial(x.square()), +// Self::Rational(numerator, denominator) => { +// Self::Rational(numerator.square(), denominator.square()) +// } +// } +// } +// +// /// Cubes this element. +// #[must_use] +// pub fn cube(&self) -> Self { +// self.square() * self +// } +// +// /// Inverts this assigned value (taking the inverse of zero to be zero). +// pub fn invert(&self) -> Self { +// match self { +// Self::Zero => Self::Zero, +// Self::Trivial(x) => Self::Rational(F::ONE, *x), +// Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), +// } +// } +// +// /// Evaluates this assigned value directly, performing an unbatched inversion if +// /// necessary. +// /// +// /// If the denominator is zero, this returns zero. 
+// pub fn evaluate(self) -> F { +// match self { +// Self::Zero => F::ZERO, +// Self::Trivial(x) => x, +// Self::Rational(numerator, denominator) => { +// if denominator == F::ONE { +// numerator +// } else { +// numerator * denominator.invert().unwrap_or(F::ZERO) +// } +// } +// } +// } +// } +// +// #[cfg(test)] +// mod tests { +// use halo2curves::pasta::Fp; +// +// use super::Assigned; +// // We use (numerator, denominator) in the comments below to denote a rational. +// #[test] +// fn add_trivial_to_inv0_rational() { +// // a = 2 +// // b = (1,0) +// let a = Assigned::Trivial(Fp::from(2)); +// let b = Assigned::Rational(Fp::one(), Fp::zero()); +// +// // 2 + (1,0) = 2 + 0 = 2 +// // This fails if addition is implemented using normal rules for rationals. +// assert_eq!((a + b).evaluate(), a.evaluate()); +// assert_eq!((b + a).evaluate(), a.evaluate()); +// } +// +// #[test] +// fn add_rational_to_inv0_rational() { +// // a = (1,2) +// // b = (1,0) +// let a = Assigned::Rational(Fp::one(), Fp::from(2)); +// let b = Assigned::Rational(Fp::one(), Fp::zero()); +// +// // (1,2) + (1,0) = (1,2) + 0 = (1,2) +// // This fails if addition is implemented using normal rules for rationals. +// assert_eq!((a + b).evaluate(), a.evaluate()); +// assert_eq!((b + a).evaluate(), a.evaluate()); +// } +// +// #[test] +// fn sub_trivial_from_inv0_rational() { +// // a = 2 +// // b = (1,0) +// let a = Assigned::Trivial(Fp::from(2)); +// let b = Assigned::Rational(Fp::one(), Fp::zero()); +// +// // (1,0) - 2 = 0 - 2 = -2 +// // This fails if subtraction is implemented using normal rules for rationals. 
+// assert_eq!((b - a).evaluate(), (-a).evaluate()); +// +// // 2 - (1,0) = 2 - 0 = 2 +// assert_eq!((a - b).evaluate(), a.evaluate()); +// } +// +// #[test] +// fn sub_rational_from_inv0_rational() { +// // a = (1,2) +// // b = (1,0) +// let a = Assigned::Rational(Fp::one(), Fp::from(2)); +// let b = Assigned::Rational(Fp::one(), Fp::zero()); +// +// // (1,0) - (1,2) = 0 - (1,2) = -(1,2) +// // This fails if subtraction is implemented using normal rules for rationals. +// assert_eq!((b - a).evaluate(), (-a).evaluate()); +// +// // (1,2) - (1,0) = (1,2) - 0 = (1,2) +// assert_eq!((a - b).evaluate(), a.evaluate()); +// } +// +// #[test] +// fn mul_rational_by_inv0_rational() { +// // a = (1,2) +// // b = (1,0) +// let a = Assigned::Rational(Fp::one(), Fp::from(2)); +// let b = Assigned::Rational(Fp::one(), Fp::zero()); +// +// // (1,2) * (1,0) = (1,2) * 0 = 0 +// assert_eq!((a * b).evaluate(), Fp::zero()); +// +// // (1,0) * (1,2) = 0 * (1,2) = 0 +// assert_eq!((b * a).evaluate(), Fp::zero()); +// } +// } +// +// #[cfg(test)] +// mod proptests { +// use std::{ +// cmp, +// ops::{Add, Mul, Neg, Sub}, +// }; +// +// use group::ff::Field; +// use halo2curves::pasta::Fp; +// use proptest::{collection::vec, prelude::*, sample::select}; +// +// use super::Assigned; +// +// trait UnaryOperand: Neg { +// fn double(&self) -> Self; +// fn square(&self) -> Self; +// fn cube(&self) -> Self; +// fn inv0(&self) -> Self; +// } +// +// impl UnaryOperand for F { +// fn double(&self) -> Self { +// self.double() +// } +// +// fn square(&self) -> Self { +// self.square() +// } +// +// fn cube(&self) -> Self { +// self.cube() +// } +// +// fn inv0(&self) -> Self { +// self.invert().unwrap_or(F::ZERO) +// } +// } +// +// impl UnaryOperand for Assigned { +// fn double(&self) -> Self { +// self.double() +// } +// +// fn square(&self) -> Self { +// self.square() +// } +// +// fn cube(&self) -> Self { +// self.cube() +// } +// +// fn inv0(&self) -> Self { +// self.invert() +// } +// } +// 
+// #[derive(Clone, Debug)] +// enum UnaryOperator { +// Neg, +// Double, +// Square, +// Cube, +// Inv0, +// } +// +// const UNARY_OPERATORS: &[UnaryOperator] = &[ +// UnaryOperator::Neg, +// UnaryOperator::Double, +// UnaryOperator::Square, +// UnaryOperator::Cube, +// UnaryOperator::Inv0, +// ]; +// +// impl UnaryOperator { +// fn apply(&self, a: F) -> F { +// match self { +// Self::Neg => -a, +// Self::Double => a.double(), +// Self::Square => a.square(), +// Self::Cube => a.cube(), +// Self::Inv0 => a.inv0(), +// } +// } +// } +// +// trait BinaryOperand: Sized + Add + Sub + Mul {} +// impl BinaryOperand for F {} +// impl BinaryOperand for Assigned {} +// +// #[derive(Clone, Debug)] +// enum BinaryOperator { +// Add, +// Sub, +// Mul, +// } +// +// const BINARY_OPERATORS: &[BinaryOperator] = &[ +// BinaryOperator::Add, +// BinaryOperator::Sub, +// BinaryOperator::Mul, +// ]; +// +// impl BinaryOperator { +// fn apply(&self, a: F, b: F) -> F { +// match self { +// Self::Add => a + b, +// Self::Sub => a - b, +// Self::Mul => a * b, +// } +// } +// } +// +// #[derive(Clone, Debug)] +// enum Operator { +// Unary(UnaryOperator), +// Binary(BinaryOperator), +// } +// +// prop_compose! { +// /// Use narrow that can be easily reduced. +// fn arb_element()(val in any::()) -> Fp { +// Fp::from(val) +// } +// } +// +// prop_compose! { +// fn arb_trivial()(element in arb_element()) -> Assigned { +// Assigned::Trivial(element) +// } +// } +// +// prop_compose! { +// /// Generates half of the denominators as zero to represent a deferred inversion. +// fn arb_rational()( +// numerator in arb_element(), +// denominator in prop_oneof![ +// 1 => Just(Fp::zero()), +// 2 => arb_element(), +// ], +// ) -> Assigned { +// Assigned::Rational(numerator, denominator) +// } +// } +// +// prop_compose! 
{ +// fn arb_operators(num_unary: usize, num_binary: usize)( +// unary in vec(select(UNARY_OPERATORS), num_unary), +// binary in vec(select(BINARY_OPERATORS), num_binary), +// ) -> Vec { +// unary.into_iter() +// .map(Operator::Unary) +// .chain(binary.into_iter().map(Operator::Binary)) +// .collect() +// } +// } +// +// prop_compose! { +// fn arb_testcase()( +// num_unary in 0usize..5, +// num_binary in 0usize..5, +// )( +// values in vec( +// prop_oneof![ +// 1 => Just(Assigned::Zero), +// 2 => arb_trivial(), +// 2 => arb_rational(), +// ], +// // Ensure that: +// // - we have at least one value to apply unary operators to. +// // - we can apply every binary operator pairwise sequentially. +// cmp::max(usize::from(num_unary > 0), num_binary + 1)), +// operations in arb_operators(num_unary, num_binary).prop_shuffle(), +// ) -> (Vec>, Vec) { +// (values, operations) +// } +// } +// +// proptest! { +// #[test] +// fn operation_commutativity((values, operations) in arb_testcase()) { +// // Evaluate the values at the start. +// let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); +// +// // Apply the operations to both the deferred and evaluated values. +// fn evaluate( +// items: Vec, +// operators: &[Operator], +// ) -> F { +// let mut ops = operators.iter(); +// +// // Process all binary operators. We are guaranteed to have exactly as many +// // binary operators as we need calls to the reduction closure. +// let mut res = items.into_iter().reduce(|mut a, b| loop { +// match ops.next() { +// Some(Operator::Unary(op)) => a = op.apply(a), +// Some(Operator::Binary(op)) => break op.apply(a, b), +// None => unreachable!(), +// } +// }).unwrap(); +// +// // Process any unary operators that weren't handled in the reduce() call +// // above (either if we only had one item, or there were unary operators +// // after the last binary operator). We are guaranteed to have no binary +// // operators remaining at this point. 
+// loop { +// match ops.next() { +// Some(Operator::Unary(op)) => res = op.apply(res), +// Some(Operator::Binary(_)) => unreachable!(), +// None => break res, +// } +// } +// } +// let deferred_result = evaluate(values, &operations); +// let evaluated_result = evaluate(elements, &operations); +// +// // The two should be equal, i.e. deferred inversion should commute with the +// // list of operations. +// assert_eq!(deferred_result.evaluate(), evaluated_result); +// } +// } +// } diff --git a/backend/src/plonk/circuit.rs b/backend/src/plonk/circuit.rs index ee5e605282..72fa556d11 100644 --- a/backend/src/plonk/circuit.rs +++ b/backend/src/plonk/circuit.rs @@ -2,14 +2,14 @@ use super::{lookup, permutation, shuffle, Queries}; // use crate::dev::metadata; use core::cmp::max; use core::ops::{Add, Mul}; -use ff::Field; +use halo2_common::plonk::{ConstraintSystem, Expression}; use halo2_middleware::circuit::{ Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, }; +use halo2_middleware::ff::Field; use halo2_middleware::metadata; use halo2_middleware::poly::Rotation; -use sealed::SealedPhase; use std::collections::HashMap; use std::fmt::Debug; use std::iter::{Product, Sum}; @@ -18,631 +18,6 @@ use std::{ ops::{Neg, Sub}, }; -// TODO: No sealed Phase on the backend, only in the frontend! -pub(crate) mod sealed { - /// Phase of advice column - #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(crate) u8); - - impl Phase { - pub fn prev(&self) -> Option { - self.0.checked_sub(1).map(Phase) - } - } - - impl SealedPhase for Phase { - fn to_sealed(self) -> Phase { - self - } - } - - /// Sealed trait to help keep `Phase` private. 
- pub trait SealedPhase { - fn to_sealed(self) -> Phase; - } -} - -/// Phase of advice column -pub trait Phase: SealedPhase {} - -impl Phase for P {} - -/// First phase -#[derive(Debug)] -pub struct FirstPhase; - -impl SealedPhase for super::FirstPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(0) - } -} - -/// Second phase -#[derive(Debug)] -pub struct SecondPhase; - -impl SealedPhase for super::SecondPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(1) - } -} - -/// Third phase -#[derive(Debug)] -pub struct ThirdPhase; - -impl SealedPhase for super::ThirdPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(2) - } -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl FixedQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, - /// Phase of this advice column - pub(crate) phase: sealed::Phase, -} - -impl AdviceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } - - /// Phase of this advice column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQuery { - /// Query index - pub(crate) index: Option, - /// 
Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl InstanceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, PartialEq, Eq)] -pub enum Expression { - /// This is a constant polynomial - Constant(F), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQuery), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQuery), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQuery), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl Into> for Expression { - fn into(self) -> ExpressionMid { - match self { - Expression::Constant(c) => ExpressionMid::Constant(c), - Expression::Fixed(FixedQuery { - column_index, - rotation, - .. - }) => ExpressionMid::Fixed(FixedQueryMid { - column_index, - rotation, - }), - Expression::Advice(AdviceQuery { - column_index, - rotation, - phase, - .. - }) => ExpressionMid::Advice(AdviceQueryMid { - column_index, - rotation, - phase: phase.0, - }), - Expression::Instance(InstanceQuery { - column_index, - rotation, - .. 
- }) => ExpressionMid::Instance(InstanceQueryMid { - column_index, - rotation, - }), - Expression::Challenge(c) => ExpressionMid::Challenge(c), - Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), - Expression::Sum(lhs, rhs) => { - ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Product(lhs, rhs) => { - ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), - } - } -} - -impl Expression { - /// Evaluate the polynomial using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - constant: &impl Fn(F) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - 
sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - product(a, b) - } - Expression::Scaled(a, f) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - scaled(a, *f) - } - } - } - - /// Evaluate the polynomial lazily using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate_lazy( - &self, - constant: &impl Fn(F) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - zero: &T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - let b = b.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let (a, b) = if a.complexity() <= b.complexity() { - (a, b) - } else { - (b, a) - }; - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - 
sum, - product, - scaled, - zero, - ); - - if a == *zero { - a - } else { - let b = b.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - product(a, b) - } - } - Expression::Scaled(a, f) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - scaled(a, *f) - } - } - } - - fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { - match self { - Expression::Constant(scalar) => write!(writer, "{scalar:?}"), - Expression::Fixed(query) => { - write!( - writer, - "fixed[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Advice(query) => { - write!( - writer, - "advice[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Instance(query) => { - write!( - writer, - "instance[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Challenge(challenge) => { - write!(writer, "challenge[{}]", challenge.index()) - } - Expression::Negated(a) => { - writer.write_all(b"(-")?; - a.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Sum(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"+")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Product(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"*")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Scaled(a, f) => { - a.write_identifier(writer)?; - write!(writer, "*{f:?}") - } - } - } - - /// Identifier for this expression. Expressions with identical identifiers - /// do the same calculation (but the expressions don't need to be exactly equal - /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
- pub fn identifier(&self) -> String { - let mut cursor = std::io::Cursor::new(Vec::new()); - self.write_identifier(&mut cursor).unwrap(); - String::from_utf8(cursor.into_inner()).unwrap() - } - - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.degree(), - Expression::Sum(a, b) => max(a.degree(), b.degree()), - Expression::Product(a, b) => a.degree() + b.degree(), - Expression::Scaled(poly, _) => poly.degree(), - } - } - - /// Approximate the computational complexity of this expression. - pub fn complexity(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.complexity() + 5, - Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, - Expression::Product(a, b) => a.complexity() + b.complexity() + 30, - Expression::Scaled(poly, _) => poly.complexity() + 30, - } - } - - /// Square this expression. - pub fn square(self) -> Self { - self.clone() * self - } -} - -impl std::fmt::Debug for Expression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), - // Skip enum variant and print query struct directly to maintain backwards compatibility. 
- Expression::Fixed(query) => { - let mut debug_struct = f.debug_struct("Fixed"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Advice(query) => { - let mut debug_struct = f.debug_struct("Advice"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation); - // Only show advice's phase if it's not in first phase. - if query.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &query.phase); - } - debug_struct.finish() - } - Expression::Instance(query) => { - let mut debug_struct = f.debug_struct("Instance"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Challenge(challenge) => { - f.debug_tuple("Challenge").field(challenge).finish() - } - Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), - Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), - Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), - Expression::Scaled(poly, scalar) => { - f.debug_tuple("Scaled").field(poly).field(scalar).finish() - } - } - } -} - -impl Neg for Expression { - type Output = Expression; - fn neg(self) -> Self::Output { - Expression::Negated(Box::new(self)) - } -} - -impl Add for Expression { - type Output = Expression; - fn add(self, rhs: Expression) -> Expression { - Expression::Sum(Box::new(self), Box::new(rhs)) - } -} - -impl Sub for Expression { - type Output = Expression; - 
fn sub(self, rhs: Expression) -> Expression { - Expression::Sum(Box::new(self), Box::new(-rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: Expression) -> Expression { - Expression::Product(Box::new(self), Box::new(rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: F) -> Expression { - Expression::Scaled(Box::new(self), rhs) - } -} - -impl Sum for Expression { - fn sum>(iter: I) -> Self { - iter.reduce(|acc, x| acc + x) - .unwrap_or(Expression::Constant(F::ZERO)) - } -} - -impl Product for Expression { - fn product>(iter: I) -> Self { - iter.reduce(|acc, x| acc * x) - .unwrap_or(Expression::Constant(F::ONE)) - } -} - /// Represents an index into a vector where each entry corresponds to a distinct /// point that polynomials are queried at. #[derive(Copy, Clone, Debug)] @@ -767,642 +142,6 @@ impl>, Iter: IntoIterator> IntoIterato } } -/// Gate -#[derive(Clone, Debug)] -pub struct Gate { - name: String, - constraint_names: Vec, - polys: Vec>, -} - -impl Gate { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the name of the constraint at index `constraint_index`. 
- pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { - &self.polys - } -} - -struct QueriesMap { - advice_map: HashMap<(Column, Rotation), usize>, - instance_map: HashMap<(Column, Rotation), usize>, - fixed_map: HashMap<(Column, Rotation), usize>, - advice: Vec<(Column, Rotation)>, - instance: Vec<(Column, Rotation)>, - fixed: Vec<(Column, Rotation)>, -} - -impl QueriesMap { - fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { - *self.advice_map.entry((col, rot)).or_insert_with(|| { - self.advice.push((col, rot)); - self.advice.len() - 1 - }) - } - fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { - *self.instance_map.entry((col, rot)).or_insert_with(|| { - self.instance.push((col, rot)); - self.instance.len() - 1 - }) - } - fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { - *self.fixed_map.entry((col, rot)).or_insert_with(|| { - self.fixed.push((col, rot)); - self.fixed.len() - 1 - }) - } -} - -impl QueriesMap { - fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { - match expr { - ExpressionMid::Constant(c) => Expression::Constant(*c), - ExpressionMid::Fixed(query) => { - let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); - let index = self.add_fixed(col, rot); - Expression::Fixed(FixedQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Advice(query) => { - let (col, rot) = ( - Column::new(query.column_index, Advice { phase: query.phase }), - query.rotation, - ); - let index = self.add_advice(col, rot); - Expression::Advice(AdviceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - phase: sealed::Phase(query.phase), - }) - } - ExpressionMid::Instance(query) => { - let (col, rot) = (Column::new(query.column_index, 
Instance), query.rotation); - let index = self.add_instance(col, rot); - Expression::Instance(InstanceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Challenge(c) => Expression::Challenge(*c), - ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), - ExpressionMid::Sum(lhs, rhs) => Expression::Sum( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Product(lhs, rhs) => Expression::Product( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), - } - } -} - -/// Collect queries used in gates while mapping those gates to equivalent ones with indexed -/// query references in the expressions. -fn collect_queries_gates( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.gates - .iter() - .map(|gate| Gate { - name: gate.name.clone(), - constraint_names: Vec::new(), - polys: vec![queries.as_expression(gate.polynomial())], - }) - .collect() -} - -/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed -/// query references in the expressions. -fn collect_queries_lookups( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.lookups - .iter() - .map(|lookup| lookup::Argument { - name: lookup.name.clone(), - input_expressions: lookup - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - table_expressions: lookup - .table_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() -} - -/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed -/// query references in the expressions. 
-fn collect_queries_shuffles( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.shuffles - .iter() - .map(|shuffle| shuffle::Argument { - name: shuffle.name.clone(), - input_expressions: shuffle - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - shuffle_expressions: shuffle - .shuffle_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() -} - -/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the -/// expressions of gates, lookups and shuffles into equivalent ones with indexed query -/// references. -pub(crate) fn collect_queries( - cs2: &ConstraintSystemV2Backend, -) -> ( - Queries, - Vec>, - Vec>, - Vec>, -) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; - - let gates = collect_queries_gates(cs2, &mut queries); - let lookups = collect_queries_lookups(cs2, &mut queries); - let shuffles = collect_queries_shuffles(cs2, &mut queries); - - // Each column used in a copy constraint involves a query at rotation current. 
- for column in cs2.permutation.get_columns() { - match column.column_type { - Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) - } - Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), - Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) - } - }; - } - - let mut num_advice_queries = vec![0; cs2.num_advice_columns]; - for (column, _) in queries.advice.iter() { - num_advice_queries[column.index()] += 1; - } - - let queries = Queries { - advice: queries.advice, - instance: queries.instance, - fixed: queries.fixed, - num_advice_queries, - }; - (queries, gates, lookups, shuffles) -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystem { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. 
- pub(crate) num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, - - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. - pub(crate) constants: Vec>, - - pub(crate) minimum_degree: Option, -} - -impl From> for ConstraintSystem { - fn from(cs2: ConstraintSystemV2Backend) -> Self { - let (queries, gates, lookups, shuffles) = collect_queries(&cs2); - ConstraintSystem { - num_fixed_columns: cs2.num_fixed_columns, - num_advice_columns: cs2.num_advice_columns, - num_instance_columns: cs2.num_instance_columns, - num_selectors: 0, - num_challenges: cs2.num_challenges, - unblinded_advice_columns: cs2.unblinded_advice_columns, - advice_column_phase: cs2 - .advice_column_phase - .into_iter() - .map(sealed::Phase) - .collect(), - challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), - gates, - advice_queries: queries.advice, - num_advice_queries: queries.num_advice_queries, - instance_queries: queries.instance, - fixed_queries: queries.fixed, - permutation: cs2.permutation.into(), - lookups, - shuffles, - general_column_annotations: cs2.general_column_annotations, - constants: Vec::new(), - minimum_degree: None, - } - } -} - -/// Represents the minimal parameters that 
determine a `ConstraintSystem`. -#[allow(dead_code)] -pub struct PinnedConstraintSystem<'a, F: Field> { - num_fixed_columns: &'a usize, - num_advice_columns: &'a usize, - num_instance_columns: &'a usize, - num_selectors: &'a usize, - num_challenges: &'a usize, - advice_column_phase: &'a Vec, - challenge_phase: &'a Vec, - gates: PinnedGates<'a, F>, - advice_queries: &'a Vec<(Column, Rotation)>, - instance_queries: &'a Vec<(Column, Rotation)>, - fixed_queries: &'a Vec<(Column, Rotation)>, - permutation: &'a permutation::Argument, - lookups: &'a Vec>, - shuffles: &'a Vec>, - constants: &'a Vec>, - minimum_degree: &'a Option, -} - -impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); - debug_struct - .field("num_fixed_columns", self.num_fixed_columns) - .field("num_advice_columns", self.num_advice_columns) - .field("num_instance_columns", self.num_instance_columns) - .field("num_selectors", self.num_selectors); - // Only show multi-phase related fields if it's used. 
- if *self.num_challenges > 0 { - debug_struct - .field("num_challenges", self.num_challenges) - .field("advice_column_phase", self.advice_column_phase) - .field("challenge_phase", self.challenge_phase); - } - debug_struct - .field("gates", &self.gates) - .field("advice_queries", self.advice_queries) - .field("instance_queries", self.instance_queries) - .field("fixed_queries", self.fixed_queries) - .field("permutation", self.permutation) - .field("lookups", self.lookups); - if !self.shuffles.is_empty() { - debug_struct.field("shuffles", self.shuffles); - } - debug_struct - .field("constants", self.constants) - .field("minimum_degree", self.minimum_degree); - debug_struct.finish() - } -} - -struct PinnedGates<'a, F: Field>(&'a Vec>); - -impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_list() - .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) - .finish() - } -} - -impl Default for ConstraintSystem { - fn default() -> ConstraintSystem { - ConstraintSystem { - num_fixed_columns: 0, - num_advice_columns: 0, - num_instance_columns: 0, - num_selectors: 0, - num_challenges: 0, - unblinded_advice_columns: Vec::new(), - advice_column_phase: Vec::new(), - challenge_phase: Vec::new(), - gates: vec![], - fixed_queries: Vec::new(), - advice_queries: Vec::new(), - num_advice_queries: Vec::new(), - instance_queries: Vec::new(), - permutation: permutation::Argument::new(), - lookups: Vec::new(), - shuffles: Vec::new(), - general_column_annotations: HashMap::new(), - constants: vec![], - minimum_degree: None, - } - } -} - -impl ConstraintSystem { - /// Obtain a pinned version of this constraint system; a structure with the - /// minimal parameters needed to determine the rest of the constraint - /// system. 
- pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { - PinnedConstraintSystem { - num_fixed_columns: &self.num_fixed_columns, - num_advice_columns: &self.num_advice_columns, - num_instance_columns: &self.num_instance_columns, - num_selectors: &self.num_selectors, - num_challenges: &self.num_challenges, - advice_column_phase: &self.advice_column_phase, - challenge_phase: &self.challenge_phase, - gates: PinnedGates(&self.gates), - fixed_queries: &self.fixed_queries, - advice_queries: &self.advice_queries, - instance_queries: &self.instance_queries, - permutation: &self.permutation, - lookups: &self.lookups, - shuffles: &self.shuffles, - constants: &self.constants, - minimum_degree: &self.minimum_degree, - } - } - - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - panic!("get_advice_query_index called for non-existent query"); - } - - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - panic!("get_fixed_query_index called for non-existent query"); - } - - pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - panic!("get_instance_query_index called for non-existent query"); - } - - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.get_advice_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => { - self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Instance => { - 
self.get_instance_query_index(Column::::try_from(column).unwrap(), at) - } - } - } - - /// Returns the list of phases - pub fn phases(&self) -> impl Iterator { - let max_phase = self - .advice_column_phase - .iter() - .max() - .map(|phase| phase.0) - .unwrap_or_default(); - (0..=max_phase).map(sealed::Phase) - } - - /// Compute the degree of the constraint system (the maximum degree of all - /// constraints). - pub fn degree(&self) -> usize { - // The permutation argument will serve alongside the gates, so must be - // accounted for. - let mut degree = self.permutation.required_degree(); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.lookups - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.shuffles - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // Account for each gate to ensure our quotient polynomial is the - // correct degree and that our extended domain is the right size. - degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); - - std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. 
- let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } - - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) 
- + 1 // for at least one row - } - - /// Returns number of fixed columns - pub fn num_fixed_columns(&self) -> usize { - self.num_fixed_columns - } - - /// Returns number of advice columns - pub fn num_advice_columns(&self) -> usize { - self.num_advice_columns - } - - /// Returns number of instance columns - pub fn num_instance_columns(&self) -> usize { - self.num_instance_columns - } - - /// Returns number of selectors - pub fn num_selectors(&self) -> usize { - self.num_selectors - } - - /// Returns number of challenges - pub fn num_challenges(&self) -> usize { - self.num_challenges - } - - /// Returns phase of advice columns - pub fn advice_column_phase(&self) -> Vec { - self.advice_column_phase - .iter() - .map(|phase| phase.0) - .collect() - } - - /// Returns phase of challenges - pub fn challenge_phase(&self) -> Vec { - self.challenge_phase.iter().map(|phase| phase.0).collect() - } - - /// Returns gates - pub fn gates(&self) -> &Vec> { - &self.gates - } - - /// Returns general column annotations - pub fn general_column_annotations(&self) -> &HashMap { - &self.general_column_annotations - } - - /// Returns advice queries - pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { - &self.advice_queries - } - - /// Returns instance queries - pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { - &self.instance_queries - } - - /// Returns fixed queries - pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { - &self.fixed_queries - } - - /// Returns permutation argument - pub fn permutation(&self) -> &permutation::Argument { - &self.permutation - } - - /// Returns lookup arguments - pub fn lookups(&self) -> &Vec> { - &self.lookups - } - - /// Returns shuffle arguments - pub fn shuffles(&self) -> &Vec> { - &self.shuffles - } - - /// Returns constants - pub fn constants(&self) -> &Vec> { - &self.constants - } -} - #[cfg(test)] mod tests { use super::Expression; diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 
f705c067e1..92646bfb8d 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -1,6 +1,6 @@ #![allow(clippy::int_plus_one)] -use ff::{Field, FromUniformBytes}; +use halo2_middleware::ff::{Field, FromUniformBytes}; use group::Curve; use super::{ diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index e6e99d948d..4ac4b953d6 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -1,5 +1,5 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; use std::{collections::HashMap, iter}; @@ -12,7 +12,6 @@ use super::{ use crate::{ arithmetic::{eval_polynomial, CurveAffine}, - plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, @@ -23,6 +22,7 @@ use crate::{ transcript::{EncodedChallenge, TranscriptWrite}, }; use group::prime::PrimeCurveAffine; +use halo2_middleware::plonk::Assigned; /// Collection of instance data used during proving for a single circuit proof. 
#[derive(Debug)] diff --git a/backend/src/plonk/vanishing/prover.rs b/backend/src/plonk/vanishing/prover.rs index 7943086826..0679ee1988 100644 --- a/backend/src/plonk/vanishing/prover.rs +++ b/backend/src/plonk/vanishing/prover.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, iter}; -use ff::Field; +use halo2_middleware::ff::Field; use group::Curve; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; diff --git a/backend/src/plonk/vanishing/verifier.rs b/backend/src/plonk/vanishing/verifier.rs index 0881dfb2c0..05ccb02a5b 100644 --- a/backend/src/plonk/vanishing/verifier.rs +++ b/backend/src/plonk/vanishing/verifier.rs @@ -1,6 +1,6 @@ use std::iter; -use ff::Field; +use halo2_middleware::ff::Field; use crate::{ arithmetic::CurveAffine, diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs index 5d613227ff..d92c2ff985 100644 --- a/backend/src/plonk/verifier.rs +++ b/backend/src/plonk/verifier.rs @@ -1,4 +1,4 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; use std::iter; diff --git a/backend/src/plonk/verifier/batch.rs b/backend/src/plonk/verifier/batch.rs index ba3e2419e6..d52f96ab0c 100644 --- a/backend/src/plonk/verifier/batch.rs +++ b/backend/src/plonk/verifier/batch.rs @@ -1,4 +1,4 @@ -use ff::FromUniformBytes; +use halo2_middleware::ff::FromUniformBytes; use group::ff::Field; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/backend/src/poly.rs b/backend/src/poly.rs index 100ee10eb8..08dcc0c455 100644 --- a/backend/src/poly.rs +++ b/backend/src/poly.rs @@ -4,10 +4,10 @@ use crate::arithmetic::parallelize; use crate::helpers::SerdePrimeField; -use crate::plonk::Assigned; use crate::SerdeFormat; +use halo2_middleware::plonk::Assigned; -use group::ff::{BatchInvert, Field}; +use halo2_middleware::ff::{BatchInvert, Field}; use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; diff 
--git a/backend/src/poly/commitment.rs b/backend/src/poly/commitment.rs index feae085655..78b17fc808 100644 --- a/backend/src/poly/commitment.rs +++ b/backend/src/poly/commitment.rs @@ -5,7 +5,7 @@ use super::{ }; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::CurveAffine; use rand_core::RngCore; use std::{ diff --git a/backend/src/poly/domain.rs b/backend/src/poly/domain.rs index ae9b8bf9ae..f25f183d70 100644 --- a/backend/src/poly/domain.rs +++ b/backend/src/poly/domain.rs @@ -1,14 +1,12 @@ //! Contains utilities for performing polynomial arithmetic over an evaluation //! domain that is of a suitable size for the application. -use crate::{ - arithmetic::{best_fft, parallelize}, - plonk::Assigned, -}; +use crate::arithmetic::{best_fft, parallelize}; +use halo2_middleware::plonk::Assigned; use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, Rotation}; -use ff::WithSmallOrderMulGroup; use group::ff::{BatchInvert, Field}; +use halo2_middleware::ff::WithSmallOrderMulGroup; use std::marker::PhantomData; diff --git a/backend/src/poly/ipa/commitment.rs b/backend/src/poly/ipa/commitment.rs index 7be053c49c..c479ea1ce1 100644 --- a/backend/src/poly/ipa/commitment.rs +++ b/backend/src/poly/ipa/commitment.rs @@ -234,7 +234,7 @@ mod test { use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; use crate::poly::ipa::msm::MSMIPA; - use ff::Field; + use halo2_middleware::ff::Field; use group::Curve; #[test] @@ -291,7 +291,7 @@ mod test { fn test_opening_proof() { const K: u32 = 6; - use ff::Field; + use halo2_middleware::ff::Field; use rand_core::OsRng; use super::super::commitment::{Blind, Params}; diff --git a/backend/src/poly/ipa/commitment/prover.rs b/backend/src/poly/ipa/commitment/prover.rs index 344dbc0e65..ee92c7677f 100644 --- a/backend/src/poly/ipa/commitment/prover.rs +++ 
b/backend/src/poly/ipa/commitment/prover.rs @@ -1,4 +1,4 @@ -use ff::Field; +use halo2_middleware::ff::Field; use rand_core::RngCore; use super::ParamsIPA; diff --git a/backend/src/poly/ipa/msm.rs b/backend/src/poly/ipa/msm.rs index a615ddce49..921b95587b 100644 --- a/backend/src/poly/ipa/msm.rs +++ b/backend/src/poly/ipa/msm.rs @@ -1,6 +1,6 @@ use crate::arithmetic::{best_multiexp, CurveAffine}; use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; -use ff::Field; +use halo2_middleware::ff::Field; use group::Group; use std::collections::BTreeMap; diff --git a/backend/src/poly/ipa/multiopen.rs b/backend/src/poly/ipa/multiopen.rs index b78acb5934..1df7f41daa 100644 --- a/backend/src/poly/ipa/multiopen.rs +++ b/backend/src/poly/ipa/multiopen.rs @@ -5,7 +5,7 @@ use super::*; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; use std::collections::{BTreeMap, BTreeSet}; mod prover; diff --git a/backend/src/poly/ipa/multiopen/prover.rs b/backend/src/poly/ipa/multiopen/prover.rs index 2ae745d457..3510756812 100644 --- a/backend/src/poly/ipa/multiopen/prover.rs +++ b/backend/src/poly/ipa/multiopen/prover.rs @@ -7,7 +7,7 @@ use crate::poly::query::ProverQuery; use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; -use ff::Field; +use halo2_middleware::ff::Field; use group::Curve; use rand_core::RngCore; use std::io; diff --git a/backend/src/poly/ipa/multiopen/verifier.rs b/backend/src/poly/ipa/multiopen/verifier.rs index d559e33384..7910a0662e 100644 --- a/backend/src/poly/ipa/multiopen/verifier.rs +++ b/backend/src/poly/ipa/multiopen/verifier.rs @@ -1,6 +1,6 @@ use std::fmt::Debug; -use ff::Field; +use halo2_middleware::ff::Field; use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; use crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; diff --git a/backend/src/poly/ipa/strategy.rs 
b/backend/src/poly/ipa/strategy.rs index d2d1b3d364..2064c89478 100644 --- a/backend/src/poly/ipa/strategy.rs +++ b/backend/src/poly/ipa/strategy.rs @@ -9,7 +9,7 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use ff::Field; +use halo2_middleware::ff::Field; use group::Curve; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/backend/src/poly/kzg/commitment.rs b/backend/src/poly/kzg/commitment.rs index 114b9ac013..a89bd12ffa 100644 --- a/backend/src/poly/kzg/commitment.rs +++ b/backend/src/poly/kzg/commitment.rs @@ -4,7 +4,7 @@ use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, Par use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; use crate::SerdeFormat; -use ff::{Field, PrimeField}; +use halo2_middleware::ff::{Field, PrimeField}; use group::{prime::PrimeCurveAffine, Curve, Group}; use halo2curves::pairing::Engine; use halo2curves::CurveExt; @@ -365,7 +365,7 @@ mod test { use crate::poly::commitment::ParamsProver; use crate::poly::commitment::{Blind, Params}; use crate::poly::kzg::commitment::ParamsKZG; - use ff::Field; + use halo2_middleware::ff::Field; #[test] fn test_commit_lagrange() { diff --git a/backend/src/poly/kzg/msm.rs b/backend/src/poly/kzg/msm.rs index f9b8c284bd..6244209965 100644 --- a/backend/src/poly/kzg/msm.rs +++ b/backend/src/poly/kzg/msm.rs @@ -37,7 +37,7 @@ where /// Prepares all scalars in the MSM to linear combination pub fn combine_with_base(&mut self, base: E::Fr) { - use ff::Field; + use halo2_middleware::ff::Field; let mut acc = E::Fr::ONE; if !self.scalars.is_empty() { for scalar in self.scalars.iter_mut().rev() { diff --git a/backend/src/poly/kzg/multiopen/gwc.rs b/backend/src/poly/kzg/multiopen/gwc.rs index 3fd28dd00a..8c8e056e83 100644 --- a/backend/src/poly/kzg/multiopen/gwc.rs +++ b/backend/src/poly/kzg/multiopen/gwc.rs @@ -5,7 +5,7 @@ pub use prover::ProverGWC; pub use verifier::VerifierGWC; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use 
halo2_middleware::ff::Field; use std::marker::PhantomData; #[derive(Clone, Copy, Debug)] diff --git a/backend/src/poly/kzg/multiopen/gwc/verifier.rs b/backend/src/poly/kzg/multiopen/gwc/verifier.rs index fcfda6941f..261f5e2234 100644 --- a/backend/src/poly/kzg/multiopen/gwc/verifier.rs +++ b/backend/src/poly/kzg/multiopen/gwc/verifier.rs @@ -13,7 +13,7 @@ use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; diff --git a/backend/src/poly/kzg/multiopen/shplonk.rs b/backend/src/poly/kzg/multiopen/shplonk.rs index d0814e83e3..5f963f4049 100644 --- a/backend/src/poly/kzg/multiopen/shplonk.rs +++ b/backend/src/poly/kzg/multiopen/shplonk.rs @@ -3,7 +3,7 @@ mod verifier; use crate::multicore::{IntoParallelIterator, ParallelIterator}; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; pub use prover::ProverSHPLONK; use std::collections::BTreeSet; pub use verifier::VerifierSHPLONK; @@ -142,7 +142,7 @@ where #[cfg(test)] mod proptests { use super::{construct_intermediate_sets, Commitment, IntermediateSets}; - use ff::FromUniformBytes; + use halo2_middleware::ff::FromUniformBytes; use halo2curves::pasta::Fp; use proptest::{collection::vec, prelude::*, sample::select}; use std::convert::TryFrom; diff --git a/backend/src/poly/kzg/multiopen/shplonk/prover.rs b/backend/src/poly/kzg/multiopen/shplonk/prover.rs index 5001d69094..e857dc59bd 100644 --- a/backend/src/poly/kzg/multiopen/shplonk/prover.rs +++ b/backend/src/poly/kzg/multiopen/shplonk/prover.rs @@ -13,7 +13,7 @@ use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use ff::Field; +use halo2_middleware::ff::Field; use group::Curve; use 
halo2curves::pairing::Engine; use halo2curves::CurveExt; diff --git a/backend/src/poly/kzg/multiopen/shplonk/verifier.rs b/backend/src/poly/kzg/multiopen/shplonk/verifier.rs index 5d03940177..f5a4d824f6 100644 --- a/backend/src/poly/kzg/multiopen/shplonk/verifier.rs +++ b/backend/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -15,7 +15,7 @@ use crate::poly::kzg::strategy::GuardKZG; use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; use std::ops::MulAssign; diff --git a/backend/src/poly/kzg/strategy.rs b/backend/src/poly/kzg/strategy.rs index ee80d800ac..78d182fbf6 100644 --- a/backend/src/poly/kzg/strategy.rs +++ b/backend/src/poly/kzg/strategy.rs @@ -10,7 +10,7 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::{ pairing::{Engine, MultiMillerLoop}, CurveAffine, CurveExt, diff --git a/backend/src/poly/multiopen_test.rs b/backend/src/poly/multiopen_test.rs index 47c6731167..b961e7d05d 100644 --- a/backend/src/poly/multiopen_test.rs +++ b/backend/src/poly/multiopen_test.rs @@ -14,7 +14,7 @@ mod test { Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, TranscriptReadBuffer, TranscriptWriterBuffer, }; - use ff::WithSmallOrderMulGroup; + use halo2_middleware::ff::WithSmallOrderMulGroup; use group::Curve; use rand_core::OsRng; diff --git a/common/Cargo.toml b/common/Cargo.toml index c7ea0882a0..5cc521d3be 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -37,6 +37,7 @@ rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" +halo2_middleware = { path = "../middleware" } # Developer tooling dependencies plotters = { version = "0.3.0", 
default-features = false, optional = true } diff --git a/common/src/arithmetic.rs b/common/src/arithmetic.rs new file mode 100644 index 0000000000..3ff8e76c76 --- /dev/null +++ b/common/src/arithmetic.rs @@ -0,0 +1,53 @@ +use crate::multicore; + +/// This utility function will parallelize an operation that is to be +/// performed over a mutable slice. +pub fn parallelize(v: &mut [T], f: F) { + // Algorithm rationale: + // + // Using the stdlib `chunks_mut` will lead to severe load imbalance. + // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 + // if the division is not exact, the last chunk will be the remainder. + // + // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, + // There will be 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. + // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, + // a load imbalance of 2x. + // + // Instead we can divide work into chunks of size + // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 + // + // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut + // + // See also OpenMP spec (page 60) + // http://www.openmp.org/mp-documents/openmp-4.5.pdf + // "When no chunk_size is specified, the iteration space is divided into chunks + // that are approximately equal in size, and at most one chunk is distributed to + // each thread. The size of the chunks is unspecified in this case." + // This implies chunks are the same size ±1 + + let f = &f; + let total_iters = v.len(); + let num_threads = multicore::current_num_threads(); + let base_chunk_size = total_iters / num_threads; + let cutoff_chunk_id = total_iters % num_threads; + let split_pos = cutoff_chunk_id * (base_chunk_size + 1); + let (v_hi, v_lo) = v.split_at_mut(split_pos); + + multicore::scope(|scope| { + // Skip special-case: number of iterations is cleanly divided by number of threads. 
+ if cutoff_chunk_id != 0 { + for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { + let offset = chunk_id * (base_chunk_size + 1); + scope.spawn(move |_| f(chunk, offset)); + } + } + // Skip special-case: fewer iterations than number of threads. + if base_chunk_size != 0 { + for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { + let offset = split_pos + (chunk_id * base_chunk_size); + scope.spawn(move |_| f(chunk, offset)); + } + } + }); +} diff --git a/common/src/helpers.rs b/common/src/helpers.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/common/src/lib.rs b/common/src/lib.rs index e69de29bb2..a809a4c9ad 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -0,0 +1,5 @@ +pub mod arithmetic; +pub mod helpers; +pub mod multicore; +pub mod plonk; +pub mod poly; diff --git a/common/src/multicore.rs b/common/src/multicore.rs new file mode 100644 index 0000000000..4d30b91a8b --- /dev/null +++ b/common/src/multicore.rs @@ -0,0 +1,38 @@ +pub use rayon::{ + current_num_threads, + iter::{IndexedParallelIterator, IntoParallelRefIterator}, + iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, + join, scope, + slice::ParallelSliceMut, + Scope, +}; + +pub trait TryFoldAndReduce { + /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, + /// falling back on `Iterator::try_fold` when the `multicore` feature flag is + /// disabled. + /// The `try_fold_and_reduce` function can only be called by an iter with + /// `Result` item type because the `fold_op` must meet the trait + /// bounds of both `try_fold` and `try_reduce` from rayon. 
+ fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result; +} + +impl TryFoldAndReduce for I +where + T: Send + Sync, + E: Send + Sync, + I: rayon::iter::ParallelIterator>, +{ + fn try_fold_and_reduce( + self, + identity: impl Fn() -> T + Send + Sync, + fold_op: impl Fn(T, Result) -> Result + Send + Sync, + ) -> Result { + self.try_fold(&identity, &fold_op) + .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) + } +} diff --git a/common/src/plonk.rs b/common/src/plonk.rs new file mode 100644 index 0000000000..695a48ee9a --- /dev/null +++ b/common/src/plonk.rs @@ -0,0 +1,1340 @@ +mod lookup; +pub mod permutation; +mod shuffle; + +// use super::{lookup, permutation, shuffle}; +use core::cmp::max; +use core::ops::{Add, Mul}; +use halo2_middleware::circuit::{ + Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, + Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, +}; +use halo2_middleware::ff::Field; +use halo2_middleware::metadata; +use halo2_middleware::poly::Rotation; +use sealed::SealedPhase; +use std::collections::HashMap; +use std::iter::{Product, Sum}; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +// TODO: No sealed Phase on the backend, only in the frontend! +pub(crate) mod sealed { + /// Phase of advice column + #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] + pub struct Phase(pub(crate) u8); + + impl Phase { + pub fn prev(&self) -> Option { + self.0.checked_sub(1).map(Phase) + } + } + + impl SealedPhase for Phase { + fn to_sealed(self) -> Phase { + self + } + } + + /// Sealed trait to help keep `Phase` private. 
+ pub trait SealedPhase { + fn to_sealed(self) -> Phase; + } +} + +/// Phase of advice column +pub trait Phase: SealedPhase {} + +impl Phase for P {} + +/// First phase +#[derive(Debug)] +pub struct FirstPhase; + +impl SealedPhase for super::FirstPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(0) + } +} + +/// Second phase +#[derive(Debug)] +pub struct SecondPhase; + +impl SealedPhase for super::SecondPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(1) + } +} + +/// Third phase +#[derive(Debug)] +pub struct ThirdPhase; + +impl SealedPhase for super::ThirdPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(2) + } +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl FixedQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, + /// Phase of this advice column + pub(crate) phase: sealed::Phase, +} + +impl AdviceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } + + /// Phase of this advice column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQuery { + /// Query index + pub(crate) index: Option, + /// 
Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl InstanceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. +#[derive(Clone, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQuery), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQuery), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQuery), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl Into> for Expression { + fn into(self) -> ExpressionMid { + match self { + Expression::Constant(c) => ExpressionMid::Constant(c), + Expression::Fixed(FixedQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Fixed(FixedQueryMid { + column_index, + rotation, + }), + Expression::Advice(AdviceQuery { + column_index, + rotation, + phase, + .. + }) => ExpressionMid::Advice(AdviceQueryMid { + column_index, + rotation, + phase: phase.0, + }), + Expression::Instance(InstanceQuery { + column_index, + rotation, + .. 
+ }) => ExpressionMid::Instance(InstanceQueryMid { + column_index, + rotation, + }), + Expression::Challenge(c) => ExpressionMid::Challenge(c), + Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), + Expression::Sum(lhs, rhs) => { + ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Product(lhs, rhs) => { + ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), + } + } +} + +impl Expression { + /// Evaluate the polynomial using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + 
sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + product(a, b) + } + Expression::Scaled(a, f) => { + let a = a.evaluate( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + scaled(a, *f) + } + } + } + + /// Evaluate the polynomial lazily using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate_lazy( + &self, + constant: &impl Fn(F) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + zero: &T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + let b = b.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let (a, b) = if a.complexity() <= b.complexity() { + (a, b) + } else { + (b, a) + }; + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + 
sum, + product, + scaled, + zero, + ); + + if a == *zero { + a + } else { + let b = b.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + product(a, b) + } + } + Expression::Scaled(a, f) => { + let a = a.evaluate_lazy( + constant, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + scaled(a, *f) + } + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Fixed(query) => { + write!( + writer, + "fixed[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Advice(query) => { + write!( + writer, + "advice[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Instance(query) => { + write!( + writer, + "instance[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Challenge(challenge) => { + write!(writer, "challenge[{}]", challenge.index()) + } + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Scaled(a, f) => { + a.write_identifier(writer)?; + write!(writer, "*{f:?}") + } + } + } + + /// Identifier for this expression. Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
+ pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.degree(), + Expression::Sum(a, b) => max(a.degree(), b.degree()), + Expression::Product(a, b) => a.degree() + b.degree(), + Expression::Scaled(poly, _) => poly.degree(), + } + } + + /// Approximate the computational complexity of this expression. + pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + Expression::Scaled(poly, _) => poly.complexity() + 30, + } + } + + /// Square this expression. + pub fn square(self) -> Self { + self.clone() * self + } +} + +impl std::fmt::Debug for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), + // Skip enum variant and print query struct directly to maintain backwards compatibility. 
+ Expression::Fixed(query) => { + let mut debug_struct = f.debug_struct("Fixed"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Advice(query) => { + let mut debug_struct = f.debug_struct("Advice"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation); + // Only show advice's phase if it's not in first phase. + if query.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &query.phase); + } + debug_struct.finish() + } + Expression::Instance(query) => { + let mut debug_struct = f.debug_struct("Instance"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Challenge(challenge) => { + f.debug_tuple("Challenge").field(challenge).finish() + } + Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), + Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), + Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), + Expression::Scaled(poly, scalar) => { + f.debug_tuple("Scaled").field(poly).field(scalar).finish() + } + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + 
fn sub(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Scaled(Box::new(self), rhs) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} + +/// Gate +#[derive(Clone, Debug)] +pub struct Gate { + name: String, + constraint_names: Vec, + polys: Vec>, +} + +impl Gate { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. + pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } +} + +/// Represents the minimal parameters that determine a `ConstraintSystem`. 
+#[allow(dead_code)] +pub struct PinnedConstraintSystem<'a, F: Field> { + num_fixed_columns: &'a usize, + num_advice_columns: &'a usize, + num_instance_columns: &'a usize, + num_selectors: &'a usize, + num_challenges: &'a usize, + advice_column_phase: &'a Vec, + challenge_phase: &'a Vec, + gates: PinnedGates<'a, F>, + advice_queries: &'a Vec<(Column, Rotation)>, + instance_queries: &'a Vec<(Column, Rotation)>, + fixed_queries: &'a Vec<(Column, Rotation)>, + permutation: &'a permutation::Argument, + lookups: &'a Vec>, + shuffles: &'a Vec>, + constants: &'a Vec>, + minimum_degree: &'a Option, +} + +impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); + debug_struct + .field("num_fixed_columns", self.num_fixed_columns) + .field("num_advice_columns", self.num_advice_columns) + .field("num_instance_columns", self.num_instance_columns) + .field("num_selectors", self.num_selectors); + // Only show multi-phase related fields if it's used. 
+ if *self.num_challenges > 0 { + debug_struct + .field("num_challenges", self.num_challenges) + .field("advice_column_phase", self.advice_column_phase) + .field("challenge_phase", self.challenge_phase); + } + debug_struct + .field("gates", &self.gates) + .field("advice_queries", self.advice_queries) + .field("instance_queries", self.instance_queries) + .field("fixed_queries", self.fixed_queries) + .field("permutation", self.permutation) + .field("lookups", self.lookups); + if !self.shuffles.is_empty() { + debug_struct.field("shuffles", self.shuffles); + } + debug_struct + .field("constants", self.constants) + .field("minimum_degree", self.minimum_degree); + debug_struct.finish() + } +} + +struct PinnedGates<'a, F: Field>(&'a Vec>); + +impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + f.debug_list() + .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) + .finish() + } +} + +struct QueriesMap { + advice_map: HashMap<(Column, Rotation), usize>, + instance_map: HashMap<(Column, Rotation), usize>, + fixed_map: HashMap<(Column, Rotation), usize>, + advice: Vec<(Column, Rotation)>, + instance: Vec<(Column, Rotation)>, + fixed: Vec<(Column, Rotation)>, +} + +impl QueriesMap { + fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { + *self.advice_map.entry((col, rot)).or_insert_with(|| { + self.advice.push((col, rot)); + self.advice.len() - 1 + }) + } + fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { + *self.instance_map.entry((col, rot)).or_insert_with(|| { + self.instance.push((col, rot)); + self.instance.len() - 1 + }) + } + fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { + *self.fixed_map.entry((col, rot)).or_insert_with(|| { + self.fixed.push((col, rot)); + self.fixed.len() - 1 + }) + } +} + +impl QueriesMap { + fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { + match expr { + 
ExpressionMid::Constant(c) => Expression::Constant(*c), + ExpressionMid::Fixed(query) => { + let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); + let index = self.add_fixed(col, rot); + Expression::Fixed(FixedQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Advice(query) => { + let (col, rot) = ( + Column::new(query.column_index, Advice { phase: query.phase }), + query.rotation, + ); + let index = self.add_advice(col, rot); + Expression::Advice(AdviceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + phase: sealed::Phase(query.phase), + }) + } + ExpressionMid::Instance(query) => { + let (col, rot) = (Column::new(query.column_index, Instance), query.rotation); + let index = self.add_instance(col, rot); + Expression::Instance(InstanceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Challenge(c) => Expression::Challenge(*c), + ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), + ExpressionMid::Sum(lhs, rhs) => Expression::Sum( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Product(lhs, rhs) => Expression::Product( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), + } + } +} + +/// List of queries (columns and rotations) used by a circuit +#[derive(Debug, Clone)] +pub struct Queries { + /// List of unique advice queries + pub advice: Vec<(Column, Rotation)>, + /// List of unique instance queries + pub instance: Vec<(Column, Rotation)>, + /// List of unique fixed queries + pub fixed: Vec<(Column, Rotation)>, + /// Contains an integer for each advice column + /// identifying how many distinct queries it has + /// so far; should be same length as cs.num_advice_columns. 
+ pub num_advice_queries: Vec, +} + +impl Queries { + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) + + 1 // for at least one row + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } +} + +/// Collect queries used in gates while mapping those gates to equivalent ones with indexed +/// query references in the expressions. 
+fn collect_queries_gates( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], + }) + .collect() +} + +/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. +fn collect_queries_lookups( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} + +/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. +fn collect_queries_shuffles( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} + +/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the +/// expressions of gates, lookups and shuffles into equivalent ones with indexed query +/// references. 
+pub(crate) fn collect_queries( + cs2: &ConstraintSystemV2Backend, +) -> ( + Queries, + Vec>, + Vec>, + Vec>, +) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = collect_queries_gates(cs2, &mut queries); + let lookups = collect_queries_lookups(cs2, &mut queries); + let shuffles = collect_queries_shuffles(cs2, &mut queries); + + // Each column used in a copy constraint involves a query at rotation current. + for column in cs2.permutation.get_columns() { + match column.column_type { + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } + }; + } + + let mut num_advice_queries = vec![0; cs2.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; + } + + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystem { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. 
+ pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. + pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(Column, Rotation)>, + pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, + + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. 
+ pub(crate) constants: Vec>, + + pub(crate) minimum_degree: Option, +} + +impl From> for ConstraintSystem { + fn from(cs2: ConstraintSystemV2Backend) -> Self { + let (queries, gates, lookups, shuffles) = collect_queries(&cs2); + ConstraintSystem { + num_fixed_columns: cs2.num_fixed_columns, + num_advice_columns: cs2.num_advice_columns, + num_instance_columns: cs2.num_instance_columns, + num_selectors: 0, + num_challenges: cs2.num_challenges, + unblinded_advice_columns: cs2.unblinded_advice_columns, + advice_column_phase: cs2 + .advice_column_phase + .into_iter() + .map(sealed::Phase) + .collect(), + challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), + gates, + advice_queries: queries.advice, + num_advice_queries: queries.num_advice_queries, + instance_queries: queries.instance, + fixed_queries: queries.fixed, + permutation: cs2.permutation.into(), + lookups, + shuffles, + general_column_annotations: cs2.general_column_annotations, + constants: Vec::new(), + minimum_degree: None, + } + } +} + +impl Default for ConstraintSystem { + fn default() -> ConstraintSystem { + ConstraintSystem { + num_fixed_columns: 0, + num_advice_columns: 0, + num_instance_columns: 0, + num_selectors: 0, + num_challenges: 0, + unblinded_advice_columns: Vec::new(), + advice_column_phase: Vec::new(), + challenge_phase: Vec::new(), + gates: vec![], + fixed_queries: Vec::new(), + advice_queries: Vec::new(), + num_advice_queries: Vec::new(), + instance_queries: Vec::new(), + permutation: permutation::Argument::new(), + lookups: Vec::new(), + shuffles: Vec::new(), + general_column_annotations: HashMap::new(), + constants: vec![], + minimum_degree: None, + } + } +} + +impl ConstraintSystem { + /// Obtain a pinned version of this constraint system; a structure with the + /// minimal parameters needed to determine the rest of the constraint + /// system. 
+ pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { + PinnedConstraintSystem { + num_fixed_columns: &self.num_fixed_columns, + num_advice_columns: &self.num_advice_columns, + num_instance_columns: &self.num_instance_columns, + num_selectors: &self.num_selectors, + num_challenges: &self.num_challenges, + advice_column_phase: &self.advice_column_phase, + challenge_phase: &self.challenge_phase, + gates: PinnedGates(&self.gates), + fixed_queries: &self.fixed_queries, + advice_queries: &self.advice_queries, + instance_queries: &self.instance_queries, + permutation: &self.permutation, + lookups: &self.lookups, + shuffles: &self.shuffles, + constants: &self.constants, + minimum_degree: &self.minimum_degree, + } + } + + pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + panic!("get_advice_query_index called for non-existent query"); + } + + pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + panic!("get_fixed_query_index called for non-existent query"); + } + + pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + panic!("get_instance_query_index called for non-existent query"); + } + + pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice(_) => { + self.get_advice_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => { + self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Instance => { + 
self.get_instance_query_index(Column::::try_from(column).unwrap(), at) + } + } + } + + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { + let max_phase = self + .advice_column_phase + .iter() + .max() + .map(|phase| phase.0) + .unwrap_or_default(); + (0..=max_phase).map(sealed::Phase) + } + + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = self.permutation.required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. + degree = std::cmp::max( + degree, + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0), + ); + + std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. 
+ let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) 
+ + 1 // for at least one row + } + + /// Returns number of fixed columns + pub fn num_fixed_columns(&self) -> usize { + self.num_fixed_columns + } + + /// Returns number of advice columns + pub fn num_advice_columns(&self) -> usize { + self.num_advice_columns + } + + /// Returns number of instance columns + pub fn num_instance_columns(&self) -> usize { + self.num_instance_columns + } + + /// Returns number of selectors + pub fn num_selectors(&self) -> usize { + self.num_selectors + } + + /// Returns number of challenges + pub fn num_challenges(&self) -> usize { + self.num_challenges + } + + /// Returns phase of advice columns + pub fn advice_column_phase(&self) -> Vec { + self.advice_column_phase + .iter() + .map(|phase| phase.0) + .collect() + } + + /// Returns phase of challenges + pub fn challenge_phase(&self) -> Vec { + self.challenge_phase.iter().map(|phase| phase.0).collect() + } + + /// Returns gates + pub fn gates(&self) -> &Vec> { + &self.gates + } + + /// Returns general column annotations + pub fn general_column_annotations(&self) -> &HashMap { + &self.general_column_annotations + } + + /// Returns advice queries + pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { + &self.advice_queries + } + + /// Returns instance queries + pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { + &self.instance_queries + } + + /// Returns fixed queries + pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { + &self.fixed_queries + } + + /// Returns permutation argument + pub fn permutation(&self) -> &permutation::Argument { + &self.permutation + } + + /// Returns lookup arguments + pub fn lookups(&self) -> &Vec> { + &self.lookups + } + + /// Returns shuffle arguments + pub fn shuffles(&self) -> &Vec> { + &self.shuffles + } + + /// Returns constants + pub fn constants(&self) -> &Vec> { + &self.constants + } +} diff --git a/backend/src/plonk/lookup.rs b/common/src/plonk/lookup.rs similarity index 99% rename from backend/src/plonk/lookup.rs rename to 
common/src/plonk/lookup.rs index 0485ab7a60..5be61e7111 100644 --- a/backend/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -1,5 +1,5 @@ use super::circuit::Expression; -use ff::Field; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; diff --git a/backend/src/plonk/lookup/prover.rs b/common/src/plonk/lookup/prover.rs similarity index 99% rename from backend/src/plonk/lookup/prover.rs rename to common/src/plonk/lookup/prover.rs index 71066da9e6..ff1e169983 100644 --- a/backend/src/plonk/lookup/prover.rs +++ b/common/src/plonk/lookup/prover.rs @@ -12,7 +12,7 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use ff::WithSmallOrderMulGroup; +use halo2_middleware::ff::WithSmallOrderMulGroup; use group::{ ff::{BatchInvert, Field}, Curve, diff --git a/backend/src/plonk/lookup/verifier.rs b/common/src/plonk/lookup/verifier.rs similarity index 99% rename from backend/src/plonk/lookup/verifier.rs rename to common/src/plonk/lookup/verifier.rs index 11b8d30b49..773b6f8393 100644 --- a/backend/src/plonk/lookup/verifier.rs +++ b/common/src/plonk/lookup/verifier.rs @@ -10,7 +10,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; pub struct PermutationCommitments { diff --git a/backend/src/plonk/permutation.rs b/common/src/plonk/permutation.rs similarity index 100% rename from backend/src/plonk/permutation.rs rename to common/src/plonk/permutation.rs diff --git a/backend/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs similarity index 99% rename from backend/src/plonk/permutation/keygen.rs rename to common/src/plonk/permutation/keygen.rs index d8e98562a4..5fd5529807 100644 --- a/backend/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -1,4 +1,4 @@ -use ff::{Field, PrimeField}; +use halo2_middleware::ff::{Field, 
PrimeField}; use group::Curve; use super::{Argument, ProvingKey, VerifyingKey}; diff --git a/backend/src/plonk/permutation/prover.rs b/common/src/plonk/permutation/prover.rs similarity index 99% rename from backend/src/plonk/permutation/prover.rs rename to common/src/plonk/permutation/prover.rs index cd4ad43797..eb155663be 100644 --- a/backend/src/plonk/permutation/prover.rs +++ b/common/src/plonk/permutation/prover.rs @@ -1,4 +1,4 @@ -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; use group::{ ff::{BatchInvert, Field}, Curve, diff --git a/backend/src/plonk/permutation/verifier.rs b/common/src/plonk/permutation/verifier.rs similarity index 99% rename from backend/src/plonk/permutation/verifier.rs rename to common/src/plonk/permutation/verifier.rs index 96ec55ef41..195a771999 100644 --- a/backend/src/plonk/permutation/verifier.rs +++ b/common/src/plonk/permutation/verifier.rs @@ -1,4 +1,4 @@ -use ff::{Field, PrimeField}; +use halo2_middleware::ff::{Field, PrimeField}; use std::iter; use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; diff --git a/backend/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs similarity index 98% rename from backend/src/plonk/shuffle.rs rename to common/src/plonk/shuffle.rs index 77fc8a52d7..26b1596ff0 100644 --- a/backend/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -1,5 +1,5 @@ use super::circuit::Expression; -use ff::Field; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; diff --git a/backend/src/plonk/shuffle/prover.rs b/common/src/plonk/shuffle/prover.rs similarity index 99% rename from backend/src/plonk/shuffle/prover.rs rename to common/src/plonk/shuffle/prover.rs index f730a8ecf7..5740792b15 100644 --- a/backend/src/plonk/shuffle/prover.rs +++ b/common/src/plonk/shuffle/prover.rs @@ -11,7 +11,7 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use ff::WithSmallOrderMulGroup; +use halo2_middleware::ff::WithSmallOrderMulGroup; use 
group::{ff::BatchInvert, Curve}; use halo2_middleware::poly::Rotation; use rand_core::RngCore; diff --git a/backend/src/plonk/shuffle/verifier.rs b/common/src/plonk/shuffle/verifier.rs similarity index 99% rename from backend/src/plonk/shuffle/verifier.rs rename to common/src/plonk/shuffle/verifier.rs index b591bcd654..c9806455cc 100644 --- a/backend/src/plonk/shuffle/verifier.rs +++ b/common/src/plonk/shuffle/verifier.rs @@ -8,7 +8,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; pub struct Committed { diff --git a/common/src/poly.rs b/common/src/poly.rs new file mode 100644 index 0000000000..edd4445d5f --- /dev/null +++ b/common/src/poly.rs @@ -0,0 +1,323 @@ +//! Contains utilities for performing arithmetic over univariate polynomials in +//! various forms, including computing commitments to them and provably opening +//! the committed polynomials at arbitrary points. + +use crate::arithmetic::parallelize; +use crate::helpers::SerdePrimeField; +use crate::SerdeFormat; +use halo2_middleware::plonk::Assigned; + +use halo2_middleware::ff::{BatchInvert, Field}; +use halo2_middleware::poly::Rotation; +use std::fmt::Debug; +use std::io; +use std::marker::PhantomData; +use std::ops::{Add, Deref, DerefMut, Index, IndexMut, Mul, RangeFrom, RangeFull, Sub}; + +/// Generic commitment scheme structures +// pub mod commitment; +// mod domain; +// mod query; +// mod strategy; + +// /// Inner product argument commitment scheme +// pub mod ipa; + +// /// KZG commitment scheme +// pub mod kzg; + +// #[cfg(test)] +// mod multiopen_test; + +// pub use domain::*; +// pub use query::{ProverQuery, VerifierQuery}; +// pub use strategy::{Guard, VerificationStrategy}; + +/// This is an error that could occur during proving or circuit synthesis. 
+// TODO: these errors need to be cleaned up +#[derive(Debug)] +pub enum Error { + /// OpeningProof is not well-formed + OpeningError, + /// Caller needs to re-sample a point + SamplingError, +} + +/// The basis over which a polynomial is described. +pub trait Basis: Copy + Debug + Send + Sync {} + +/// The polynomial is defined as coefficients +#[derive(Clone, Copy, Debug)] +pub struct Coeff; +impl Basis for Coeff {} + +/// The polynomial is defined as coefficients of Lagrange basis polynomials +#[derive(Clone, Copy, Debug)] +pub struct LagrangeCoeff; +impl Basis for LagrangeCoeff {} + +/// The polynomial is defined as coefficients of Lagrange basis polynomials in +/// an extended size domain which supports multiplication +#[derive(Clone, Copy, Debug)] +pub struct ExtendedLagrangeCoeff; +impl Basis for ExtendedLagrangeCoeff {} + +/// Represents a univariate polynomial defined over a field and a particular +/// basis. +#[derive(Clone, Debug)] +pub struct Polynomial { + pub(crate) values: Vec, + pub(crate) _marker: PhantomData, +} + +impl Polynomial { + pub(crate) fn new_empty(size: usize, zero: F) -> Self { + Polynomial { + values: vec![zero; size], + _marker: PhantomData, + } + } +} + +impl Polynomial { + /// Obtains a polynomial in Lagrange form when given a vector of Lagrange + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. 
+ pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { + Polynomial { + values, + _marker: PhantomData, + } + } +} + +impl Index for Polynomial { + type Output = F; + + fn index(&self, index: usize) -> &F { + self.values.index(index) + } +} + +impl IndexMut for Polynomial { + fn index_mut(&mut self, index: usize) -> &mut F { + self.values.index_mut(index) + } +} + +impl Index> for Polynomial { + type Output = [F]; + + fn index(&self, index: RangeFrom) -> &[F] { + self.values.index(index) + } +} + +impl IndexMut> for Polynomial { + fn index_mut(&mut self, index: RangeFrom) -> &mut [F] { + self.values.index_mut(index) + } +} + +impl Index for Polynomial { + type Output = [F]; + + fn index(&self, index: RangeFull) -> &[F] { + self.values.index(index) + } +} + +impl IndexMut for Polynomial { + fn index_mut(&mut self, index: RangeFull) -> &mut [F] { + self.values.index_mut(index) + } +} + +impl Deref for Polynomial { + type Target = [F]; + + fn deref(&self) -> &[F] { + &self.values[..] + } +} + +impl DerefMut for Polynomial { + fn deref_mut(&mut self) -> &mut [F] { + &mut self.values[..] + } +} + +impl Polynomial { + /// Iterate over the values, which are either in coefficient or evaluation + /// form depending on the basis `B`. + pub fn iter(&self) -> impl Iterator { + self.values.iter() + } + + /// Iterate over the values mutably, which are either in coefficient or + /// evaluation form depending on the basis `B`. + pub fn iter_mut(&mut self) -> impl Iterator { + self.values.iter_mut() + } + + /// Gets the size of this polynomial in terms of the number of + /// coefficients used to describe it. + pub fn num_coeffs(&self) -> usize { + self.values.len() + } +} + +impl Polynomial { + /// Reads polynomial from buffer using `SerdePrimeField::read`. 
+ pub(crate) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + let mut poly_len = [0u8; 4]; + reader.read_exact(&mut poly_len)?; + let poly_len = u32::from_be_bytes(poly_len); + + (0..poly_len) + .map(|_| F::read(reader, format)) + .collect::>>() + .map(|values| Self { + values, + _marker: PhantomData, + }) + } + + /// Writes polynomial to buffer using `SerdePrimeField::write`. + pub(crate) fn write( + &self, + writer: &mut W, + format: SerdeFormat, + ) -> io::Result<()> { + writer.write_all(&(self.values.len() as u32).to_be_bytes())?; + for value in self.values.iter() { + value.write(writer, format)?; + } + Ok(()) + } +} + +pub(crate) fn batch_invert_assigned( + assigned: Vec, LagrangeCoeff>>, +) -> Vec> { + let mut assigned_denominators: Vec<_> = assigned + .iter() + .map(|f| { + f.iter() + .map(|value| value.denominator()) + .collect::>() + }) + .collect(); + + assigned_denominators + .iter_mut() + .flat_map(|f| { + f.iter_mut() + // If the denominator is trivial, we can skip it, reducing the + // size of the batch inversion. 
+ .filter_map(|d| d.as_mut()) + }) + .batch_invert(); + + assigned + .iter() + .zip(assigned_denominators) + .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) + .collect() +} + +impl Polynomial, LagrangeCoeff> { + pub(crate) fn invert( + &self, + inv_denoms: impl Iterator + ExactSizeIterator, + ) -> Polynomial { + assert_eq!(inv_denoms.len(), self.values.len()); + Polynomial { + values: self + .values + .iter() + .zip(inv_denoms) + .map(|(a, inv_den)| a.numerator() * inv_den) + .collect(), + _marker: self._marker, + } + } +} + +impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { + type Output = Polynomial; + + fn add(mut self, rhs: &'a Polynomial) -> Polynomial { + parallelize(&mut self.values, |lhs, start| { + for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { + *lhs += *rhs; + } + }); + + self + } +} + +impl<'a, F: Field, B: Basis> Sub<&'a Polynomial> for Polynomial { + type Output = Polynomial; + + fn sub(mut self, rhs: &'a Polynomial) -> Polynomial { + parallelize(&mut self.values, |lhs, start| { + for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { + *lhs -= *rhs; + } + }); + + self + } +} + +impl Polynomial { + /// Rotates the values in a Lagrange basis polynomial by `Rotation` + pub fn rotate(&self, rotation: Rotation) -> Polynomial { + let mut values = self.values.clone(); + if rotation.0 < 0 { + values.rotate_right((-rotation.0) as usize); + } else { + values.rotate_left(rotation.0 as usize); + } + Polynomial { + values, + _marker: PhantomData, + } + } +} + +impl Mul for Polynomial { + type Output = Polynomial; + + fn mul(mut self, rhs: F) -> Polynomial { + if rhs == F::ZERO { + return Polynomial { + values: vec![F::ZERO; self.len()], + _marker: PhantomData, + }; + } + if rhs == F::ONE { + return self; + } + + parallelize(&mut self.values, |lhs, _| { + for lhs in lhs.iter_mut() { + *lhs *= rhs; + } + }); + + self + } +} + +impl<'a, F: Field, B: Basis> Sub for &'a 
Polynomial { + type Output = Polynomial; + + fn sub(self, rhs: F) -> Polynomial { + let mut res = self.clone(); + res.values[0] -= rhs; + res + } +} diff --git a/frontend/Cargo.toml b/frontend/Cargo.toml index bd71de0373..ada6ac6c30 100644 --- a/frontend/Cargo.toml +++ b/frontend/Cargo.toml @@ -37,6 +37,8 @@ rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" +halo2_middleware = { path = "../middleware" } +halo2_common = { path = "../common" } # Developer tooling dependencies plotters = { version = "0.3.0", default-features = false, optional = true } diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs new file mode 100644 index 0000000000..bd08c27c67 --- /dev/null +++ b/frontend/src/circuit.rs @@ -0,0 +1,594 @@ +//! Traits and structs for implementing circuit components. + +use crate::error::Error; +use crate::plonk::{Selector, TableColumn}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::ff::Field; +use halo2_middleware::plonk::Assigned; +use std::{fmt, marker::PhantomData}; + +mod value; +pub use value::Value; + +pub mod floor_planner; +pub use floor_planner::single_pass::SimpleFloorPlanner; + +pub mod layouter; +mod table_layouter; + +pub use table_layouter::{SimpleTableLayouter, TableLayouter}; + +/// A chip implements a set of instructions that can be used by gadgets. +/// +/// The chip stores state that is required at circuit synthesis time in +/// [`Chip::Config`], which can be fetched via [`Chip::config`]. +/// +/// The chip also loads any fixed configuration needed at synthesis time +/// using its own implementation of `load`, and stores it in [`Chip::Loaded`]. +/// This can be accessed via [`Chip::loaded`]. 
+pub trait Chip: Sized { + /// A type that holds the configuration for this chip, and any other state it may need + /// during circuit synthesis, that can be derived during [`Circuit::configure`]. + /// + /// [`Circuit::configure`]: crate::plonk::Circuit::configure + type Config: fmt::Debug + Clone; + + /// A type that holds any general chip state that needs to be loaded at the start of + /// [`Circuit::synthesize`]. This might simply be `()` for some chips. + /// + /// [`Circuit::synthesize`]: crate::plonk::Circuit::synthesize + type Loaded: fmt::Debug + Clone; + + /// The chip holds its own configuration. + fn config(&self) -> &Self::Config; + + /// Provides access to general chip state loaded at the beginning of circuit + /// synthesis. + /// + /// Panics if called before `Chip::load`. + fn loaded(&self) -> &Self::Loaded; +} + +/// Index of a region in a layouter +#[derive(Clone, Copy, Debug)] +pub struct RegionIndex(usize); + +impl From for RegionIndex { + fn from(idx: usize) -> RegionIndex { + RegionIndex(idx) + } +} + +impl std::ops::Deref for RegionIndex { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// Starting row of a region in a layouter +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct RegionStart(usize); + +impl From for RegionStart { + fn from(idx: usize) -> RegionStart { + RegionStart(idx) + } +} + +impl std::ops::Deref for RegionStart { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// A pointer to a cell within a circuit. +#[derive(Clone, Copy, Debug)] +pub struct Cell { + /// Identifies the region in which this cell resides. + pub region_index: RegionIndex, + /// The relative offset of this cell within its region. + pub row_offset: usize, + /// The column of this cell. + pub column: Column, +} + +/// An assigned cell. 
+#[derive(Clone, Debug)] +pub struct AssignedCell { + value: Value, + cell: Cell, + _marker: PhantomData, +} + +impl AssignedCell { + /// Returns the value of the [`AssignedCell`]. + pub fn value(&self) -> Value<&V> { + self.value.as_ref() + } + + /// Returns the cell. + pub fn cell(&self) -> Cell { + self.cell + } +} + +impl AssignedCell +where + for<'v> Assigned: From<&'v V>, +{ + /// Returns the field element value of the [`AssignedCell`]. + pub fn value_field(&self) -> Value> { + self.value.to_field() + } +} + +impl AssignedCell, F> { + /// Evaluates this assigned cell's value directly, performing an unbatched inversion + /// if necessary. + /// + /// If the denominator is zero, the returned cell's value is zero. + pub fn evaluate(self) -> AssignedCell { + AssignedCell { + value: self.value.evaluate(), + cell: self.cell, + _marker: Default::default(), + } + } +} + +impl AssignedCell +where + for<'v> Assigned: From<&'v V>, +{ + /// Copies the value to a given advice cell and constrains them to be equal. + /// + /// Returns an error if either this cell or the given cell are in columns + /// where equality has not been enabled. + pub fn copy_advice( + &self, + annotation: A, + region: &mut Region<'_, F>, + column: Column, + offset: usize, + ) -> Result + where + A: Fn() -> AR, + AR: Into, + { + let assigned_cell = + region.assign_advice(annotation, column, offset, || self.value.clone())?; + region.constrain_equal(assigned_cell.cell(), self.cell())?; + + Ok(assigned_cell) + } +} + +/// A region of the circuit in which a [`Chip`] can assign cells. +/// +/// Inside a region, the chip may freely use relative offsets; the [`Layouter`] will +/// treat these assignments as a single "region" within the circuit. +/// +/// The [`Layouter`] is allowed to optimise between regions as it sees fit. Chips must use +/// [`Region::constrain_equal`] to copy in variables assigned in other regions. 
+/// +/// TODO: It would be great if we could constrain the columns in these types to be +/// "logical" columns that are guaranteed to correspond to the chip (and have come from +/// `Chip::Config`). +#[derive(Debug)] +pub struct Region<'r, F: Field> { + region: &'r mut dyn layouter::RegionLayouter, +} + +impl<'r, F: Field> From<&'r mut dyn layouter::RegionLayouter> for Region<'r, F> { + fn from(region: &'r mut dyn layouter::RegionLayouter) -> Self { + Region { region } + } +} + +impl<'r, F: Field> Region<'r, F> { + /// Enables a selector at the given offset. + pub(crate) fn enable_selector( + &mut self, + annotation: A, + selector: &Selector, + offset: usize, + ) -> Result<(), Error> + where + A: Fn() -> AR, + AR: Into, + { + self.region + .enable_selector(&|| annotation().into(), selector, offset) + } + + /// Allows the circuit implementor to name/annotate a Column within a Region context. + /// + /// This is useful in order to improve the amount of information that `prover.verify()` + /// and `prover.assert_satisfied()` can provide. + pub fn name_column(&mut self, annotation: A, column: T) + where + A: Fn() -> AR, + AR: Into, + T: Into>, + { + self.region + .name_column(&|| annotation().into(), column.into()); + } + + /// Assign an advice column value (witness). + /// + /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. + pub fn assign_advice<'v, V, VR, A, AR>( + &'v mut self, + annotation: A, + column: Column, + offset: usize, + mut to: V, + ) -> Result, Error> + where + V: FnMut() -> Value + 'v, + for<'vr> Assigned: From<&'vr VR>, + A: Fn() -> AR, + AR: Into, + { + let mut value = Value::unknown(); + let cell = + self.region + .assign_advice(&|| annotation().into(), column, offset, &mut || { + let v = to(); + let value_f = v.to_field(); + value = v; + value_f + })?; + + Ok(AssignedCell { + value, + cell, + _marker: PhantomData, + }) + } + + /// Assigns a constant value to the column `advice` at `offset` within this region. 
+ /// + /// The constant value will be assigned to a cell within one of the fixed columns + /// configured via `ConstraintSystem::enable_constant`. + /// + /// Returns the advice cell. + pub fn assign_advice_from_constant( + &mut self, + annotation: A, + column: Column, + offset: usize, + constant: VR, + ) -> Result, Error> + where + for<'vr> Assigned: From<&'vr VR>, + A: Fn() -> AR, + AR: Into, + { + let cell = self.region.assign_advice_from_constant( + &|| annotation().into(), + column, + offset, + (&constant).into(), + )?; + + Ok(AssignedCell { + value: Value::known(constant), + cell, + _marker: PhantomData, + }) + } + + /// Assign the value of the instance column's cell at absolute location + /// `row` to the column `advice` at `offset` within this region. + /// + /// Returns the advice cell, and its value if known. + pub fn assign_advice_from_instance( + &mut self, + annotation: A, + instance: Column, + row: usize, + advice: Column, + offset: usize, + ) -> Result, Error> + where + A: Fn() -> AR, + AR: Into, + { + let (cell, value) = self.region.assign_advice_from_instance( + &|| annotation().into(), + instance, + row, + advice, + offset, + )?; + + Ok(AssignedCell { + value, + cell, + _marker: PhantomData, + }) + } + + /// Returns the value of the instance column's cell at absolute location `row`. + /// + /// This method is only provided for convenience; it does not create any constraints. + /// Callers still need to use [`Self::assign_advice_from_instance`] to constrain the + /// instance values in their circuit. + pub fn instance_value( + &mut self, + instance: Column, + row: usize, + ) -> Result, Error> { + self.region.instance_value(instance, row) + } + + /// Assign a fixed value. + /// + /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. 
+ pub fn assign_fixed<'v, V, VR, A, AR>( + &'v mut self, + annotation: A, + column: Column, + offset: usize, + mut to: V, + ) -> Result, Error> + where + V: FnMut() -> Value + 'v, + for<'vr> Assigned: From<&'vr VR>, + A: Fn() -> AR, + AR: Into, + { + let mut value = Value::unknown(); + let cell = + self.region + .assign_fixed(&|| annotation().into(), column, offset, &mut || { + let v = to(); + let value_f = v.to_field(); + value = v; + value_f + })?; + + Ok(AssignedCell { + value, + cell, + _marker: PhantomData, + }) + } + + /// Constrains a cell to have a constant value. + /// + /// Returns an error if the cell is in a column where equality has not been enabled. + pub fn constrain_constant(&mut self, cell: Cell, constant: VR) -> Result<(), Error> + where + VR: Into>, + { + self.region.constrain_constant(cell, constant.into()) + } + + /// Constrains two cells to have the same value. + /// + /// Returns an error if either of the cells are in columns where equality + /// has not been enabled. + pub fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { + self.region.constrain_equal(left, right) + } +} + +/// A lookup table in the circuit. +#[derive(Debug)] +pub struct Table<'r, F: Field> { + table: &'r mut dyn TableLayouter, +} + +impl<'r, F: Field> From<&'r mut dyn TableLayouter> for Table<'r, F> { + fn from(table: &'r mut dyn TableLayouter) -> Self { + Table { table } + } +} + +impl<'r, F: Field> Table<'r, F> { + /// Assigns a fixed value to a table cell. + /// + /// Returns an error if the table cell has already been assigned to. + /// + /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. 
+ pub fn assign_cell<'v, V, VR, A, AR>( + &'v mut self, + annotation: A, + column: TableColumn, + offset: usize, + mut to: V, + ) -> Result<(), Error> + where + V: FnMut() -> Value + 'v, + VR: Into>, + A: Fn() -> AR, + AR: Into, + { + self.table + .assign_cell(&|| annotation().into(), column, offset, &mut || { + to().into_field() + }) + } +} + +/// A layout strategy within a circuit. The layouter is chip-agnostic and applies its +/// strategy to the context and config it is given. +/// +/// This abstracts over the circuit assignments, handling row indices etc. +/// +pub trait Layouter { + /// Represents the type of the "root" of this layouter, so that nested namespaces + /// can minimize indirection. + type Root: Layouter; + + /// Assign a region of gates to an absolute row number. + /// + /// Inside the closure, the chip may freely use relative offsets; the `Layouter` will + /// treat these assignments as a single "region" within the circuit. Outside this + /// closure, the `Layouter` is allowed to optimise as it sees fit. + /// + /// ```ignore + /// fn assign_region(&mut self, || "region name", |region| { + /// let config = chip.config(); + /// region.assign_advice(config.a, offset, || { Some(value)}); + /// }); + /// ``` + fn assign_region(&mut self, name: N, assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into; + + /// Assign a table region to an absolute row number. + /// + /// ```ignore + /// fn assign_table(&mut self, || "table name", |table| { + /// let config = chip.config(); + /// table.assign_fixed(config.a, offset, || { Some(value)}); + /// }); + /// ``` + fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> + where + A: FnMut(Table<'_, F>) -> Result<(), Error>, + N: Fn() -> NR, + NR: Into; + + /// Constrains a [`Cell`] to equal an instance column's row value at an + /// absolute position. 
+ fn constrain_instance( + &mut self, + cell: Cell, + column: Column, + row: usize, + ) -> Result<(), Error>; + + /// Queries the value of the given challenge. + /// + /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. + fn get_challenge(&self, challenge: Challenge) -> Value; + + /// Gets the "root" of this assignment, bypassing the namespacing. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + fn get_root(&mut self) -> &mut Self::Root; + + /// Creates a new (sub)namespace and enters into it. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Exits out of the existing namespace. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + fn pop_namespace(&mut self, gadget_name: Option); + + /// Enters into a namespace. + fn namespace(&mut self, name_fn: N) -> NamespacedLayouter<'_, F, Self::Root> + where + NR: Into, + N: FnOnce() -> NR, + { + self.get_root().push_namespace(name_fn); + + NamespacedLayouter(self.get_root(), PhantomData) + } +} + +/// This is a "namespaced" layouter which borrows a `Layouter` (pushing a namespace +/// context) and, when dropped, pops out of the namespace context. 
+#[derive(Debug)] +pub struct NamespacedLayouter<'a, F: Field, L: Layouter + 'a>(&'a mut L, PhantomData); + +impl<'a, F: Field, L: Layouter + 'a> Layouter for NamespacedLayouter<'a, F, L> { + type Root = L::Root; + + fn assign_region(&mut self, name: N, assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + self.0.assign_region(name, assignment) + } + + fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> + where + A: FnMut(Table<'_, F>) -> Result<(), Error>, + N: Fn() -> NR, + NR: Into, + { + self.0.assign_table(name, assignment) + } + + fn constrain_instance( + &mut self, + cell: Cell, + column: Column, + row: usize, + ) -> Result<(), Error> { + self.0.constrain_instance(cell, column, row) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.0.get_challenge(challenge) + } + + fn get_root(&mut self) -> &mut Self::Root { + self.0.get_root() + } + + fn push_namespace(&mut self, _name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + panic!("Only the root's push_namespace should be called"); + } + + fn pop_namespace(&mut self, _gadget_name: Option) { + panic!("Only the root's pop_namespace should be called"); + } +} + +impl<'a, F: Field, L: Layouter + 'a> Drop for NamespacedLayouter<'a, F, L> { + fn drop(&mut self) { + let gadget_name = { + #[cfg(feature = "gadget-traces")] + { + let mut gadget_name = None; + let mut is_second_frame = false; + backtrace::trace(|frame| { + if is_second_frame { + // Resolve this instruction pointer to a symbol name. + backtrace::resolve_frame(frame, |symbol| { + gadget_name = symbol.name().map(|name| format!("{name:#}")); + }); + + // We are done! + false + } else { + // We want the next frame. 
+ is_second_frame = true; + true + } + }); + gadget_name + } + + #[cfg(not(feature = "gadget-traces"))] + None + }; + + self.get_root().pop_namespace(gadget_name); + } +} diff --git a/frontend/src/circuit/floor_planner.rs b/frontend/src/circuit/floor_planner.rs new file mode 100644 index 0000000000..1b629034e6 --- /dev/null +++ b/frontend/src/circuit/floor_planner.rs @@ -0,0 +1,6 @@ +//! Implementations of common circuit floor planners. + +pub(super) mod single_pass; + +mod v1; +pub use v1::{V1Pass, V1}; diff --git a/frontend/src/circuit/floor_planner/single_pass.rs b/frontend/src/circuit/floor_planner/single_pass.rs new file mode 100644 index 0000000000..665b882a66 --- /dev/null +++ b/frontend/src/circuit/floor_planner/single_pass.rs @@ -0,0 +1,434 @@ +use std::cmp; +use std::collections::HashMap; +use std::fmt; +use std::marker::PhantomData; + +use ff::Field; + +use crate::error::Error; +use crate::{ + circuit::{ + layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, + table_layouter::{compute_table_lengths, SimpleTableLayouter}, + Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + }, + plonk::{Assignment, Circuit, FloorPlanner, Selector, TableColumn}, +}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; + +/// A simple [`FloorPlanner`] that performs minimal optimizations. +/// +/// This floor planner is suitable for debugging circuits. It aims to reflect the circuit +/// "business logic" in the circuit layout as closely as possible. It uses a single-pass +/// layouter that does not reorder regions for optimal packing. 
+#[derive(Debug)] +pub struct SimpleFloorPlanner; + +impl FloorPlanner for SimpleFloorPlanner { + fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error> { + let layouter = SingleChipLayouter::new(cs, constants)?; + circuit.synthesize(config, layouter) + } +} + +/// A [`Layouter`] for a single-chip circuit. +pub struct SingleChipLayouter<'a, F: Field, CS: Assignment + 'a> { + cs: &'a mut CS, + constants: Vec>, + /// Stores the starting row for each region. + regions: Vec, + /// Stores the first empty row for each column. + columns: HashMap, + /// Stores the table fixed columns. + table_columns: Vec, + _marker: PhantomData, +} + +impl<'a, F: Field, CS: Assignment + 'a> fmt::Debug for SingleChipLayouter<'a, F, CS> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SingleChipLayouter") + .field("regions", &self.regions) + .field("columns", &self.columns) + .finish() + } +} + +impl<'a, F: Field, CS: Assignment> SingleChipLayouter<'a, F, CS> { + /// Creates a new single-chip layouter. + pub fn new(cs: &'a mut CS, constants: Vec>) -> Result { + let ret = SingleChipLayouter { + cs, + constants, + regions: vec![], + columns: HashMap::default(), + table_columns: vec![], + _marker: PhantomData, + }; + Ok(ret) + } +} + +impl<'a, F: Field, CS: Assignment + 'a + SyncDeps> Layouter + for SingleChipLayouter<'a, F, CS> +{ + type Root = Self; + + fn assign_region(&mut self, name: N, mut assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + let region_index = self.regions.len(); + + // Get shape of the region. + let mut shape = RegionShape::new(region_index.into()); + { + let region: &mut dyn RegionLayouter = &mut shape; + assignment(region.into())?; + } + + // Lay out this region. We implement the simplest approach here: position the + // region starting at the earliest row for which none of the columns are in use. 
+ let mut region_start = 0; + for column in &shape.columns { + region_start = cmp::max(region_start, self.columns.get(column).cloned().unwrap_or(0)); + } + self.regions.push(region_start.into()); + + // Update column usage information. + for column in shape.columns { + self.columns.insert(column, region_start + shape.row_count); + } + + // Assign region cells. + self.cs.enter_region(name); + let mut region = SingleChipLayouterRegion::new(self, region_index.into()); + let result = { + let region: &mut dyn RegionLayouter = &mut region; + assignment(region.into()) + }?; + let constants_to_assign = region.constants; + self.cs.exit_region(); + + // Assign constants. For the simple floor planner, we assign constants in order in + // the first `constants` column. + if self.constants.is_empty() { + if !constants_to_assign.is_empty() { + return Err(Error::NotEnoughColumnsForConstants); + } + } else { + let constants_column = self.constants[0]; + let next_constant_row = self + .columns + .entry(Column::::from(constants_column).into()) + .or_default(); + for (constant, advice) in constants_to_assign { + self.cs.assign_fixed( + || format!("Constant({:?})", constant.evaluate()), + constants_column, + *next_constant_row, + || Value::known(constant), + )?; + self.cs.copy( + constants_column.into(), + *next_constant_row, + advice.column, + *self.regions[*advice.region_index] + advice.row_offset, + )?; + *next_constant_row += 1; + } + } + + Ok(result) + } + + fn assign_table(&mut self, name: N, mut assignment: A) -> Result<(), Error> + where + A: FnMut(Table<'_, F>) -> Result<(), Error>, + N: Fn() -> NR, + NR: Into, + { + // Maintenance hazard: there is near-duplicate code in `v1::AssignmentPass::assign_table`. + // Assign table cells. 
+ self.cs.enter_region(name); + let mut table = SimpleTableLayouter::new(self.cs, &self.table_columns); + { + let table: &mut dyn TableLayouter = &mut table; + assignment(table.into()) + }?; + let default_and_assigned = table.default_and_assigned; + self.cs.exit_region(); + + // Check that all table columns have the same length `first_unused`, + // and all cells up to that length are assigned. + let first_unused = compute_table_lengths(&default_and_assigned)?; + + // Record these columns so that we can prevent them from being used again. + for column in default_and_assigned.keys() { + self.table_columns.push(*column); + } + + for (col, (default_val, _)) in default_and_assigned { + // default_val must be Some because we must have assigned + // at least one cell in each column, and in that case we checked + // that all cells up to first_unused were assigned. + self.cs + .fill_from_row(col.inner(), first_unused, default_val.unwrap())?; + } + + Ok(()) + } + + fn constrain_instance( + &mut self, + cell: Cell, + instance: Column, + row: usize, + ) -> Result<(), Error> { + self.cs.copy( + cell.column, + *self.regions[*cell.region_index] + cell.row_offset, + instance.into(), + row, + ) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.cs.get_challenge(challenge) + } + + fn get_root(&mut self) -> &mut Self::Root { + self + } + + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + self.cs.push_namespace(name_fn) + } + + fn pop_namespace(&mut self, gadget_name: Option) { + self.cs.pop_namespace(gadget_name) + } +} + +struct SingleChipLayouterRegion<'r, 'a, F: Field, CS: Assignment + 'a> { + layouter: &'r mut SingleChipLayouter<'a, F, CS>, + region_index: RegionIndex, + /// Stores the constants to be assigned, and the cells to which they are copied. 
+ constants: Vec<(Assigned, Cell)>, +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug + for SingleChipLayouterRegion<'r, 'a, F, CS> +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SingleChipLayouterRegion") + .field("layouter", &self.layouter) + .field("region_index", &self.region_index) + .finish() + } +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> SingleChipLayouterRegion<'r, 'a, F, CS> { + fn new(layouter: &'r mut SingleChipLayouter<'a, F, CS>, region_index: RegionIndex) -> Self { + SingleChipLayouterRegion { + layouter, + region_index, + constants: vec![], + } + } +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a + SyncDeps> RegionLayouter + for SingleChipLayouterRegion<'r, 'a, F, CS> +{ + fn enable_selector<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + selector: &Selector, + offset: usize, + ) -> Result<(), Error> { + self.layouter.cs.enable_selector( + annotation, + selector, + *self.layouter.regions[*self.region_index] + offset, + ) + } + + fn name_column<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + ) { + self.layouter.cs.annotate_column(annotation, column); + } + + fn assign_advice<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.layouter.cs.assign_advice( + annotation, + column, + *self.layouter.regions[*self.region_index] + offset, + to, + )?; + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn assign_advice_from_constant<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + constant: Assigned, + ) -> Result { + let advice = + self.assign_advice(annotation, column, offset, &mut || Value::known(constant))?; + self.constrain_constant(advice, constant)?; + + Ok(advice) + } + + fn assign_advice_from_instance<'v>( + &mut self, + 
annotation: &'v (dyn Fn() -> String + 'v), + instance: Column, + row: usize, + advice: Column, + offset: usize, + ) -> Result<(Cell, Value), Error> { + let value = self.layouter.cs.query_instance(instance, row)?; + + let cell = self.assign_advice(annotation, advice, offset, &mut || value.to_field())?; + + self.layouter.cs.copy( + cell.column, + *self.layouter.regions[*cell.region_index] + cell.row_offset, + instance.into(), + row, + )?; + + Ok((cell, value)) + } + + fn instance_value( + &mut self, + instance: Column, + row: usize, + ) -> Result, Error> { + self.layouter.cs.query_instance(instance, row) + } + + fn assign_fixed<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.layouter.cs.assign_fixed( + annotation, + column, + *self.layouter.regions[*self.region_index] + offset, + to, + )?; + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { + self.constants.push((constant, cell)); + Ok(()) + } + + fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { + self.layouter.cs.copy( + left.column, + *self.layouter.regions[*left.region_index] + left.row_offset, + right.column, + *self.layouter.regions[*right.region_index] + right.row_offset, + )?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::vesta; + + use super::SimpleFloorPlanner; + use crate::{ + dev::MockProver, + plonk::{Advice, Circuit, Column, Error}, + }; + + #[test] + fn not_enough_columns_for_constants() { + struct MyCircuit {} + + impl Circuit for MyCircuit { + type Config = Column; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + MyCircuit {} + } + + fn configure(meta: &mut crate::plonk::ConstraintSystem) -> 
Self::Config { + meta.advice_column() + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl crate::circuit::Layouter, + ) -> Result<(), crate::plonk::Error> { + layouter.assign_region( + || "assign constant", + |mut region| { + region.assign_advice_from_constant( + || "one", + config, + 0, + vesta::Scalar::one(), + ) + }, + )?; + + Ok(()) + } + } + + let circuit = MyCircuit {}; + assert!(matches!( + MockProver::run(3, &circuit, vec![]).unwrap_err(), + Error::NotEnoughColumnsForConstants, + )); + } +} diff --git a/frontend/src/circuit/floor_planner/v1.rs b/frontend/src/circuit/floor_planner/v1.rs new file mode 100644 index 0000000000..a8d52fe0ce --- /dev/null +++ b/frontend/src/circuit/floor_planner/v1.rs @@ -0,0 +1,550 @@ +use std::fmt; + +use ff::Field; + +use crate::error::Error; +use crate::{ + circuit::{ + layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, + table_layouter::{compute_table_lengths, SimpleTableLayouter}, + Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + }, + plonk::{Assignment, Circuit, FloorPlanner, Selector, TableColumn}, +}; +use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; + +mod strategy; + +/// The version 1 [`FloorPlanner`] provided by `halo2`. +/// +/// - No column optimizations are performed. Circuit configuration is left entirely to the +/// circuit designer. +/// - A dual-pass layouter is used to measures regions prior to assignment. +/// - Regions are measured as rectangles, bounded on the cells they assign. +/// - Regions are laid out using a greedy first-fit strategy, after sorting regions by +/// their "advice area" (number of advice columns * rows). +#[derive(Debug)] +pub struct V1; + +struct V1Plan<'a, F: Field, CS: Assignment + 'a> { + cs: &'a mut CS, + /// Stores the starting row for each region. 
+ regions: Vec, + /// Stores the constants to be assigned, and the cells to which they are copied. + constants: Vec<(Assigned, Cell)>, + /// Stores the table fixed columns. + table_columns: Vec, +} + +impl<'a, F: Field, CS: Assignment + 'a> fmt::Debug for V1Plan<'a, F, CS> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("floor_planner::V1Plan").finish() + } +} + +impl<'a, F: Field, CS: Assignment + SyncDeps> V1Plan<'a, F, CS> { + /// Creates a new v1 layouter. + pub fn new(cs: &'a mut CS) -> Result { + let ret = V1Plan { + cs, + regions: vec![], + constants: vec![], + table_columns: vec![], + }; + Ok(ret) + } +} + +impl FloorPlanner for V1 { + fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error> { + let mut plan = V1Plan::new(cs)?; + + // First pass: measure the regions within the circuit. + let mut measure = MeasurementPass::new(); + { + let pass = &mut measure; + circuit + .without_witnesses() + .synthesize(config.clone(), V1Pass::<_, CS>::measure(pass))?; + } + + // Planning: + // - Position the regions. + let (regions, column_allocations) = strategy::slot_in_biggest_advice_first(measure.regions); + plan.regions = regions; + + // - Determine how many rows our planned circuit will require. + let first_unassigned_row = column_allocations + .values() + .map(|a| a.unbounded_interval_start()) + .max() + .unwrap_or(0); + + // - Position the constants within those rows. + let fixed_allocations: Vec<_> = constants + .into_iter() + .map(|c| { + ( + c, + column_allocations + .get(&Column::::from(c).into()) + .cloned() + .unwrap_or_default(), + ) + }) + .collect(); + let constant_positions = || { + fixed_allocations.iter().flat_map(|(c, a)| { + let c = *c; + a.free_intervals(0, Some(first_unassigned_row)) + .flat_map(move |e| e.range().unwrap().map(move |i| (c, i))) + }) + }; + + // Second pass: + // - Assign the regions. 
+ let mut assign = AssignmentPass::new(&mut plan); + { + let pass = &mut assign; + circuit.synthesize(config, V1Pass::assign(pass))?; + } + + // - Assign the constants. + if constant_positions().count() < plan.constants.len() { + return Err(Error::NotEnoughColumnsForConstants); + } + for ((fixed_column, fixed_row), (value, advice)) in + constant_positions().zip(plan.constants.into_iter()) + { + plan.cs.assign_fixed( + || format!("Constant({:?})", value.evaluate()), + fixed_column, + fixed_row, + || Value::known(value), + )?; + plan.cs.copy( + fixed_column.into(), + fixed_row, + advice.column, + *plan.regions[*advice.region_index] + advice.row_offset, + )?; + } + + Ok(()) + } +} + +#[derive(Debug)] +enum Pass<'p, 'a, F: Field, CS: Assignment + 'a> { + Measurement(&'p mut MeasurementPass), + Assignment(&'p mut AssignmentPass<'p, 'a, F, CS>), +} + +/// A single pass of the [`V1`] layouter. +#[derive(Debug)] +pub struct V1Pass<'p, 'a, F: Field, CS: Assignment + 'a>(Pass<'p, 'a, F, CS>); + +impl<'p, 'a, F: Field, CS: Assignment + 'a> V1Pass<'p, 'a, F, CS> { + fn measure(pass: &'p mut MeasurementPass) -> Self { + V1Pass(Pass::Measurement(pass)) + } + + fn assign(pass: &'p mut AssignmentPass<'p, 'a, F, CS>) -> Self { + V1Pass(Pass::Assignment(pass)) + } +} + +impl<'p, 'a, F: Field, CS: Assignment + SyncDeps> Layouter for V1Pass<'p, 'a, F, CS> { + type Root = Self; + + fn assign_region(&mut self, name: N, assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + match &mut self.0 { + Pass::Measurement(pass) => pass.assign_region(assignment), + Pass::Assignment(pass) => pass.assign_region(name, assignment), + } + } + + fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> + where + A: FnMut(Table<'_, F>) -> Result<(), Error>, + N: Fn() -> NR, + NR: Into, + { + match &mut self.0 { + Pass::Measurement(_) => Ok(()), + Pass::Assignment(pass) => pass.assign_table(name, assignment), + } + } + + fn 
constrain_instance( + &mut self, + cell: Cell, + instance: Column, + row: usize, + ) -> Result<(), Error> { + match &mut self.0 { + Pass::Measurement(_) => Ok(()), + Pass::Assignment(pass) => pass.constrain_instance(cell, instance, row), + } + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + match &self.0 { + Pass::Measurement(_) => Value::unknown(), + Pass::Assignment(pass) => pass.plan.cs.get_challenge(challenge), + } + } + + fn get_root(&mut self) -> &mut Self::Root { + self + } + + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + if let Pass::Assignment(pass) = &mut self.0 { + pass.plan.cs.push_namespace(name_fn); + } + } + + fn pop_namespace(&mut self, gadget_name: Option) { + if let Pass::Assignment(pass) = &mut self.0 { + pass.plan.cs.pop_namespace(gadget_name); + } + } +} + +/// Measures the circuit. +#[derive(Debug)] +pub struct MeasurementPass { + regions: Vec, +} + +impl MeasurementPass { + fn new() -> Self { + MeasurementPass { regions: vec![] } + } + + fn assign_region(&mut self, mut assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + { + let region_index = self.regions.len(); + + // Get shape of the region. + let mut shape = RegionShape::new(region_index.into()); + let result = { + let region: &mut dyn RegionLayouter = &mut shape; + assignment(region.into()) + }?; + self.regions.push(shape); + + Ok(result) + } +} + +/// Assigns the circuit. +#[derive(Debug)] +pub struct AssignmentPass<'p, 'a, F: Field, CS: Assignment + 'a> { + plan: &'p mut V1Plan<'a, F, CS>, + /// Counter tracking which region we need to assign next. 
+ region_index: usize, +} + +impl<'p, 'a, F: Field, CS: Assignment + SyncDeps> AssignmentPass<'p, 'a, F, CS> { + fn new(plan: &'p mut V1Plan<'a, F, CS>) -> Self { + AssignmentPass { + plan, + region_index: 0, + } + } + + fn assign_region(&mut self, name: N, mut assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + // Get the next region we are assigning. + let region_index = self.region_index; + self.region_index += 1; + + self.plan.cs.enter_region(name); + let mut region = V1Region::new(self.plan, region_index.into()); + let result = { + let region: &mut dyn RegionLayouter = &mut region; + assignment(region.into()) + }?; + self.plan.cs.exit_region(); + + Ok(result) + } + + fn assign_table(&mut self, name: N, mut assignment: A) -> Result + where + A: FnMut(Table<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + // Maintenance hazard: there is near-duplicate code in `SingleChipLayouter::assign_table`. + + // Assign table cells. + self.plan.cs.enter_region(name); + let mut table = SimpleTableLayouter::new(self.plan.cs, &self.plan.table_columns); + let result = { + let table: &mut dyn TableLayouter = &mut table; + assignment(table.into()) + }?; + let default_and_assigned = table.default_and_assigned; + self.plan.cs.exit_region(); + + // Check that all table columns have the same length `first_unused`, + // and all cells up to that length are assigned. + let first_unused = compute_table_lengths(&default_and_assigned)?; + + // Record these columns so that we can prevent them from being used again. + for column in default_and_assigned.keys() { + self.plan.table_columns.push(*column); + } + + for (col, (default_val, _)) in default_and_assigned { + // default_val must be Some because we must have assigned + // at least one cell in each column, and in that case we checked + // that all cells up to first_unused were assigned. 
+ self.plan + .cs + .fill_from_row(col.inner(), first_unused, default_val.unwrap())?; + } + + Ok(result) + } + + fn constrain_instance( + &mut self, + cell: Cell, + instance: Column, + row: usize, + ) -> Result<(), Error> { + self.plan.cs.copy( + cell.column, + *self.plan.regions[*cell.region_index] + cell.row_offset, + instance.into(), + row, + ) + } +} + +struct V1Region<'r, 'a, F: Field, CS: Assignment + 'a> { + plan: &'r mut V1Plan<'a, F, CS>, + region_index: RegionIndex, +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug for V1Region<'r, 'a, F, CS> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("V1Region") + .field("plan", &self.plan) + .field("region_index", &self.region_index) + .finish() + } +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> V1Region<'r, 'a, F, CS> { + fn new(plan: &'r mut V1Plan<'a, F, CS>, region_index: RegionIndex) -> Self { + V1Region { plan, region_index } + } +} + +impl<'r, 'a, F: Field, CS: Assignment + SyncDeps> RegionLayouter for V1Region<'r, 'a, F, CS> { + fn enable_selector<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + selector: &Selector, + offset: usize, + ) -> Result<(), Error> { + self.plan.cs.enable_selector( + annotation, + selector, + *self.plan.regions[*self.region_index] + offset, + ) + } + + fn assign_advice<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.plan.cs.assign_advice( + annotation, + column, + *self.plan.regions[*self.region_index] + offset, + to, + )?; + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn assign_advice_from_constant<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + constant: Assigned, + ) -> Result { + let advice = + self.assign_advice(annotation, column, offset, &mut || Value::known(constant))?; + 
self.constrain_constant(advice, constant)?; + + Ok(advice) + } + + fn assign_advice_from_instance<'v>( + &mut self, + annotation: &'v (dyn Fn() -> String + 'v), + instance: Column, + row: usize, + advice: Column, + offset: usize, + ) -> Result<(Cell, Value), Error> { + let value = self.plan.cs.query_instance(instance, row)?; + + let cell = self.assign_advice(annotation, advice, offset, &mut || value.to_field())?; + + self.plan.cs.copy( + cell.column, + *self.plan.regions[*cell.region_index] + cell.row_offset, + instance.into(), + row, + )?; + + Ok((cell, value)) + } + + fn instance_value( + &mut self, + instance: Column, + row: usize, + ) -> Result, Error> { + self.plan.cs.query_instance(instance, row) + } + + fn assign_fixed<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.plan.cs.assign_fixed( + annotation, + column, + *self.plan.regions[*self.region_index] + offset, + to, + )?; + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { + self.plan.constants.push((constant, cell)); + Ok(()) + } + + fn name_column<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + ) { + self.plan.cs.annotate_column(annotation, column) + } + + fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { + self.plan.cs.copy( + left.column, + *self.plan.regions[*left.region_index] + left.row_offset, + right.column, + *self.plan.regions[*right.region_index] + right.row_offset, + )?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::vesta; + + use crate::{ + dev::MockProver, + plonk::{Advice, Circuit, Column, Error}, + }; + + #[test] + fn not_enough_columns_for_constants() { + struct MyCircuit {} + + impl Circuit for MyCircuit { + type Config = Column; + type 
FloorPlanner = super::V1; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + MyCircuit {} + } + + fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { + meta.advice_column() + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl crate::circuit::Layouter, + ) -> Result<(), crate::plonk::Error> { + layouter.assign_region( + || "assign constant", + |mut region| { + region.assign_advice_from_constant( + || "one", + config, + 0, + vesta::Scalar::one(), + ) + }, + )?; + + Ok(()) + } + } + + let circuit = MyCircuit {}; + assert!(matches!( + MockProver::run(3, &circuit, vec![]).unwrap_err(), + Error::NotEnoughColumnsForConstants, + )); + } +} diff --git a/frontend/src/circuit/floor_planner/v1/strategy.rs b/frontend/src/circuit/floor_planner/v1/strategy.rs new file mode 100644 index 0000000000..86db63124d --- /dev/null +++ b/frontend/src/circuit/floor_planner/v1/strategy.rs @@ -0,0 +1,283 @@ +use std::{ + cmp, + collections::{BTreeSet, HashMap}, + ops::Range, +}; + +use super::{RegionColumn, RegionShape}; +use crate::circuit::RegionStart; +use halo2_middleware::circuit::Any; + +/// A region allocated within a column. +#[derive(Clone, Default, Debug, PartialEq, Eq)] +struct AllocatedRegion { + // The starting position of the region. + start: usize, + // The length of the region. + length: usize, +} + +impl Ord for AllocatedRegion { + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.start.cmp(&other.start) + } +} + +impl PartialOrd for AllocatedRegion { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// An area of empty space within a column. +pub(crate) struct EmptySpace { + // The starting position (inclusive) of the empty space. + start: usize, + // The ending position (exclusive) of the empty space, or `None` if unbounded. 
+ end: Option, +} + +impl EmptySpace { + pub(crate) fn range(&self) -> Option> { + self.end.map(|end| self.start..end) + } +} + +/// Allocated rows within a column. +/// +/// This is a set of [a_start, a_end) pairs representing disjoint allocated intervals. +#[derive(Clone, Default, Debug)] +pub struct Allocations(BTreeSet); + +impl Allocations { + /// Returns the row that forms the unbounded unallocated interval [row, None). + pub(crate) fn unbounded_interval_start(&self) -> usize { + self.0 + .iter() + .last() + .map(|r| r.start + r.length) + .unwrap_or(0) + } + + /// Return all the *unallocated* nonempty intervals intersecting [start, end). + /// + /// `end = None` represents an unbounded end. + pub(crate) fn free_intervals( + &self, + start: usize, + end: Option, + ) -> impl Iterator + '_ { + self.0 + .iter() + .map(Some) + .chain(Some(None)) + .scan(start, move |row, region| { + Some(if let Some(region) = region { + if end.map(|end| region.start >= end).unwrap_or(false) { + None + } else { + let ret = if *row < region.start { + Some(EmptySpace { + start: *row, + end: Some(region.start), + }) + } else { + None + }; + + *row = cmp::max(*row, region.start + region.length); + + ret + } + } else if end.map(|end| *row < end).unwrap_or(true) { + Some(EmptySpace { start: *row, end }) + } else { + None + }) + }) + .flatten() + } +} + +/// Allocated rows within a circuit. +pub type CircuitAllocations = HashMap; + +/// - `start` is the current start row of the region (not of this column). +/// - `slack` is the maximum number of rows the start could be moved down, taking into +/// account prior columns. 
+fn first_fit_region( + column_allocations: &mut CircuitAllocations, + region_columns: &[RegionColumn], + region_length: usize, + start: usize, + slack: Option, +) -> Option { + let (c, remaining_columns) = match region_columns.split_first() { + Some(cols) => cols, + None => return Some(start), + }; + let end = slack.map(|slack| start + region_length + slack); + + // Iterate over the unallocated non-empty intervals in c that intersect [start, end). + for space in column_allocations + .entry(*c) + .or_default() + .clone() + .free_intervals(start, end) + { + // Do we have enough room for this column of the region in this interval? + let s_slack = space + .end + .map(|end| (end as isize - space.start as isize) - region_length as isize); + if let Some((slack, s_slack)) = slack.zip(s_slack) { + assert!(s_slack <= slack as isize); + } + if s_slack.unwrap_or(0) >= 0 { + let row = first_fit_region( + column_allocations, + remaining_columns, + region_length, + space.start, + s_slack.map(|s| s as usize), + ); + if let Some(row) = row { + if let Some(end) = end { + assert!(row + region_length <= end); + } + column_allocations + .get_mut(c) + .unwrap() + .0 + .insert(AllocatedRegion { + start: row, + length: region_length, + }); + return Some(row); + } + } + } + + // No placement worked; the caller will need to try other possibilities. + None +} + +/// Positions the regions starting at the earliest row for which none of the columns are +/// in use, taking into account gaps between earlier regions. +fn slot_in( + region_shapes: Vec, +) -> (Vec<(RegionStart, RegionShape)>, CircuitAllocations) { + // Tracks the empty regions for each column. + let mut column_allocations: CircuitAllocations = Default::default(); + + let regions = region_shapes + .into_iter() + .map(|region| { + // Sort the region's columns to ensure determinism. + // - An unstable sort is fine, because region.columns() returns a set. + // - The sort order relies on Column's Ord implementation! 
+ let mut region_columns: Vec<_> = region.columns().iter().cloned().collect(); + region_columns.sort_unstable(); + + let region_start = first_fit_region( + &mut column_allocations, + ®ion_columns, + region.row_count(), + 0, + None, + ) + .expect("We can always fit a region somewhere"); + + (region_start.into(), region) + }) + .collect(); + + // Return the column allocations for potential further processing. + (regions, column_allocations) +} + +/// Sorts the regions by advice area and then lays them out with the [`slot_in`] strategy. +pub fn slot_in_biggest_advice_first( + region_shapes: Vec, +) -> (Vec, CircuitAllocations) { + let mut sorted_regions: Vec<_> = region_shapes.into_iter().collect(); + let sort_key = |shape: &RegionShape| { + // Count the number of advice columns + let advice_cols = shape + .columns() + .iter() + .filter(|c| match c { + RegionColumn::Column(c) => matches!(c.column_type(), Any::Advice(_)), + _ => false, + }) + .count(); + // Sort by advice area (since this has the most contention). + advice_cols * shape.row_count() + }; + + // This used to incorrectly use `sort_unstable_by_key` with non-unique keys, which gave + // output that differed between 32-bit and 64-bit platforms, and potentially between Rust + // versions. + // We now use `sort_by_cached_key` with non-unique keys, and rely on `region_shapes` + // being sorted by region index (which we also rely on below to return `RegionStart`s + // in the correct order). + #[cfg(not(feature = "floor-planner-v1-legacy-pdqsort"))] + sorted_regions.sort_by_cached_key(sort_key); + + // To preserve compatibility, when the "floor-planner-v1-legacy-pdqsort" feature is enabled, + // we use a copy of the pdqsort implementation from the Rust 1.56.1 standard library, fixed + // to its behaviour on 64-bit platforms. 
+ // https://github.com/rust-lang/rust/blob/1.56.1/library/core/src/slice/mod.rs#L2365-L2402 + #[cfg(feature = "floor-planner-v1-legacy-pdqsort")] + halo2_legacy_pdqsort::sort::quicksort(&mut sorted_regions, |a, b| sort_key(a).lt(&sort_key(b))); + + sorted_regions.reverse(); + + // Lay out the sorted regions. + let (mut regions, column_allocations) = slot_in(sorted_regions); + + // Un-sort the regions so they match the original indexing. + regions.sort_unstable_by_key(|(_, region)| region.region_index().0); + let regions = regions.into_iter().map(|(start, _)| start).collect(); + + (regions, column_allocations) +} + +#[test] +fn test_slot_in() { + use crate::plonk::Column; + + let regions = vec![ + RegionShape { + region_index: 0.into(), + columns: vec![Column::new(0, Any::advice()), Column::new(1, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 15, + }, + RegionShape { + region_index: 1.into(), + columns: vec![Column::new(2, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 10, + }, + RegionShape { + region_index: 2.into(), + columns: vec![Column::new(2, Any::advice()), Column::new(0, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 10, + }, + ]; + assert_eq!( + slot_in(regions) + .0 + .into_iter() + .map(|(i, _)| i) + .collect::>(), + vec![0.into(), 0.into(), 15.into()] + ); +} diff --git a/frontend/src/circuit/layouter.rs b/frontend/src/circuit/layouter.rs new file mode 100644 index 0000000000..9cd2dd57a2 --- /dev/null +++ b/frontend/src/circuit/layouter.rs @@ -0,0 +1,318 @@ +//! Implementations of common circuit layouters. 
+ +use std::cmp; +use std::collections::HashSet; +use std::fmt; + +use ff::Field; + +pub use super::table_layouter::TableLayouter; +use super::{Cell, RegionIndex, Value}; +use crate::error::Error; +use crate::plonk::Selector; +use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; + +/// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. +#[cfg(feature = "thread-safe-region")] +pub trait SyncDeps: Send + Sync {} + +#[cfg(feature = "thread-safe-region")] +impl SyncDeps for T {} + +/// Intermediate trait requirements for [`RegionLayouter`]. +#[cfg(not(feature = "thread-safe-region"))] +pub trait SyncDeps {} + +#[cfg(not(feature = "thread-safe-region"))] +impl SyncDeps for T {} + +/// Helper trait for implementing a custom [`Layouter`]. +/// +/// This trait is used for implementing region assignments: +/// +/// ```ignore +/// impl<'a, F: Field, C: Chip, CS: Assignment + 'a> Layouter for MyLayouter<'a, C, CS> { +/// fn assign_region( +/// &mut self, +/// assignment: impl FnOnce(Region<'_, F, C>) -> Result<(), Error>, +/// ) -> Result<(), Error> { +/// let region_index = self.regions.len(); +/// self.regions.push(self.current_gate); +/// +/// let mut region = MyRegion::new(self, region_index); +/// { +/// let region: &mut dyn RegionLayouter = &mut region; +/// assignment(region.into())?; +/// } +/// self.current_gate += region.row_count; +/// +/// Ok(()) +/// } +/// } +/// ``` +/// +/// TODO: It would be great if we could constrain the columns in these types to be +/// "logical" columns that are guaranteed to correspond to the chip (and have come from +/// `Chip::Config`). +/// +/// [`Layouter`]: super::Layouter +pub trait RegionLayouter: fmt::Debug + SyncDeps { + /// Enables a selector at the given offset. 
+ fn enable_selector<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + selector: &Selector, + offset: usize, + ) -> Result<(), Error>; + + /// Allows the circuit implementor to name/annotate a Column within a Region context. + /// + /// This is useful in order to improve the amount of information that `prover.verify()` + /// and `prover.assert_satisfied()` can provide. + fn name_column<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + ); + + /// Assign an advice column value (witness) + fn assign_advice<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result; + + /// Assigns a constant value to the column `advice` at `offset` within this region. + /// + /// The constant value will be assigned to a cell within one of the fixed columns + /// configured via `ConstraintSystem::enable_constant`. + /// + /// Returns the advice cell that has been equality-constrained to the constant. + fn assign_advice_from_constant<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + constant: Assigned, + ) -> Result; + + /// Assign the value of the instance column's cell at absolute location + /// `row` to the column `advice` at `offset` within this region. + /// + /// Returns the advice cell that has been equality-constrained to the + /// instance cell, and its value if known. + fn assign_advice_from_instance<'v>( + &mut self, + annotation: &'v (dyn Fn() -> String + 'v), + instance: Column, + row: usize, + advice: Column, + offset: usize, + ) -> Result<(Cell, Value), Error>; + + /// Returns the value of the instance column's cell at absolute location `row`. 
+ fn instance_value(&mut self, instance: Column, row: usize) + -> Result, Error>; + + /// Assigns a fixed value + fn assign_fixed<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result; + + /// Constrains a cell to have a constant value. + /// + /// Returns an error if the cell is in a column where equality has not been enabled. + fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error>; + + /// Constraint two cells to have the same value. + /// + /// Returns an error if either of the cells is not within the given permutation. + fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error>; +} + +/// The shape of a region. For a region at a certain index, we track +/// the set of columns it uses as well as the number of rows it uses. +#[derive(Clone, Debug)] +pub struct RegionShape { + pub(super) region_index: RegionIndex, + pub(super) columns: HashSet, + pub(super) row_count: usize, +} + +/// The virtual column involved in a region. This includes concrete columns, +/// as well as selectors that are not concrete columns at this stage. 
+#[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] +pub enum RegionColumn { + /// Concrete column + Column(Column), + /// Virtual column representing a (boolean) selector + Selector(Selector), +} + +impl From> for RegionColumn { + fn from(column: Column) -> RegionColumn { + RegionColumn::Column(column) + } +} + +impl From for RegionColumn { + fn from(selector: Selector) -> RegionColumn { + RegionColumn::Selector(selector) + } +} + +impl Ord for RegionColumn { + fn cmp(&self, other: &Self) -> cmp::Ordering { + match (self, other) { + (Self::Column(ref a), Self::Column(ref b)) => a.cmp(b), + (Self::Selector(ref a), Self::Selector(ref b)) => a.0.cmp(&b.0), + (Self::Column(_), Self::Selector(_)) => cmp::Ordering::Less, + (Self::Selector(_), Self::Column(_)) => cmp::Ordering::Greater, + } + } +} + +impl PartialOrd for RegionColumn { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl RegionShape { + /// Create a new `RegionShape` for a region at `region_index`. + pub fn new(region_index: RegionIndex) -> Self { + RegionShape { + region_index, + columns: HashSet::default(), + row_count: 0, + } + } + + /// Get the `region_index` of a `RegionShape`. + pub fn region_index(&self) -> RegionIndex { + self.region_index + } + + /// Get a reference to the set of `columns` used in a `RegionShape`. + pub fn columns(&self) -> &HashSet { + &self.columns + } + + /// Get the `row_count` of a `RegionShape`. + pub fn row_count(&self) -> usize { + self.row_count + } +} + +impl RegionLayouter for RegionShape { + fn enable_selector<'v>( + &'v mut self, + _: &'v (dyn Fn() -> String + 'v), + selector: &Selector, + offset: usize, + ) -> Result<(), Error> { + // Track the selector's fixed column as part of the region's shape. 
+ self.columns.insert((*selector).into()); + self.row_count = cmp::max(self.row_count, offset + 1); + Ok(()) + } + + fn assign_advice<'v>( + &'v mut self, + _: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + _to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.columns.insert(Column::::from(column).into()); + self.row_count = cmp::max(self.row_count, offset + 1); + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn assign_advice_from_constant<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + constant: Assigned, + ) -> Result { + // The rest is identical to witnessing an advice cell. + self.assign_advice(annotation, column, offset, &mut || Value::known(constant)) + } + + fn assign_advice_from_instance<'v>( + &mut self, + _: &'v (dyn Fn() -> String + 'v), + _: Column, + _: usize, + advice: Column, + offset: usize, + ) -> Result<(Cell, Value), Error> { + self.columns.insert(Column::::from(advice).into()); + self.row_count = cmp::max(self.row_count, offset + 1); + + Ok(( + Cell { + region_index: self.region_index, + row_offset: offset, + column: advice.into(), + }, + Value::unknown(), + )) + } + + fn instance_value( + &mut self, + _instance: Column, + _row: usize, + ) -> Result, Error> { + Ok(Value::unknown()) + } + + fn assign_fixed<'v>( + &'v mut self, + _: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + _to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + self.columns.insert(Column::::from(column).into()); + self.row_count = cmp::max(self.row_count, offset + 1); + + Ok(Cell { + region_index: self.region_index, + row_offset: offset, + column: column.into(), + }) + } + + fn name_column<'v>( + &'v mut self, + _annotation: &'v (dyn Fn() -> String + 'v), + _column: Column, + ) { + // Do nothing + } + + fn constrain_constant(&mut self, _cell: Cell, _constant: Assigned) -> Result<(), Error> { + // Global 
constants don't affect the region shape. + Ok(()) + } + + fn constrain_equal(&mut self, _left: Cell, _right: Cell) -> Result<(), Error> { + // Equality constraints don't affect the region shape. + Ok(()) + } +} diff --git a/frontend/src/circuit/table_layouter.rs b/frontend/src/circuit/table_layouter.rs new file mode 100644 index 0000000000..ce0330ec5c --- /dev/null +++ b/frontend/src/circuit/table_layouter.rs @@ -0,0 +1,415 @@ +//! Implementations of common table layouters. + +use std::{ + collections::HashMap, + fmt::{self, Debug}, +}; + +use ff::Field; + +use crate::error::{Error, TableError}; +use crate::plonk::{Assignment, TableColumn}; +use halo2_middleware::plonk::Assigned; + +use super::Value; + +/// Helper trait for implementing a custom [`Layouter`]. +/// +/// This trait is used for implementing table assignments. +/// +/// [`Layouter`]: super::Layouter +pub trait TableLayouter: std::fmt::Debug { + /// Assigns a fixed value to a table cell. + /// + /// Returns an error if the table cell has already been assigned to. + fn assign_cell<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: TableColumn, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result<(), Error>; +} + +/// The default value to fill a table column with. +/// +/// - The outer `Option` tracks whether the value in row 0 of the table column has been +/// assigned yet. This will always be `Some` once a valid table has been completely +/// assigned. +/// - The inner `Value` tracks whether the underlying `Assignment` is evaluating +/// witnesses or not. +type DefaultTableValue = Option>>; + +/// A table layouter that can be used to assign values to a table. 
+pub struct SimpleTableLayouter<'r, 'a, F: Field, CS: Assignment + 'a> { + cs: &'a mut CS, + used_columns: &'r [TableColumn], + /// maps from a fixed column to a pair (default value, vector saying which rows are assigned) + pub default_and_assigned: HashMap, Vec)>, +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug for SimpleTableLayouter<'r, 'a, F, CS> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SimpleTableLayouter") + .field("used_columns", &self.used_columns) + .field("default_and_assigned", &self.default_and_assigned) + .finish() + } +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> SimpleTableLayouter<'r, 'a, F, CS> { + /// Returns a new SimpleTableLayouter + pub fn new(cs: &'a mut CS, used_columns: &'r [TableColumn]) -> Self { + SimpleTableLayouter { + cs, + used_columns, + default_and_assigned: HashMap::default(), + } + } +} + +impl<'r, 'a, F: Field, CS: Assignment + 'a> TableLayouter + for SimpleTableLayouter<'r, 'a, F, CS> +{ + fn assign_cell<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: TableColumn, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result<(), Error> { + if self.used_columns.contains(&column) { + return Err(Error::TableError(TableError::UsedColumn(column))); + } + + let entry = self.default_and_assigned.entry(column).or_default(); + + let mut value = Value::unknown(); + self.cs.assign_fixed( + annotation, + column.inner(), + offset, // tables are always assigned starting at row 0 + || { + let res = to(); + value = res; + res + }, + )?; + + match (entry.0.is_none(), offset) { + // Use the value at offset 0 as the default value for this table column. + (true, 0) => entry.0 = Some(value), + // Since there is already an existing default value for this table column, + // the caller should not be attempting to assign another value at offset 0. 
+ (false, 0) => { + return Err(Error::TableError(TableError::OverwriteDefault( + column, + format!("{:?}", entry.0.unwrap()), + format!("{value:?}"), + ))) + } + _ => (), + } + if entry.1.len() <= offset { + entry.1.resize(offset + 1, false); + } + entry.1[offset] = true; + + Ok(()) + } +} + +pub(crate) fn compute_table_lengths( + default_and_assigned: &HashMap, Vec)>, +) -> Result { + let column_lengths: Result, Error> = default_and_assigned + .iter() + .map(|(col, (default_value, assigned))| { + if default_value.is_none() || assigned.is_empty() { + return Err(Error::TableError(TableError::ColumnNotAssigned(*col))); + } + if assigned.iter().all(|b| *b) { + // All values in the column have been assigned + Ok((col, assigned.len())) + } else { + Err(Error::TableError(TableError::ColumnNotAssigned(*col))) + } + }) + .collect(); + let column_lengths = column_lengths?; + column_lengths + .into_iter() + .try_fold((None, 0), |acc, (col, col_len)| { + if acc.1 == 0 || acc.1 == col_len { + Ok((Some(*col), col_len)) + } else { + let mut cols = [(*col, col_len), (acc.0.unwrap(), acc.1)]; + cols.sort(); + Err(Error::TableError(TableError::UnevenColumnLengths( + cols[0], cols[1], + ))) + } + }) + .map(|col_len| col_len.1) +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use crate::{ + circuit::{Layouter, SimpleFloorPlanner}, + dev::MockProver, + plonk::{Circuit, ConstraintSystem}, + poly::Rotation, + }; + + use super::*; + + #[test] + fn table_no_default() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = meta.lookup_table_column(); + + meta.lookup("", 
|cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "duplicate assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 1, + || Value::known(Fp::zero()), + ) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } not fully assigned. Help: assign a value at offset 0." + ); + } + + #[test] + fn table_overwrite_default() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = meta.lookup_table_column(); + + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "duplicate assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 0, + || Value::known(Fp::zero()), + )?; + table.assign_cell( + || "duplicate", + config.table, + 0, + || Value::known(Fp::zero()), + ) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "Attempted to overwrite default value Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } with Value { inner: 
Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } in TableColumn { inner: Column { index: 0, column_type: Fixed } }" + ); + } + + #[test] + fn table_reuse_column() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = meta.lookup_table_column(); + + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "first assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 0, + || Value::known(Fp::zero()), + ) + }, + )?; + + layouter.assign_table( + || "reuse", + |mut table| { + table.assign_cell(|| "reuse", config.table, 1, || Value::known(Fp::zero())) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } has already been used" + ); + } + + #[test] + fn table_uneven_columns() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: (TableColumn, TableColumn), + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = 
(meta.lookup_table_column(), meta.lookup_table_column()); + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + + vec![(a.clone(), table.0), (a, table.1)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "table with uneven columns", + |mut table| { + table.assign_cell(|| "", config.table.0, 0, || Value::known(Fp::zero()))?; + table.assign_cell(|| "", config.table.0, 1, || Value::known(Fp::zero()))?; + + table.assign_cell(|| "", config.table.1, 0, || Value::known(Fp::zero())) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } has length 2 while TableColumn { inner: Column { index: 1, column_type: Fixed } } has length 1" + ); + } +} diff --git a/frontend/src/circuit/value.rs b/frontend/src/circuit/value.rs new file mode 100644 index 0000000000..54111f3ba1 --- /dev/null +++ b/frontend/src/circuit/value.rs @@ -0,0 +1,704 @@ +use std::borrow::Borrow; +use std::ops::{Add, Mul, Neg, Sub}; + +use group::ff::Field; + +use crate::error::Error; +use halo2_middleware::plonk::Assigned; + +/// A value that might exist within a circuit. +/// +/// This behaves like `Option` but differs in two key ways: +/// - It does not expose the enum cases, or provide an `Option::unwrap` equivalent. This +/// helps to ensure that unwitnessed values correctly propagate. +/// - It provides pass-through implementations of common traits such as `Add` and `Mul`, +/// for improved usability. +#[derive(Clone, Copy, Debug)] +pub struct Value { + inner: Option, +} + +impl Default for Value { + fn default() -> Self { + Self::unknown() + } +} + +impl Value { + /// Constructs an unwitnessed value. + pub const fn unknown() -> Self { + Self { inner: None } + } + + /// Constructs a known value. 
+ /// + /// # Examples + /// + /// ``` + /// use halo2_proofs::circuit::Value; + /// + /// let v = Value::known(37); + /// ``` + pub const fn known(value: V) -> Self { + Self { inner: Some(value) } + } + + /// Obtains the inner value for assigning into the circuit. + /// + /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. + pub(crate) fn assign(self) -> Result { + self.inner.ok_or(Error::Synthesis) + } + + /// Converts from `&Value` to `Value<&V>`. + pub fn as_ref(&self) -> Value<&V> { + Value { + inner: self.inner.as_ref(), + } + } + + /// Converts from `&mut Value` to `Value<&mut V>`. + pub fn as_mut(&mut self) -> Value<&mut V> { + Value { + inner: self.inner.as_mut(), + } + } + + /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! + pub(crate) fn into_option(self) -> Option { + self.inner + } + + /// Enforces an assertion on the contained value, if known. + /// + /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce + /// circuit constraints with this method! + /// + /// # Panics + /// + /// Panics if `f` returns `false`. + pub fn assert_if_known bool>(&self, f: F) { + if let Some(value) = self.inner.as_ref() { + assert!(f(value)); + } + } + + /// Checks the contained value for an error condition, if known. + /// + /// The error check is ignored if `self` is [`Value::unknown()`]. Do not try to + /// enforce circuit constraints with this method! + pub fn error_if_known_and bool>(&self, f: F) -> Result<(), Error> { + match self.inner.as_ref() { + Some(value) if f(value) => Err(Error::Synthesis), + _ => Ok(()), + } + } + + /// Maps a `Value` to `Value` by applying a function to the contained value. + pub fn map W>(self, f: F) -> Value { + Value { + inner: self.inner.map(f), + } + } + + /// Returns [`Value::unknown()`] if the value is [`Value::unknown()`], otherwise calls + /// `f` with the wrapped value and returns the result. 
+ pub fn and_then Value>(self, f: F) -> Value { + match self.inner { + Some(v) => f(v), + None => Value::unknown(), + } + } + + /// Zips `self` with another `Value`. + /// + /// If `self` is `Value::known(s)` and `other` is `Value::known(o)`, this method + /// returns `Value::known((s, o))`. Otherwise, [`Value::unknown()`] is returned. + pub fn zip(self, other: Value) -> Value<(V, W)> { + Value { + inner: self.inner.zip(other.inner), + } + } +} + +impl Value<(V, W)> { + /// Unzips a value containing a tuple of two values. + /// + /// If `self` is `Value::known((a, b)), this method returns + /// `(Value::known(a), Value::known(b))`. Otherwise, + /// `(Value::unknown(), Value::unknown())` is returned. + pub fn unzip(self) -> (Value, Value) { + match self.inner { + Some((a, b)) => (Value::known(a), Value::known(b)), + None => (Value::unknown(), Value::unknown()), + } + } +} + +impl Value<&V> { + /// Maps a `Value<&V>` to a `Value` by copying the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn copied(self) -> Value + where + V: Copy, + { + Value { + inner: self.inner.copied(), + } + } + + /// Maps a `Value<&V>` to a `Value` by cloning the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn cloned(self) -> Value + where + V: Clone, + { + Value { + inner: self.inner.cloned(), + } + } +} + +impl Value<&mut V> { + /// Maps a `Value<&mut V>` to a `Value` by copying the contents of the value. + #[must_use = "`self` will be dropped if the result is not used"] + pub fn copied(self) -> Value + where + V: Copy, + { + Value { + inner: self.inner.copied(), + } + } + + /// Maps a `Value<&mut V>` to a `Value` by cloning the contents of the value. 
+ #[must_use = "`self` will be dropped if the result is not used"] + pub fn cloned(self) -> Value + where + V: Clone, + { + Value { + inner: self.inner.cloned(), + } + } +} + +impl Value<[V; LEN]> { + /// Transposes a `Value<[V; LEN]>` into a `[Value; LEN]`. + /// + /// [`Value::unknown()`] will be mapped to `[Value::unknown(); LEN]`. + pub fn transpose_array(self) -> [Value; LEN] { + let mut ret = [Value::unknown(); LEN]; + if let Some(arr) = self.inner { + for (entry, value) in ret.iter_mut().zip(arr) { + *entry = Value::known(value); + } + } + ret + } +} + +impl Value +where + I: IntoIterator, + I::IntoIter: ExactSizeIterator, +{ + /// Transposes a `Value>` into a `Vec>`. + /// + /// [`Value::unknown()`] will be mapped to `vec![Value::unknown(); length]`. + /// + /// # Panics + /// + /// Panics if `self` is `Value::known(values)` and `values.len() != length`. + pub fn transpose_vec(self, length: usize) -> Vec> { + match self.inner { + Some(values) => { + let values = values.into_iter(); + assert_eq!(values.len(), length); + values.map(Value::known).collect() + } + None => (0..length).map(|_| Value::unknown()).collect(), + } + } +} + +// +// FromIterator +// + +impl> FromIterator> for Value { + /// Takes each element in the [`Iterator`]: if it is [`Value::unknown()`], no further + /// elements are taken, and the [`Value::unknown()`] is returned. Should no + /// [`Value::unknown()`] occur, a container of type `V` containing the values of each + /// [`Value`] is returned. 
+ fn from_iter>>(iter: I) -> Self { + Self { + inner: iter.into_iter().map(|v| v.inner).collect(), + } + } +} + +// +// Neg +// + +impl Neg for Value { + type Output = Value; + + fn neg(self) -> Self::Output { + Value { + inner: self.inner.map(|v| -v), + } + } +} + +// +// Add +// + +impl Add for Value +where + V: Add, +{ + type Output = Value; + + fn add(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for &Value +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a + b), + } + } +} + +impl Add> for Value +where + for<'v> V: Add<&'v V, Output = O>, +{ + type Output = Value; + + fn add(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add> for Value<&V> +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add<&Value> for Value +where + for<'v> V: Add<&'v V, Output = O>, +{ + type Output = Value; + + fn add(self, rhs: &Self) -> Self::Output { + self + rhs.as_ref() + } +} + +impl Add> for &Value +where + for<'v> &'v V: Add, +{ + type Output = Value; + + fn add(self, rhs: Value) -> Self::Output { + self.as_ref() + rhs + } +} + +// +// Sub +// + +impl Sub for Value +where + V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for &Value +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a - b), + } + } +} + +impl Sub> for Value +where + for<'v> V: Sub<&'v V, Output = O>, +{ + type Output = Value; + + fn 
sub(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub> for Value<&V> +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub<&Value> for Value +where + for<'v> V: Sub<&'v V, Output = O>, +{ + type Output = Value; + + fn sub(self, rhs: &Self) -> Self::Output { + self - rhs.as_ref() + } +} + +impl Sub> for &Value +where + for<'v> &'v V: Sub, +{ + type Output = Value; + + fn sub(self, rhs: Value) -> Self::Output { + self.as_ref() - rhs + } +} + +// +// Mul +// + +impl Mul for Value +where + V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Self) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for &Value +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Self) -> Self::Output { + Value { + inner: self + .inner + .as_ref() + .zip(rhs.inner.as_ref()) + .map(|(a, b)| a * b), + } + } +} + +impl Mul> for Value +where + for<'v> V: Mul<&'v V, Output = O>, +{ + type Output = Value; + + fn mul(self, rhs: Value<&V>) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul> for Value<&V> +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul<&Value> for Value +where + for<'v> V: Mul<&'v V, Output = O>, +{ + type Output = Value; + + fn mul(self, rhs: &Self) -> Self::Output { + self * rhs.as_ref() + } +} + +impl Mul> for &Value +where + for<'v> &'v V: Mul, +{ + type Output = Value; + + fn mul(self, rhs: Value) -> Self::Output { + self.as_ref() * rhs + } +} + +// +// Assigned +// + +impl From> for Value> { + fn from(value: Value) -> Self { + Self { + inner: value.inner.map(Assigned::from), + } + } +} 
+ +impl Add> for Value> { + type Output = Value>; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for Value> { + type Output = Value>; + + fn add(self, rhs: F) -> Self::Output { + self + Value::known(rhs) + } +} + +impl Add> for Value<&Assigned> { + type Output = Value>; + + fn add(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), + } + } +} + +impl Add for Value<&Assigned> { + type Output = Value>; + + fn add(self, rhs: F) -> Self::Output { + self + Value::known(rhs) + } +} + +impl Sub> for Value> { + type Output = Value>; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for Value> { + type Output = Value>; + + fn sub(self, rhs: F) -> Self::Output { + self - Value::known(rhs) + } +} + +impl Sub> for Value<&Assigned> { + type Output = Value>; + + fn sub(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), + } + } +} + +impl Sub for Value<&Assigned> { + type Output = Value>; + + fn sub(self, rhs: F) -> Self::Output { + self - Value::known(rhs) + } +} + +impl Mul> for Value> { + type Output = Value>; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for Value> { + type Output = Value>; + + fn mul(self, rhs: F) -> Self::Output { + self * Value::known(rhs) + } +} + +impl Mul> for Value<&Assigned> { + type Output = Value>; + + fn mul(self, rhs: Value) -> Self::Output { + Value { + inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), + } + } +} + +impl Mul for Value<&Assigned> { + type Output = Value>; + + fn mul(self, rhs: F) -> Self::Output { + self * Value::known(rhs) + } +} + +impl Value { + /// Returns the field element corresponding to this value. 
+ pub fn to_field(&self) -> Value> + where + for<'v> Assigned: From<&'v V>, + { + Value { + inner: self.inner.as_ref().map(|v| v.into()), + } + } + + /// Returns the field element corresponding to this value. + pub fn into_field(self) -> Value> + where + V: Into>, + { + Value { + inner: self.inner.map(|v| v.into()), + } + } + + /// Doubles this field element. + /// + /// # Examples + /// + /// If you have a `Value`, convert it to `Value>` first: + /// ``` + /// # use halo2curves::pasta::pallas::Base as F; + /// use halo2_proofs::{circuit::Value, plonk::Assigned}; + /// + /// let v = Value::known(F::from(2)); + /// let v: Value> = v.into(); + /// v.double(); + /// ``` + pub fn double(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().double()), + } + } + + /// Squares this field element. + pub fn square(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().square()), + } + } + + /// Cubes this field element. + pub fn cube(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().cube()), + } + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Value> + where + V: Borrow>, + { + Value { + inner: self.inner.as_ref().map(|v| v.borrow().invert()), + } + } +} + +impl Value> { + /// Evaluates this value directly, performing an unbatched inversion if necessary. + /// + /// If the denominator is zero, the returned value is zero. 
+ pub fn evaluate(self) -> Value { + Value { + inner: self.inner.map(|v| v.evaluate()), + } + } +} diff --git a/frontend/src/error.rs b/frontend/src/error.rs new file mode 100644 index 0000000000..362634f6c9 --- /dev/null +++ b/frontend/src/error.rs @@ -0,0 +1,136 @@ +use std::error; +use std::fmt; +use std::io; + +use crate::plonk::TableColumn; +use halo2_middleware::circuit::{Any, Column}; + +/// This is an error that could occur during proving or circuit synthesis. +// TODO: these errors need to be cleaned up +#[derive(Debug)] +pub enum Error { + /// This is an error that can occur during synthesis of the circuit, for + /// example, when the witness is not present. + Synthesis, + /// The provided instances do not match the circuit parameters. + InvalidInstances, + /// The constraint system is not satisfied. + ConstraintSystemFailure, + /// Out of bounds index passed to a backend + BoundsFailure, + /// Opening error + Opening, + /// Transcript error + Transcript(io::Error), + /// `k` is too small for the given circuit. + NotEnoughRowsAvailable { + /// The current value of `k` being used. + current_k: u32, + }, + /// Instance provided exceeds number of available rows + InstanceTooLarge, + /// Circuit synthesis requires global constants, but circuit configuration did not + /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. + /// + /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant + NotEnoughColumnsForConstants, + /// The instance sets up a copy constraint involving a column that has not been + /// included in the permutation. + ColumnNotInPermutation(Column), + /// An error relating to a lookup table. + TableError(TableError), + /// Generic error not covered by previous cases + Other(String), +} + +impl From for Error { + fn from(error: io::Error) -> Self { + // The only place we can get io::Error from is the transcript. 
+ Error::Transcript(error) + } +} + +impl Error { + /// Constructs an `Error::NotEnoughRowsAvailable`. + pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { + Error::NotEnoughRowsAvailable { current_k } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::Synthesis => write!(f, "General synthesis error"), + Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), + Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), + Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), + Error::Opening => write!(f, "Multi-opening proof was invalid"), + Error::Transcript(e) => write!(f, "Transcript error: {e}"), + Error::NotEnoughRowsAvailable { current_k } => write!( + f, + "k = {current_k} is too small for the given circuit. Try using a larger value of k", + ), + Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), + Error::NotEnoughColumnsForConstants => { + write!( + f, + "Too few fixed columns are enabled for global constants usage" + ) + } + Error::ColumnNotInPermutation(column) => write!( + f, + "Column {column:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", + ), + Error::TableError(error) => write!(f, "{error}"), + Error::Other(error) => write!(f, "Other: {error}"), + } + } +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Error::Transcript(e) => Some(e), + _ => None, + } + } +} + +/// This is an error that could occur during table synthesis. +#[derive(Debug)] +pub enum TableError { + /// A `TableColumn` has not been assigned. + ColumnNotAssigned(TableColumn), + /// A Table has columns of uneven lengths. 
+ UnevenColumnLengths((TableColumn, usize), (TableColumn, usize)), + /// Attempt to assign a used `TableColumn` + UsedColumn(TableColumn), + /// Attempt to overwrite a default value + OverwriteDefault(TableColumn, String, String), +} + +impl fmt::Display for TableError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableError::ColumnNotAssigned(col) => { + write!( + f, + "{col:?} not fully assigned. Help: assign a value at offset 0.", + ) + } + TableError::UnevenColumnLengths((col, col_len), (table, table_len)) => write!( + f, + "{col:?} has length {col_len} while {table:?} has length {table_len}", + ), + TableError::UsedColumn(col) => { + write!(f, "{col:?} has already been used") + } + TableError::OverwriteDefault(col, default, val) => { + write!( + f, + "Attempted to overwrite default value {default} with {val} in {col:?}", + ) + } + } + } +} diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index e69de29bb2..c6d33bfd6f 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -0,0 +1,6 @@ +#![allow(unused)] // TODO: Remove + +pub mod circuit; +pub mod error; +pub mod plonk; +pub mod poly; diff --git a/frontend/src/plonk.rs b/frontend/src/plonk.rs index 95b8fd0559..86da807379 100644 --- a/frontend/src/plonk.rs +++ b/frontend/src/plonk.rs @@ -1,3 +1,16 @@ +use crate::error::Error; +use crate::poly::batch_invert_assigned; +use crate::poly::Polynomial; +use halo2_middleware::circuit::{CompiledCircuitV2, PreprocessingV2}; +use halo2_middleware::ff::Field; + +mod circuit; +mod lookup; +pub mod permutation; +mod shuffle; + +pub use circuit::*; + /// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the /// circuit into its columns and the column configuration; as well as doing the fixed column and /// copy constraints assignments. 
The output of this function can then be used for the key diff --git a/frontend/src/plonk/circuit.rs b/frontend/src/plonk/circuit.rs index e69de29bb2..c90512a2ba 100644 --- a/frontend/src/plonk/circuit.rs +++ b/frontend/src/plonk/circuit.rs @@ -0,0 +1,1264 @@ +use super::{lookup, permutation, shuffle}; +use crate::circuit::layouter::SyncDeps; +use crate::circuit::{Layouter, Region, Value}; +use crate::error::Error; +use core::cmp::max; +use core::ops::{Add, Mul}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::ff::Field; +use halo2_middleware::metadata; +use halo2_middleware::plonk::Assigned; +use halo2_middleware::poly::Rotation; +use sealed::SealedPhase; +use std::collections::HashMap; +use std::iter::{Product, Sum}; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +mod compress_selectors; + +pub(crate) mod sealed { + /// Phase of advice column + #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] + pub struct Phase(pub(crate) u8); + + impl Phase { + pub fn prev(&self) -> Option { + self.0.checked_sub(1).map(Phase) + } + } + + impl SealedPhase for Phase { + fn to_sealed(self) -> Phase { + self + } + } + + /// Sealed trait to help keep `Phase` private. + pub trait SealedPhase { + fn to_sealed(self) -> Phase; + } +} + +/// Phase of advice column +pub trait Phase: SealedPhase {} + +impl Phase for P {} + +/// First phase +#[derive(Debug)] +pub struct FirstPhase; + +impl SealedPhase for super::FirstPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(0) + } +} + +/// Second phase +#[derive(Debug)] +pub struct SecondPhase; + +impl SealedPhase for super::SecondPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(1) + } +} + +/// Third phase +#[derive(Debug)] +pub struct ThirdPhase; + +impl SealedPhase for super::ThirdPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(2) + } +} + +/// A selector, representing a fixed boolean value per row of the circuit. 
+/// +/// Selectors can be used to conditionally enable (portions of) gates: +/// ``` +/// use halo2_proofs::poly::Rotation; +/// # use halo2curves::pasta::Fp; +/// # use halo2_proofs::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let a = meta.query_advice(a, Rotation::prev()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let s = meta.query_selector(s); +/// +/// // On rows where the selector is enabled, a is constrained to equal b. +/// // On rows where the selector is disabled, a and b can take any value. +/// vec![s * (a - b)] +/// }); +/// ``` +/// +/// Selectors are disabled on all rows by default, and must be explicitly enabled on each +/// row when required: +/// ``` +/// use halo2_proofs::{ +/// circuit::{Chip, Layouter, Value}, +/// plonk::{Advice, Column, Error, Selector}, +/// }; +/// use ff::Field; +/// # use halo2_proofs::plonk::Fixed; +/// +/// struct Config { +/// a: Column, +/// b: Column, +/// s: Selector, +/// } +/// +/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { +/// let config = chip.config(); +/// # let config: Config = todo!(); +/// layouter.assign_region(|| "bar", |mut region| { +/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; +/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; +/// config.s.enable(&mut region, 1) +/// })?; +/// Ok(()) +/// } +/// ``` +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Selector(pub(crate) usize, bool); + +impl Selector { + /// Enable this selector at the given offset within the given region. + pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { + region.enable_selector(|| "", self, offset) + } + + /// Is this selector "simple"? 
Simple selectors can only be multiplied + /// by expressions that contain no other simple selectors. + pub fn is_simple(&self) -> bool { + self.1 + } + + /// Returns index of this selector + pub fn index(&self) -> usize { + self.0 + } + + /// Return expression from selector + pub fn expr(&self) -> Expression { + Expression::Selector(*self) + } +} + +/// A fixed column of a lookup table. +/// +/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns +/// can currently only contain a single table, but they may be used in multiple lookup +/// arguments via [`ConstraintSystem::lookup`]. +/// +/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; +/// they cannot simultaneously be used as general fixed columns. +/// +/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct TableColumn { + /// The fixed column that this table column is stored in. + /// + /// # Security + /// + /// This inner column MUST NOT be exposed in the public API, or else chip developers + /// can load lookup tables into their circuits without default-value-filling the + /// columns, which can cause soundness bugs. + inner: Column, +} + +impl TableColumn { + /// Returns inner column + pub fn inner(&self) -> Column { + self.inner + } +} + +/// This trait allows a [`Circuit`] to direct some backend to assign a witness +/// for a constraint system. +pub trait Assignment { + /// Creates a new region and enters into it. + /// + /// Panics if we are currently in a region (if `exit_region` was not called). + /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. 
+ /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn enter_region(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Allows the developer to include an annotation for an specific column within a `Region`. + /// + /// This is usually useful for debugging circuit failures. + fn annotate_column(&mut self, annotation: A, column: Column) + where + A: FnOnce() -> AR, + AR: Into; + + /// Exits the current region. + /// + /// Panics if we are not currently in a region (if `enter_region` was not called). + /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. + /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn exit_region(&mut self); + + /// Enables a selector at the given row. + fn enable_selector( + &mut self, + annotation: A, + selector: &Selector, + row: usize, + ) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into; + + /// Queries the cell of an instance column at a particular absolute row. + /// + /// Returns the cell's value, if known. + fn query_instance(&self, column: Column, row: usize) -> Result, Error>; + + /// Assign an advice column value (witness) + fn assign_advice( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign a fixed value + fn assign_fixed( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign two cells to have the same value + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error>; + + /// Fills a fixed `column` starting from the given `row` with value `to`. 
+ fn fill_from_row( + &mut self, + column: Column, + row: usize, + to: Value>, + ) -> Result<(), Error>; + + /// Queries the value of the given challenge. + /// + /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. + fn get_challenge(&self, challenge: Challenge) -> Value; + + /// Creates a new (sub)namespace and enters into it. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Exits out of the existing namespace. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn pop_namespace(&mut self, gadget_name: Option); +} + +/// A floor planning strategy for a circuit. +/// +/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used +/// within. +pub trait FloorPlanner { + /// Given the provided `cs`, synthesize the given circuit. + /// + /// `constants` is the list of fixed columns that the layouter may use to assign + /// global constant values. These columns will all have been equality-enabled. + /// + /// Internally, a floor planner will perform the following operations: + /// - Instantiate a [`Layouter`] for this floor planner. + /// - Perform any necessary setup or measurement tasks, which may involve one or more + /// calls to `Circuit::default().synthesize(config, &mut layouter)`. + /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. 
+ fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error>; +} + +/// This is a trait that circuits provide implementations for so that the +/// backend prover can ask the circuit to synthesize using some given +/// [`ConstraintSystem`] implementation. +pub trait Circuit { + /// This is a configuration object that stores things like columns. + type Config: Clone; + /// The floor planner used for this circuit. This is an associated type of the + /// `Circuit` trait because its behaviour is circuit-critical. + type FloorPlanner: FloorPlanner; + /// Optional circuit configuration parameters. Requires the `circuit-params` feature. + #[cfg(feature = "circuit-params")] + type Params: Default; + + /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to + /// `None`). For most circuits, this will be equal to `Self::default()`. + fn without_witnesses(&self) -> Self; + + /// Returns a reference to the parameters that should be used to configure the circuit. + /// Requires the `circuit-params` feature. + #[cfg(feature = "circuit-params")] + fn params(&self) -> Self::Params { + Self::Params::default() + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. Takes a runtime parameter. The default + /// implementation calls `configure` ignoring the `_params` argument in order to easily support + /// circuits that don't use configuration parameters. + #[cfg(feature = "circuit-params")] + fn configure_with_params( + meta: &mut ConstraintSystem, + _params: Self::Params, + ) -> Self::Config { + Self::configure(meta) + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. + fn configure(meta: &mut ConstraintSystem) -> Self::Config; + + /// Given the provided `cs`, synthesize the circuit. 
The concrete type of + /// the caller will be different depending on the context, and they may or + /// may not expect to have a witness present. + fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl FixedQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, + /// Phase of this advice column + pub(crate) phase: sealed::Phase, +} + +impl AdviceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } + + /// Phase of this advice column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl InstanceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. 
+#[derive(Clone, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a virtual selector + Selector(Selector), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQuery), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQuery), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQuery), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl Expression { + /// Make side effects + pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { + match self { + Expression::Constant(_) => (), + Expression::Selector(selector) => { + if !cells.queried_selectors.contains(selector) { + cells.queried_selectors.push(*selector); + } + } + Expression::Fixed(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Fixed, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); + } + } + Expression::Advice(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Advice { phase: query.phase }, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_advice_index(col, query.rotation)); + } + } + Expression::Instance(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Instance, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_instance_index(col, query.rotation)); + } + } + Expression::Challenge(_) => (), + Expression::Negated(a) => a.query_cells(cells), 
+ Expression::Sum(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Product(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Scaled(a, _) => a.query_cells(cells), + }; + } + + /// Evaluate the polynomial using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + 
challenge, + negated, + sum, + product, + scaled, + ); + product(a, b) + } + Expression::Scaled(a, f) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + scaled(a, *f) + } + } + } + + /// Evaluate the polynomial lazily using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate_lazy( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + zero: &T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + let b = b.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let (a, b) = if a.complexity() <= b.complexity() { + (a, b) + } else { + (b, a) + }; + let a = a.evaluate_lazy( + constant, + 
selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + + if a == *zero { + a + } else { + let b = b.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + product(a, b) + } + } + Expression::Scaled(a, f) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + scaled(a, *f) + } + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), + Expression::Fixed(query) => { + write!( + writer, + "fixed[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Advice(query) => { + write!( + writer, + "advice[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Instance(query) => { + write!( + writer, + "instance[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Challenge(challenge) => { + write!(writer, "challenge[{}]", challenge.index()) + } + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Scaled(a, f) => { + a.write_identifier(writer)?; + write!(writer, "*{f:?}") + } + } + } + + /// Identifier for this expression. 
Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). + pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.degree(), + Expression::Sum(a, b) => max(a.degree(), b.degree()), + Expression::Product(a, b) => a.degree() + b.degree(), + Expression::Scaled(poly, _) => poly.degree(), + } + } + + /// Approximate the computational complexity of this expression. + pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + Expression::Scaled(poly, _) => poly.complexity() + 30, + } + } + + /// Square this expression. + pub fn square(self) -> Self { + self.clone() * self + } + + /// Returns whether or not this expression contains a simple `Selector`. 
+ fn contains_simple_selector(&self) -> bool { + self.evaluate( + &|_| false, + &|selector| selector.is_simple(), + &|_| false, + &|_| false, + &|_| false, + &|_| false, + &|a| a, + &|a, b| a || b, + &|a, b| a || b, + &|a, _| a, + ) + } + + /// Extracts a simple selector from this gate, if present + fn extract_simple_selector(&self) -> Option { + let op = |a, b| match (a, b) { + (Some(a), None) | (None, Some(a)) => Some(a), + (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), + _ => None, + }; + + self.evaluate( + &|_| None, + &|selector| { + if selector.is_simple() { + Some(selector) + } else { + None + } + }, + &|_| None, + &|_| None, + &|_| None, + &|_| None, + &|a| a, + &op, + &op, + &|a, _| a, + ) + } +} + +impl std::fmt::Debug for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), + Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), + // Skip enum variant and print query struct directly to maintain backwards compatibility. + Expression::Fixed(query) => { + let mut debug_struct = f.debug_struct("Fixed"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Advice(query) => { + let mut debug_struct = f.debug_struct("Advice"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation); + // Only show advice's phase if it's not in first phase. 
+ if query.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &query.phase); + } + debug_struct.finish() + } + Expression::Instance(query) => { + let mut debug_struct = f.debug_struct("Instance"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Challenge(challenge) => { + f.debug_tuple("Challenge").field(challenge).finish() + } + Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), + Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), + Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), + Expression::Scaled(poly, scalar) => { + f.debug_tuple("Scaled").field(poly).field(scalar).finish() + } + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in an addition"); + } + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + fn sub(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in a subtraction"); + } + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() && rhs.contains_simple_selector() { + panic!("attempted to multiply two expressions containing simple selectors"); + } + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for 
Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Scaled(Box::new(self), rhs) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} + +/// An individual polynomial constraint. +/// +/// These are returned by the closures passed to `ConstraintSystem::create_gate`. +#[derive(Debug)] +pub struct Constraint { + name: String, + poly: Expression, +} + +impl From> for Constraint { + fn from(poly: Expression) -> Self { + Constraint { + name: "".to_string(), + poly, + } + } +} + +impl> From<(S, Expression)> for Constraint { + fn from((name, poly): (S, Expression)) -> Self { + Constraint { + name: name.as_ref().to_string(), + poly, + } + } +} + +impl From> for Vec> { + fn from(poly: Expression) -> Self { + vec![Constraint { + name: "".to_string(), + poly, + }] + } +} + +/// A set of polynomial constraints with a common selector. 
+/// +/// ``` +/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; +/// use halo2curves::pasta::Fp; +/// # use halo2_proofs::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let next = meta.query_advice(a, Rotation::next()); +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s_ternary = meta.query_selector(s); +/// +/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); +/// +/// Constraints::with_selector( +/// s_ternary, +/// std::array::IntoIter::new([ +/// ("a is boolean", a.clone() * one_minus_a.clone()), +/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), +/// ]), +/// ) +/// }); +/// ``` +/// +/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to +/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, +/// you can pass an array directly. +#[derive(Debug)] +pub struct Constraints>, Iter: IntoIterator> { + selector: Expression, + constraints: Iter, +} + +impl>, Iter: IntoIterator> Constraints { + /// Constructs a set of constraints that are controlled by the given selector. + /// + /// Each constraint `c` in `iterator` will be converted into the constraint + /// `selector * c`. 
+ pub fn with_selector(selector: Expression, constraints: Iter) -> Self { + Constraints { + selector, + constraints, + } + } +} + +fn apply_selector_to_constraint>>( + (selector, c): (Expression, C), +) -> Constraint { + let constraint: Constraint = c.into(); + Constraint { + name: constraint.name, + poly: selector * constraint.poly, + } +} + +type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; +type ConstraintsIterator = std::iter::Map< + std::iter::Zip>, I>, + ApplySelectorToConstraint, +>; + +impl>, Iter: IntoIterator> IntoIterator + for Constraints +{ + type Item = Constraint; + type IntoIter = ConstraintsIterator; + + fn into_iter(self) -> Self::IntoIter { + std::iter::repeat(self.selector) + .zip(self.constraints) + .map(apply_selector_to_constraint) + } +} + +/// Gate +#[derive(Clone, Debug)] +pub struct Gate { + name: String, + constraint_names: Vec, + polys: Vec>, + /// We track queried selectors separately from other cells, so that we can use them to + /// trigger debug checks on gates. + queried_selectors: Vec, + queried_cells: Vec, +} + +impl Gate { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. + pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } + + pub(crate) fn queried_selectors(&self) -> &[Selector] { + &self.queried_selectors + } + + pub(crate) fn queried_cells(&self) -> &[VirtualCell] { + &self.queried_cells + } +} + +/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup +/// table. 
+#[derive(Debug)] +pub struct VirtualCells<'a, F: Field> { + meta: &'a mut ConstraintSystem, + queried_selectors: Vec, + queried_cells: Vec, +} + +impl<'a, F: Field> VirtualCells<'a, F> { + fn new(meta: &'a mut ConstraintSystem) -> Self { + VirtualCells { + meta, + queried_selectors: vec![], + queried_cells: vec![], + } + } + + /// Query a selector at the current position. + pub fn query_selector(&mut self, selector: Selector) -> Expression { + self.queried_selectors.push(selector); + Expression::Selector(selector) + } + + /// Query a fixed column at a relative position + pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Fixed(FixedQuery { + index: Some(self.meta.query_fixed_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an advice column at a relative position + pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Advice(AdviceQuery { + index: Some(self.meta.query_advice_index(column, at)), + column_index: column.index, + rotation: at, + phase: column.column_type().phase, + }) + } + + /// Query an instance column at a relative position + pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Instance(InstanceQuery { + index: Some(self.meta.query_instance_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an Any column at a relative position + pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { + let column = column.into(); + match column.column_type() { + Any::Advice(_) => self.query_advice(Column::::try_from(column).unwrap(), at), + Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), + Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), + } + } + + /// Query a 
challenge + pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { + Expression::Challenge(challenge) + } +} + +/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset +/// within a custom gate. +#[derive(Clone, Debug)] +pub struct VirtualCell { + pub(crate) column: Column, + pub(crate) rotation: Rotation, +} + +impl>> From<(Col, Rotation)> for VirtualCell { + fn from((column, rotation): (Col, Rotation)) -> Self { + VirtualCell { + column: column.into(), + rotation, + } + } +} diff --git a/frontend/src/plonk/circuit/compress_selectors.rs b/frontend/src/plonk/circuit/compress_selectors.rs new file mode 100644 index 0000000000..053ebe3178 --- /dev/null +++ b/frontend/src/plonk/circuit/compress_selectors.rs @@ -0,0 +1,352 @@ +use super::Expression; +use ff::Field; + +/// This describes a selector and where it is activated. +#[derive(Debug, Clone)] +pub struct SelectorDescription { + /// The selector that this description references, by index. + pub selector: usize, + + /// The vector of booleans defining which rows are active for this selector. + pub activations: Vec, + + /// The maximum degree of a gate involving this selector, including the + /// virtual selector itself. This means this will be at least 1 for any + /// expression containing a simple selector, even if that selector is not + /// multiplied by anything. + pub max_degree: usize, +} + +/// This describes the assigned combination of a particular selector as well as +/// the expression it should be substituted with. +#[derive(Debug, Clone)] +pub struct SelectorAssignment { + /// The selector that this structure references, by index. 
+ pub selector: usize, + + /// The combination this selector was assigned to + pub combination_index: usize, + + /// The expression we wish to substitute with + pub expression: Expression, +} + +/// This function takes a vector that defines each selector as well as a closure +/// used to allocate new fixed columns, and returns the assignment of each +/// combination as well as details about each selector assignment. +/// +/// This function takes +/// * `selectors`, a vector of `SelectorDescription`s that describe each +/// selector +/// * `max_degree`, the maximum allowed degree of any gate +/// * `allocate_fixed_columns`, a closure that constructs a new fixed column and +/// queries it at Rotation::cur(), returning the expression +/// +/// and returns `Vec>` containing the assignment of each new fixed column +/// (which each correspond to a combination) as well as a vector of +/// `SelectorAssignment` that the caller can use to perform the necessary +/// substitutions to the constraint system. +/// +/// This function is completely deterministic. +pub fn process( + mut selectors: Vec, + max_degree: usize, + mut allocate_fixed_column: E, +) -> (Vec>, Vec>) +where + E: FnMut() -> Expression, +{ + if selectors.is_empty() { + // There is nothing to optimize. + return (vec![], vec![]); + } + + // The length of all provided selectors must be the same. + let n = selectors[0].activations.len(); + assert!(selectors.iter().all(|a| a.activations.len() == n)); + + let mut combination_assignments = vec![]; + let mut selector_assignments = vec![]; + + // All provided selectors of degree 0 are assumed to be either concrete + // selectors or do not appear in a gate. Let's address these first. + selectors.retain(|selector| { + if selector.max_degree == 0 { + // This is a complex selector, or a selector that does not appear in any + // gate constraint. 
+ let expression = allocate_fixed_column(); + + let combination_assignment = selector + .activations + .iter() + .map(|b| if *b { F::ONE } else { F::ZERO }) + .collect::>(); + let combination_index = combination_assignments.len(); + combination_assignments.push(combination_assignment); + selector_assignments.push(SelectorAssignment { + selector: selector.selector, + combination_index, + expression, + }); + + false + } else { + true + } + }); + + // All of the remaining `selectors` are simple. Let's try to combine them. + // First, we compute the exclusion matrix that has (j, k) = true if selector + // j and selector k conflict -- that is, they are both enabled on the same + // row. This matrix is symmetric and the diagonal entries are false, so we + // only need to store the lower triangular entries. + let mut exclusion_matrix = (0..selectors.len()) + .map(|i| vec![false; i]) + .collect::>(); + + for (i, rows) in selectors + .iter() + .map(|selector| &selector.activations) + .enumerate() + { + // Loop over the selectors previous to this one + for (j, other_selector) in selectors.iter().enumerate().take(i) { + // Look at what selectors are active at the same row + if rows + .iter() + .zip(other_selector.activations.iter()) + .any(|(l, r)| l & r) + { + // Mark them as incompatible + exclusion_matrix[i][j] = true; + } + } + } + + // Simple selectors that we've added to combinations already. + let mut added = vec![false; selectors.len()]; + + for (i, selector) in selectors.iter().enumerate() { + if added[i] { + continue; + } + added[i] = true; + assert!(selector.max_degree <= max_degree); + // This is used to keep track of the largest degree gate involved in the + // combination so far. We subtract by one to omit the virtual selector + // which will be substituted by the caller with the expression we give + // them. 
+ let mut d = selector.max_degree - 1; + let mut combination = vec![selector]; + let mut combination_added = vec![i]; + + // Try to find other selectors that can join this one. + 'try_selectors: for (j, selector) in selectors.iter().enumerate().skip(i + 1) { + if d + combination.len() == max_degree { + // Short circuit; nothing can be added to this + // combination. + break 'try_selectors; + } + + // Skip selectors that have been added to previous combinations + if added[j] { + continue 'try_selectors; + } + + // Is this selector excluded from co-existing in the same + // combination with any of the other selectors so far? + for &i in combination_added.iter() { + if exclusion_matrix[j][i] { + continue 'try_selectors; + } + } + + // Can the new selector join the combination? Reminder: we use + // selector.max_degree - 1 to omit the influence of the virtual + // selector on the degree, as it will be substituted. + let new_d = std::cmp::max(d, selector.max_degree - 1); + if new_d + combination.len() + 1 > max_degree { + // Guess not. + continue 'try_selectors; + } + + d = new_d; + combination.push(selector); + combination_added.push(j); + added[j] = true; + } + + // Now, compute the selector and combination assignments. + let mut combination_assignment = vec![F::ZERO; n]; + let combination_len = combination.len(); + let combination_index = combination_assignments.len(); + let query = allocate_fixed_column(); + + let mut assigned_root = F::ONE; + selector_assignments.extend(combination.into_iter().map(|selector| { + // Compute the expression for substitution. This produces an expression of the + // form + // q * Prod[i = 1..=combination_len, i != assigned_root](i - q) + // + // which is non-zero only on rows where `combination_assignment` is set to + // `assigned_root`. In particular, rows set to 0 correspond to all selectors + // being disabled. 
+ let mut expression = query.clone(); + let mut root = F::ONE; + for _ in 0..combination_len { + if root != assigned_root { + expression = expression * (Expression::Constant(root) - query.clone()); + } + root += F::ONE; + } + + // Update the combination assignment + for (combination, selector) in combination_assignment + .iter_mut() + .zip(selector.activations.iter()) + { + // This will not overwrite another selector's activations because + // we have ensured that selectors are disjoint. + if *selector { + *combination = assigned_root; + } + } + + assigned_root += F::ONE; + + SelectorAssignment { + selector: selector.selector, + combination_index, + expression, + } + })); + combination_assignments.push(combination_assignment); + } + + (combination_assignments, selector_assignments) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{plonk::FixedQuery, poly::Rotation}; + use halo2curves::pasta::Fp; + use proptest::collection::{vec, SizeRange}; + use proptest::prelude::*; + + prop_compose! { + fn arb_selector(assignment_size: usize, max_degree: usize) + (degree in 0..max_degree, + assignment in vec(any::(), assignment_size)) + -> (usize, Vec) { + (degree, assignment) + } + } + + prop_compose! { + fn arb_selector_list(assignment_size: usize, max_degree: usize, num_selectors: impl Into) + (list in vec(arb_selector(assignment_size, max_degree), num_selectors)) + -> Vec + { + list.into_iter().enumerate().map(|(i, (max_degree, activations))| { + SelectorDescription { + selector: i, + activations, + max_degree, + } + }).collect() + } + } + + prop_compose! { + fn arb_instance(max_assignment_size: usize, + max_degree: usize, + max_selectors: usize) + (assignment_size in 1..max_assignment_size, + degree in 1..max_degree, + num_selectors in 1..max_selectors) + (list in arb_selector_list(assignment_size, degree, num_selectors), + degree in Just(degree)) + -> (Vec, usize) + { + (list, degree) + } + } + + proptest! 
{ + #![proptest_config(ProptestConfig::with_cases(10000))] + #[test] + fn test_selector_combination((selectors, max_degree) in arb_instance(10, 10, 15)) { + let mut query = 0; + let (combination_assignments, selector_assignments) = + process::(selectors.clone(), max_degree, || { + let tmp = Expression::Fixed(FixedQuery { + index: Some(query), + column_index: query, + rotation: Rotation::cur(), + }); + query += 1; + tmp + }); + + { + let mut selectors_seen = vec![]; + assert_eq!(selectors.len(), selector_assignments.len()); + for selector in &selector_assignments { + // Every selector should be assigned to a combination + assert!(selector.combination_index < combination_assignments.len()); + assert!(!selectors_seen.contains(&selector.selector)); + selectors_seen.push(selector.selector); + } + } + + // Test that, for each selector, the provided expression + // 1. evaluates to zero on rows where the selector's activation is off + // 2. evaluates to nonzero on rows where the selector's activation is on + // 3. 
is of degree d such that d + (selector.max_degree - 1) <= max_degree + // OR selector.max_degree is zero + for selector in selector_assignments { + assert_eq!( + selectors[selector.selector].activations.len(), + combination_assignments[selector.combination_index].len() + ); + for (&activation, &assignment) in selectors[selector.selector] + .activations + .iter() + .zip(combination_assignments[selector.combination_index].iter()) + { + let eval = selector.expression.evaluate( + &|c| c, + &|_| panic!("should not occur in returned expressions"), + &|query| { + // Should be the correct combination in the expression + assert_eq!(selector.combination_index, query.index.unwrap()); + assignment + }, + &|_| panic!("should not occur in returned expressions"), + &|_| panic!("should not occur in returned expressions"), + &|_| panic!("should not occur in returned expressions"), + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + &|a, f| a * f, + ); + + if activation { + assert!(!eval.is_zero_vartime()); + } else { + assert!(eval.is_zero_vartime()); + } + } + + let expr_degree = selector.expression.degree(); + assert!(expr_degree <= max_degree); + if selectors[selector.selector].max_degree > 0 { + assert!( + (selectors[selector.selector].max_degree - 1) + expr_degree <= max_degree + ); + } + } + } + } +} diff --git a/frontend/src/plonk/lookup.rs b/frontend/src/plonk/lookup.rs new file mode 100644 index 0000000000..5182850dc0 --- /dev/null +++ b/frontend/src/plonk/lookup.rs @@ -0,0 +1,10 @@ +use super::circuit::Expression; +use halo2_middleware::ff::Field; + +/// Expressions involved in a lookup argument, with a name as metadata. 
+#[derive(Clone)] +pub struct Argument { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) table_expressions: Vec>, +} diff --git a/frontend/src/plonk/permutation.rs b/frontend/src/plonk/permutation.rs new file mode 100644 index 0000000000..94ad432394 --- /dev/null +++ b/frontend/src/plonk/permutation.rs @@ -0,0 +1,8 @@ +use halo2_middleware::circuit::{Any, Column}; + +/// A permutation argument. +#[derive(Debug, Clone)] +pub struct Argument { + /// A sequence of columns involved in the argument. + pub(super) columns: Vec>, +} diff --git a/frontend/src/plonk/shuffle.rs b/frontend/src/plonk/shuffle.rs new file mode 100644 index 0000000000..c109eea9a6 --- /dev/null +++ b/frontend/src/plonk/shuffle.rs @@ -0,0 +1,10 @@ +use super::circuit::Expression; +use halo2_middleware::ff::Field; + +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone)] +pub struct Argument { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} diff --git a/frontend/src/poly.rs b/frontend/src/poly.rs new file mode 100644 index 0000000000..7be272ca0a --- /dev/null +++ b/frontend/src/poly.rs @@ -0,0 +1,85 @@ +use halo2_middleware::ff::{BatchInvert, Field}; +use halo2_middleware::plonk::Assigned; +use std::fmt::Debug; +use std::marker::PhantomData; + +// TODO: We only need the batch_invert_assigned from all this code, probably we can simplify this a +// lot + +/// Represents a univariate polynomial defined over a field and a particular +/// basis. +#[derive(Clone, Debug)] +pub struct Polynomial { + pub(crate) values: Vec, + pub(crate) _marker: PhantomData, +} + +impl Polynomial { + pub(crate) fn new_empty(size: usize, zero: F) -> Self { + Polynomial { + values: vec![zero; size], + _marker: PhantomData, + } + } +} + +/// The basis over which a polynomial is described. 
+pub trait Basis: Copy + Debug + Send + Sync {} + +/// The polynomial is defined as coefficients +#[derive(Clone, Copy, Debug)] +pub struct Coeff; +impl Basis for Coeff {} + +/// The polynomial is defined as coefficients of Lagrange basis polynomials +#[derive(Clone, Copy, Debug)] +pub struct LagrangeCoeff; +impl Basis for LagrangeCoeff {} + +pub(crate) fn batch_invert_assigned( + assigned: Vec, LagrangeCoeff>>, +) -> Vec> { + let mut assigned_denominators: Vec<_> = assigned + .iter() + .map(|f| { + f.values + .iter() + .map(|value| value.denominator()) + .collect::>() + }) + .collect(); + + assigned_denominators + .iter_mut() + .flat_map(|f| { + f.iter_mut() + // If the denominator is trivial, we can skip it, reducing the + // size of the batch inversion. + .filter_map(|d| d.as_mut()) + }) + .batch_invert(); + + assigned + .iter() + .zip(assigned_denominators) + .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) + .collect() +} + +impl Polynomial, LagrangeCoeff> { + pub(crate) fn invert( + &self, + inv_denoms: impl Iterator + ExactSizeIterator, + ) -> Polynomial { + assert_eq!(inv_denoms.len(), self.values.len()); + Polynomial { + values: self + .values + .iter() + .zip(inv_denoms) + .map(|(a, inv_den)| a.numerator() * inv_den) + .collect(), + _marker: self._marker, + } + } +} diff --git a/middleware/src/lib.rs b/middleware/src/lib.rs index b523ace63d..c1362ccae0 100644 --- a/middleware/src/lib.rs +++ b/middleware/src/lib.rs @@ -2,6 +2,7 @@ pub mod circuit; pub mod lookup; pub mod metadata; pub mod permutation; +pub mod plonk; pub mod poly; pub mod shuffle; diff --git a/middleware/src/plonk.rs b/middleware/src/plonk.rs new file mode 100644 index 0000000000..ea0b1c0e78 --- /dev/null +++ b/middleware/src/plonk.rs @@ -0,0 +1,665 @@ +use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; + +use ff::Field; + +/// A value assigned to a cell within a circuit. 
+/// +/// Stored as a fraction, so the backend can use batch inversion. +/// +/// A denominator of zero maps to an assigned value of zero. +#[derive(Clone, Copy, Debug)] +pub enum Assigned { + /// The field element zero. + Zero, + /// A value that does not require inversion to evaluate. + Trivial(F), + /// A value stored as a fraction to enable batch inversion. + Rational(F, F), +} + +impl From<&Assigned> for Assigned { + fn from(val: &Assigned) -> Self { + *val + } +} + +impl From<&F> for Assigned { + fn from(numerator: &F) -> Self { + Assigned::Trivial(*numerator) + } +} + +impl From for Assigned { + fn from(numerator: F) -> Self { + Assigned::Trivial(numerator) + } +} + +impl From<(F, F)> for Assigned { + fn from((numerator, denominator): (F, F)) -> Self { + Assigned::Rational(numerator, denominator) + } +} + +impl PartialEq for Assigned { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + // At least one side is directly zero. + (Self::Zero, Self::Zero) => true, + (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + x.is_zero_vartime() + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, + (Self::Trivial(x), Self::Rational(numerator, denominator)) + | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { + &(*x * denominator) == numerator + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, + } + } +} + +impl Eq for Assigned {} + +impl Neg for Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + match self { + Self::Zero => Self::Zero, + Self::Trivial(numerator) => Self::Trivial(-numerator), + Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), + } + } +} + +impl Neg for &Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + -*self + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + // One side is directly zero. + (Self::Zero, _) => rhs, + (_, Self::Zero) => self, + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + other + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator + denominator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + self + Self::Trivial(rhs) + } +} + +impl Add for &Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for Assigned { + type Output = Assigned; + fn add(self, rhs: &Self) -> Assigned { + self + *rhs + } +} + +impl Add> for &Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for &Assigned { + type Output = Assigned; + fn add(self, rhs: &Assigned) -> Assigned { + *self + *rhs + } +} + +impl AddAssign for Assigned { + fn add_assign(&mut self, rhs: Self) { + *self = *self + rhs; + } +} + +impl AddAssign<&Assigned> for Assigned { + fn add_assign(&mut self, rhs: &Self) { + *self = *self + rhs; + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + self + (-rhs) + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + self + (-rhs) + } +} + +impl Sub for &Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for Assigned { + type Output = Assigned; + fn sub(self, rhs: &Self) -> Assigned { + self - *rhs + } +} + +impl Sub> for &Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for &Assigned { + type 
Output = Assigned; + fn sub(self, rhs: &Assigned) -> Assigned { + *self - *rhs + } +} + +impl SubAssign for Assigned { + fn sub_assign(&mut self, rhs: Self) { + *self = *self - rhs; + } +} + +impl SubAssign<&Assigned> for Assigned { + fn sub_assign(&mut self, rhs: &Self) { + *self = *self - rhs; + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + (Self::Zero, _) | (_, Self::Zero) => Self::Zero, + (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + self * Self::Trivial(rhs) + } +} + +impl Mul for &Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + *self * rhs + } +} + +impl Mul<&Assigned> for Assigned { + type Output = Assigned; + fn mul(self, rhs: &Assigned) -> Assigned { + self * *rhs + } +} + +impl MulAssign for Assigned { + fn mul_assign(&mut self, rhs: Self) { + *self = *self * rhs; + } +} + +impl MulAssign<&Assigned> for Assigned { + fn mul_assign(&mut self, rhs: &Self) { + *self = *self * rhs; + } +} + +impl Assigned { + /// Returns the numerator. + pub fn numerator(&self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => *x, + Self::Rational(numerator, _) => *numerator, + } + } + + /// Returns the denominator, if non-trivial. + pub fn denominator(&self) -> Option { + match self { + Self::Zero => None, + Self::Trivial(_) => None, + Self::Rational(_, denominator) => Some(*denominator), + } + } + + /// Returns true iff this element is zero. 
+ pub fn is_zero_vartime(&self) -> bool { + match self { + Self::Zero => true, + Self::Trivial(x) => x.is_zero_vartime(), + // Assigned maps x/0 -> 0. + Self::Rational(numerator, denominator) => { + numerator.is_zero_vartime() || denominator.is_zero_vartime() + } + } + } + + /// Doubles this element. + #[must_use] + pub fn double(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.double()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.double(), *denominator) + } + } + } + + /// Squares this element. + #[must_use] + pub fn square(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.square()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.square(), denominator.square()) + } + } + } + + /// Cubes this element. + #[must_use] + pub fn cube(&self) -> Self { + self.square() * self + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Rational(F::ONE, *x), + Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), + } + } + + /// Evaluates this assigned value directly, performing an unbatched inversion if + /// necessary. + /// + /// If the denominator is zero, this returns zero. + pub fn evaluate(self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => x, + Self::Rational(numerator, denominator) => { + if denominator == F::ONE { + numerator + } else { + numerator * denominator.invert().unwrap_or(F::ZERO) + } + } + } + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::Assigned; + // We use (numerator, denominator) in the comments below to denote a rational. 
+ #[test] + fn add_trivial_to_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // 2 + (1,0) = 2 + 0 = 2 + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn add_rational_to_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) + (1,0) = (1,2) + 0 = (1,2) + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn sub_trivial_from_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - 2 = 0 - 2 = -2 + // This fails if subtraction is implemented using normal rules for rationals. + assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // 2 - (1,0) = 2 - 0 = 2 + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn sub_rational_from_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - (1,2) = 0 - (1,2) = -(1,2) + // This fails if subtraction is implemented using normal rules for rationals. 
+ assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // (1,2) - (1,0) = (1,2) - 0 = (1,2) + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn mul_rational_by_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) * (1,0) = (1,2) * 0 = 0 + assert_eq!((a * b).evaluate(), Fp::zero()); + + // (1,0) * (1,2) = 0 * (1,2) = 0 + assert_eq!((b * a).evaluate(), Fp::zero()); + } +} + +#[cfg(test)] +mod proptests { + use std::{ + cmp, + ops::{Add, Mul, Neg, Sub}, + }; + + use group::ff::Field; + use halo2curves::pasta::Fp; + use proptest::{collection::vec, prelude::*, sample::select}; + + use super::Assigned; + + trait UnaryOperand: Neg { + fn double(&self) -> Self; + fn square(&self) -> Self; + fn cube(&self) -> Self; + fn inv0(&self) -> Self; + } + + impl UnaryOperand for F { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert().unwrap_or(F::ZERO) + } + } + + impl UnaryOperand for Assigned { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert() + } + } + + #[derive(Clone, Debug)] + enum UnaryOperator { + Neg, + Double, + Square, + Cube, + Inv0, + } + + const UNARY_OPERATORS: &[UnaryOperator] = &[ + UnaryOperator::Neg, + UnaryOperator::Double, + UnaryOperator::Square, + UnaryOperator::Cube, + UnaryOperator::Inv0, + ]; + + impl UnaryOperator { + fn apply(&self, a: F) -> F { + match self { + Self::Neg => -a, + Self::Double => a.double(), + Self::Square => a.square(), + Self::Cube => a.cube(), + Self::Inv0 => a.inv0(), + } + } + } + + trait BinaryOperand: Sized + Add + Sub + Mul {} + impl BinaryOperand for F {} + impl BinaryOperand for Assigned {} + + #[derive(Clone, Debug)] + 
enum BinaryOperator { + Add, + Sub, + Mul, + } + + const BINARY_OPERATORS: &[BinaryOperator] = &[ + BinaryOperator::Add, + BinaryOperator::Sub, + BinaryOperator::Mul, + ]; + + impl BinaryOperator { + fn apply(&self, a: F, b: F) -> F { + match self { + Self::Add => a + b, + Self::Sub => a - b, + Self::Mul => a * b, + } + } + } + + #[derive(Clone, Debug)] + enum Operator { + Unary(UnaryOperator), + Binary(BinaryOperator), + } + + prop_compose! { + /// Use narrow that can be easily reduced. + fn arb_element()(val in any::()) -> Fp { + Fp::from(val) + } + } + + prop_compose! { + fn arb_trivial()(element in arb_element()) -> Assigned { + Assigned::Trivial(element) + } + } + + prop_compose! { + /// Generates half of the denominators as zero to represent a deferred inversion. + fn arb_rational()( + numerator in arb_element(), + denominator in prop_oneof![ + 1 => Just(Fp::zero()), + 2 => arb_element(), + ], + ) -> Assigned { + Assigned::Rational(numerator, denominator) + } + } + + prop_compose! { + fn arb_operators(num_unary: usize, num_binary: usize)( + unary in vec(select(UNARY_OPERATORS), num_unary), + binary in vec(select(BINARY_OPERATORS), num_binary), + ) -> Vec { + unary.into_iter() + .map(Operator::Unary) + .chain(binary.into_iter().map(Operator::Binary)) + .collect() + } + } + + prop_compose! { + fn arb_testcase()( + num_unary in 0usize..5, + num_binary in 0usize..5, + )( + values in vec( + prop_oneof![ + 1 => Just(Assigned::Zero), + 2 => arb_trivial(), + 2 => arb_rational(), + ], + // Ensure that: + // - we have at least one value to apply unary operators to. + // - we can apply every binary operator pairwise sequentially. + cmp::max(usize::from(num_unary > 0), num_binary + 1)), + operations in arb_operators(num_unary, num_binary).prop_shuffle(), + ) -> (Vec>, Vec) { + (values, operations) + } + } + + proptest! { + #[test] + fn operation_commutativity((values, operations) in arb_testcase()) { + // Evaluate the values at the start. 
+ let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); + + // Apply the operations to both the deferred and evaluated values. + fn evaluate( + items: Vec, + operators: &[Operator], + ) -> F { + let mut ops = operators.iter(); + + // Process all binary operators. We are guaranteed to have exactly as many + // binary operators as we need calls to the reduction closure. + let mut res = items.into_iter().reduce(|mut a, b| loop { + match ops.next() { + Some(Operator::Unary(op)) => a = op.apply(a), + Some(Operator::Binary(op)) => break op.apply(a, b), + None => unreachable!(), + } + }).unwrap(); + + // Process any unary operators that weren't handled in the reduce() call + // above (either if we only had one item, or there were unary operators + // after the last binary operator). We are guaranteed to have no binary + // operators remaining at this point. + loop { + match ops.next() { + Some(Operator::Unary(op)) => res = op.apply(res), + Some(Operator::Binary(_)) => unreachable!(), + None => break res, + } + } + } + let deferred_result = evaluate(values, &operations); + let evaluated_result = evaluate(elements, &operations); + + // The two should be equal, i.e. deferred inversion should commute with the + // list of operations. + assert_eq!(deferred_result.evaluate(), evaluated_result); + } + } +} From f99144a25ca0d4a41645fd3e6e75bb830e8b98d5 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 13:24:03 +0000 Subject: [PATCH 36/79] Checkpoint. 
Move everything to common --- backend/src/arithmetic.rs | 503 --- backend/src/dev.rs | 3 - backend/src/dev/metadata.rs | 44 - backend/src/helpers.rs | 154 - backend/src/lib.rs | 20 - backend/src/multicore.rs | 0 backend/src/plonk.rs | 504 --- backend/src/plonk/assigned.rs | 665 ---- backend/src/plonk/circuit.rs | 187 - backend/src/plonk/error.rs | 93 - backend/src/plonk/keygen.rs | 159 - backend/src/poly.rs | 323 -- common/src/arithmetic.rs | 503 ++- {frontend => common}/src/circuit.rs | 10 +- .../src/circuit/floor_planner.rs | 0 .../src/circuit/floor_planner/single_pass.rs | 9 +- .../src/circuit/floor_planner/v1.rs | 8 +- .../src/circuit/floor_planner/v1/strategy.rs | 3 +- {frontend => common}/src/circuit/layouter.rs | 5 +- .../src/circuit/table_layouter.rs | 6 +- {frontend => common}/src/circuit/value.rs | 3 +- common/src/dev.rs | 1855 ++++++++++ common/src/dev/cost.rs | 562 +++ common/src/dev/cost_model.rs | 323 ++ common/src/dev/failure.rs | 873 +++++ common/src/dev/failure/emitter.rs | 214 ++ common/src/dev/gates.rs | 314 ++ common/src/dev/graph.rs | 204 ++ common/src/dev/graph/layout.rs | 323 ++ common/src/dev/metadata.rs | 313 ++ common/src/dev/tfp.rs | 509 +++ common/src/dev/util.rs | 159 + common/src/helpers.rs | 154 + common/src/lib.rs | 20 +- common/src/plonk.rs | 1666 +++------ common/src/plonk/assigned.rs | 665 ++++ common/src/plonk/circuit.rs | 3162 +++++++++++++++++ .../src/plonk/circuit/compress_selectors.rs | 3 +- {frontend/src => common/src/plonk}/error.rs | 4 +- {backend => common}/src/plonk/evaluation.rs | 5 +- common/src/plonk/keygen.rs | 395 ++ common/src/plonk/lookup.rs | 3 +- common/src/plonk/lookup/prover.rs | 2 +- common/src/plonk/lookup/verifier.rs | 3 +- common/src/plonk/permutation.rs | 20 +- common/src/plonk/permutation/keygen.rs | 83 +- common/src/plonk/permutation/prover.rs | 5 +- common/src/plonk/permutation/verifier.rs | 5 +- {backend => common}/src/plonk/prover.rs | 264 +- common/src/plonk/shuffle.rs | 11 +- 
common/src/plonk/shuffle/prover.rs | 2 +- common/src/plonk/shuffle/verifier.rs | 3 +- {backend => common}/src/plonk/vanishing.rs | 0 .../src/plonk/vanishing/prover.rs | 2 +- .../src/plonk/vanishing/verifier.rs | 2 +- {backend => common}/src/plonk/verifier.rs | 5 +- .../src/plonk/verifier/batch.rs | 2 +- common/src/poly.rs | 30 +- {backend => common}/src/poly/commitment.rs | 2 +- {backend => common}/src/poly/domain.rs | 11 +- .../src/poly/ipa/commitment.rs | 4 +- .../src/poly/ipa/commitment/prover.rs | 2 +- .../src/poly/ipa/commitment/verifier.rs | 0 {backend => common}/src/poly/ipa/mod.rs | 0 {backend => common}/src/poly/ipa/msm.rs | 2 +- {backend => common}/src/poly/ipa/multiopen.rs | 2 +- .../src/poly/ipa/multiopen/prover.rs | 2 +- .../src/poly/ipa/multiopen/verifier.rs | 2 +- {backend => common}/src/poly/ipa/strategy.rs | 2 +- .../src/poly/kzg/commitment.rs | 4 +- {backend => common}/src/poly/kzg/mod.rs | 0 {backend => common}/src/poly/kzg/msm.rs | 2 +- {backend => common}/src/poly/kzg/multiopen.rs | 0 .../src/poly/kzg/multiopen/gwc.rs | 2 +- .../src/poly/kzg/multiopen/gwc/prover.rs | 0 .../src/poly/kzg/multiopen/gwc/verifier.rs | 2 +- .../src/poly/kzg/multiopen/shplonk.rs | 4 +- .../src/poly/kzg/multiopen/shplonk/prover.rs | 2 +- .../poly/kzg/multiopen/shplonk/verifier.rs | 2 +- {backend => common}/src/poly/kzg/strategy.rs | 2 +- .../src/poly/multiopen_test.rs | 2 +- {backend => common}/src/poly/query.rs | 0 {backend => common}/src/poly/strategy.rs | 0 {backend => common}/src/transcript.rs | 0 frontend/src/lib.rs | 6 - frontend/src/plonk.rs | 86 - frontend/src/plonk/circuit.rs | 1264 ------- frontend/src/plonk/lookup.rs | 10 - frontend/src/plonk/permutation.rs | 8 - frontend/src/plonk/shuffle.rs | 10 - frontend/src/poly.rs | 85 - middleware/src/circuit.rs | 4 +- 92 files changed, 11426 insertions(+), 5465 deletions(-) delete mode 100644 backend/src/arithmetic.rs delete mode 100644 backend/src/dev.rs delete mode 100644 backend/src/dev/metadata.rs delete mode 
100644 backend/src/helpers.rs delete mode 100644 backend/src/multicore.rs delete mode 100644 backend/src/plonk.rs delete mode 100644 backend/src/plonk/assigned.rs delete mode 100644 backend/src/plonk/circuit.rs delete mode 100644 backend/src/plonk/error.rs delete mode 100644 backend/src/plonk/keygen.rs delete mode 100644 backend/src/poly.rs rename {frontend => common}/src/circuit.rs (98%) rename {frontend => common}/src/circuit/floor_planner.rs (100%) rename {frontend => common}/src/circuit/floor_planner/single_pass.rs (98%) rename {frontend => common}/src/circuit/floor_planner/v1.rs (98%) rename {frontend => common}/src/circuit/floor_planner/v1/strategy.rs (99%) rename {frontend => common}/src/circuit/layouter.rs (98%) rename {frontend => common}/src/circuit/table_layouter.rs (98%) rename {frontend => common}/src/circuit/value.rs (99%) create mode 100644 common/src/dev.rs create mode 100644 common/src/dev/cost.rs create mode 100644 common/src/dev/cost_model.rs create mode 100644 common/src/dev/failure.rs create mode 100644 common/src/dev/failure/emitter.rs create mode 100644 common/src/dev/gates.rs create mode 100644 common/src/dev/graph.rs create mode 100644 common/src/dev/graph/layout.rs create mode 100644 common/src/dev/metadata.rs create mode 100644 common/src/dev/tfp.rs create mode 100644 common/src/dev/util.rs create mode 100644 common/src/plonk/assigned.rs create mode 100644 common/src/plonk/circuit.rs rename {frontend => common}/src/plonk/circuit/compress_selectors.rs (99%) rename {frontend/src => common/src/plonk}/error.rs (98%) rename {backend => common}/src/plonk/evaluation.rs (99%) create mode 100644 common/src/plonk/keygen.rs rename {backend => common}/src/plonk/prover.rs (78%) rename {backend => common}/src/plonk/vanishing.rs (100%) rename {backend => common}/src/plonk/vanishing/prover.rs (99%) rename {backend => common}/src/plonk/vanishing/verifier.rs (99%) rename {backend => common}/src/plonk/verifier.rs (98%) rename {backend => 
common}/src/plonk/verifier/batch.rs (98%) rename {backend => common}/src/poly/commitment.rs (99%) rename {backend => common}/src/poly/domain.rs (99%) rename {backend => common}/src/poly/ipa/commitment.rs (99%) rename {backend => common}/src/poly/ipa/commitment/prover.rs (99%) rename {backend => common}/src/poly/ipa/commitment/verifier.rs (100%) rename {backend => common}/src/poly/ipa/mod.rs (100%) rename {backend => common}/src/poly/ipa/msm.rs (99%) rename {backend => common}/src/poly/ipa/multiopen.rs (99%) rename {backend => common}/src/poly/ipa/multiopen/prover.rs (99%) rename {backend => common}/src/poly/ipa/multiopen/verifier.rs (99%) rename {backend => common}/src/poly/ipa/strategy.rs (99%) rename {backend => common}/src/poly/kzg/commitment.rs (99%) rename {backend => common}/src/poly/kzg/mod.rs (100%) rename {backend => common}/src/poly/kzg/msm.rs (99%) rename {backend => common}/src/poly/kzg/multiopen.rs (100%) rename {backend => common}/src/poly/kzg/multiopen/gwc.rs (97%) rename {backend => common}/src/poly/kzg/multiopen/gwc/prover.rs (100%) rename {backend => common}/src/poly/kzg/multiopen/gwc/verifier.rs (99%) rename {backend => common}/src/poly/kzg/multiopen/shplonk.rs (98%) rename {backend => common}/src/poly/kzg/multiopen/shplonk/prover.rs (99%) rename {backend => common}/src/poly/kzg/multiopen/shplonk/verifier.rs (99%) rename {backend => common}/src/poly/kzg/strategy.rs (99%) rename {backend => common}/src/poly/multiopen_test.rs (99%) rename {backend => common}/src/poly/query.rs (100%) rename {backend => common}/src/poly/strategy.rs (100%) rename {backend => common}/src/transcript.rs (100%) delete mode 100644 frontend/src/plonk.rs delete mode 100644 frontend/src/plonk/circuit.rs delete mode 100644 frontend/src/plonk/lookup.rs delete mode 100644 frontend/src/plonk/permutation.rs delete mode 100644 frontend/src/plonk/shuffle.rs delete mode 100644 frontend/src/poly.rs diff --git a/backend/src/arithmetic.rs b/backend/src/arithmetic.rs deleted file mode 
100644 index 063f2e3814..0000000000 --- a/backend/src/arithmetic.rs +++ /dev/null @@ -1,503 +0,0 @@ -//! This module provides common utilities, traits and structures for group, -//! field and polynomial arithmetic. - -use group::{ - ff::{BatchInvert, PrimeField}, - Curve, Group, GroupOpsOwned, ScalarMulOwned, -}; -use halo2_common::arithmetic::parallelize; -use halo2_common::multicore; -pub use halo2_middleware::ff::Field; - -pub use halo2curves::{CurveAffine, CurveExt}; - -/// This represents an element of a group with basic operations that can be -/// performed. This allows an FFT implementation (for example) to operate -/// generically over either a field or elliptic curve group. -pub trait FftGroup: - Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned -{ -} - -impl FftGroup for T -where - Scalar: Field, - T: Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned, -{ -} - -fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - - let c = if bases.len() < 4 { - 1 - } else if bases.len() < 32 { - 3 - } else { - (f64::from(bases.len() as u32)).ln().ceil() as usize - }; - - fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize { - let skip_bits = segment * c; - let skip_bytes = skip_bits / 8; - - if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 { - return 0; - } - - let mut v = [0; 8]; - for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) { - *v = *o; - } - - let mut tmp = u64::from_le_bytes(v); - tmp >>= skip_bits - (skip_bytes * 8); - tmp %= 1 << c; - - tmp as usize - } - - let segments = (C::Scalar::NUM_BITS as usize / c) + 1; - - for current_segment in (0..segments).rev() { - for _ in 0..c { - *acc = acc.double(); - } - - #[derive(Clone, Copy)] - enum Bucket { - None, - Affine(C), - Projective(C::Curve), - } - - impl Bucket { - fn add_assign(&mut self, other: &C) { - *self = match *self { - Bucket::None => Bucket::Affine(*other), - 
Bucket::Affine(a) => Bucket::Projective(a + *other), - Bucket::Projective(mut a) => { - a += *other; - Bucket::Projective(a) - } - } - } - - fn add(self, mut other: C::Curve) -> C::Curve { - match self { - Bucket::None => other, - Bucket::Affine(a) => { - other += a; - other - } - Bucket::Projective(a) => other + &a, - } - } - } - - let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1]; - - for (coeff, base) in coeffs.iter().zip(bases.iter()) { - let coeff = get_at::(current_segment, c, coeff); - if coeff != 0 { - buckets[coeff - 1].add_assign(base); - } - } - - // Summation by parts - // e.g. 3a + 2b + 1c = a + - // (a) + b + - // ((a) + b) + c - let mut running_sum = C::Curve::identity(); - for exp in buckets.into_iter().rev() { - running_sum = exp.add(running_sum); - *acc += &running_sum; - } - } -} - -/// Performs a small multi-exponentiation operation. -/// Uses the double-and-add algorithm with doublings shared across points. -pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - let mut acc = C::Curve::identity(); - - // for byte idx - for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() { - // for bit idx - for bit_idx in (0..8).rev() { - acc = acc.double(); - // for each coeff - for coeff_idx in 0..coeffs.len() { - let byte = coeffs[coeff_idx].as_ref()[byte_idx]; - if ((byte >> bit_idx) & 1) != 0 { - acc += bases[coeff_idx]; - } - } - } - } - - acc -} - -/// Performs a multi-exponentiation operation. -/// -/// This function will panic if coeffs and bases have a different length. -/// -/// This will use multithreading if beneficial. 
-pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - assert_eq!(coeffs.len(), bases.len()); - - let num_threads = multicore::current_num_threads(); - if coeffs.len() > num_threads { - let chunk = coeffs.len() / num_threads; - let num_chunks = coeffs.chunks(chunk).len(); - let mut results = vec![C::Curve::identity(); num_chunks]; - multicore::scope(|scope| { - let chunk = coeffs.len() / num_threads; - - for ((coeffs, bases), acc) in coeffs - .chunks(chunk) - .zip(bases.chunks(chunk)) - .zip(results.iter_mut()) - { - scope.spawn(move |_| { - multiexp_serial(coeffs, bases, acc); - }); - } - }); - results.iter().fold(C::Curve::identity(), |a, b| a + b) - } else { - let mut acc = C::Curve::identity(); - multiexp_serial(coeffs, bases, &mut acc); - acc - } -} - -/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size -/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative -/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when -/// interpreted as the coefficients of a polynomial of degree $n - 1$, is -/// transformed into the evaluations of this polynomial at each of the $n$ -/// distinct powers of $\omega$. This transformation is invertible by providing -/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element -/// by $n$. -/// -/// This will use multithreading if beneficial. 
-pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) { - fn bitreverse(mut n: usize, l: usize) -> usize { - let mut r = 0; - for _ in 0..l { - r = (r << 1) | (n & 1); - n >>= 1; - } - r - } - - let threads = multicore::current_num_threads(); - let log_threads = log2_floor(threads); - let n = a.len(); - assert_eq!(n, 1 << log_n); - - for k in 0..n { - let rk = bitreverse(k, log_n as usize); - if k < rk { - a.swap(rk, k); - } - } - - // precompute twiddle factors - let twiddles: Vec<_> = (0..(n / 2)) - .scan(Scalar::ONE, |w, _| { - let tw = *w; - *w *= ω - Some(tw) - }) - .collect(); - - if log_n <= log_threads { - let mut chunk = 2_usize; - let mut twiddle_chunk = n / 2; - for _ in 0..log_n { - a.chunks_mut(chunk).for_each(|coeffs| { - let (left, right) = coeffs.split_at_mut(chunk / 2); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - .enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - }); - chunk *= 2; - twiddle_chunk /= 2; - } - } else { - recursive_butterfly_arithmetic(a, n, 1, &twiddles) - } -} - -/// This perform recursive butterfly arithmetic -pub fn recursive_butterfly_arithmetic>( - a: &mut [G], - n: usize, - twiddle_chunk: usize, - twiddles: &[Scalar], -) { - if n == 2 { - let t = a[1]; - a[1] = a[0]; - a[0] += &t; - a[1] -= &t; - } else { - let (left, right) = a.split_at_mut(n / 2); - multicore::join( - || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles), - || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles), - ); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - 
.enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - } -} - -/// Convert coefficient bases group elements to lagrange basis by inverse FFT. -pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { - let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]); - let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV; - for _ in k..C::Scalar::S { - omega_inv = omega_inv.square(); - } - - let mut g_lagrange_projective = g_projective; - best_fft(&mut g_lagrange_projective, omega_inv, k); - parallelize(&mut g_lagrange_projective, |g, _| { - for g in g.iter_mut() { - *g *= n_inv; - } - }); - - let mut g_lagrange = vec![C::identity(); 1 << k]; - parallelize(&mut g_lagrange, |g_lagrange, starts| { - C::Curve::batch_normalize( - &g_lagrange_projective[starts..(starts + g_lagrange.len())], - g_lagrange, - ); - }); - - g_lagrange -} - -/// This evaluates a provided polynomial (in coefficient form) at `point`. -pub fn eval_polynomial(poly: &[F], point: F) -> F { - fn evaluate(poly: &[F], point: F) -> F { - poly.iter() - .rev() - .fold(F::ZERO, |acc, coeff| acc * point + coeff) - } - let n = poly.len(); - let num_threads = multicore::current_num_threads(); - if n * 2 < num_threads { - evaluate(poly, point) - } else { - let chunk_size = (n + num_threads - 1) / num_threads; - let mut parts = vec![F::ZERO; num_threads]; - multicore::scope(|scope| { - for (chunk_idx, (out, poly)) in - parts.chunks_mut(1).zip(poly.chunks(chunk_size)).enumerate() - { - scope.spawn(move |_| { - let start = chunk_idx * chunk_size; - out[0] = evaluate(poly, point) * point.pow_vartime([start as u64, 0, 0, 0]); - }); - } - }); - parts.iter().fold(F::ZERO, |acc, coeff| acc + coeff) - } -} - -/// This computes the inner product of two vectors `a` and `b`. -/// -/// This function will panic if the two vectors are not the same size. -pub fn compute_inner_product(a: &[F], b: &[F]) -> F { - // TODO: parallelize? 
- assert_eq!(a.len(), b.len()); - - let mut acc = F::ZERO; - for (a, b) in a.iter().zip(b.iter()) { - acc += (*a) * (*b); - } - - acc -} - -/// Divides polynomial `a` in `X` by `X - b` with -/// no remainder. -pub fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec -where - I::IntoIter: DoubleEndedIterator + ExactSizeIterator, -{ - b = -b; - let a = a.into_iter(); - - let mut q = vec![F::ZERO; a.len() - 1]; - - let mut tmp = F::ZERO; - for (q, r) in q.iter_mut().rev().zip(a.rev()) { - let mut lead_coeff = *r; - lead_coeff.sub_assign(&tmp); - *q = lead_coeff; - tmp = lead_coeff; - tmp.mul_assign(&b); - } - - q -} - -fn log2_floor(num: usize) -> u32 { - assert!(num > 0); - - let mut pow = 0; - - while (1 << (pow + 1)) <= num { - pow += 1; - } - - pow -} - -/// Returns coefficients of an n - 1 degree polynomial given a set of n points -/// and their evaluations. This function will panic if two values in `points` -/// are the same. -pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { - assert_eq!(points.len(), evals.len()); - if points.len() == 1 { - // Constant polynomial - vec![evals[0]] - } else { - let mut denoms = Vec::with_capacity(points.len()); - for (j, x_j) in points.iter().enumerate() { - let mut denom = Vec::with_capacity(points.len() - 1); - for x_k in points - .iter() - .enumerate() - .filter(|&(k, _)| k != j) - .map(|a| a.1) - { - denom.push(*x_j - x_k); - } - denoms.push(denom); - } - // Compute (x_j - x_k)^(-1) for each j != i - denoms.iter_mut().flat_map(|v| v.iter_mut()).batch_invert(); - - let mut final_poly = vec![F::ZERO; points.len()]; - for (j, (denoms, eval)) in denoms.into_iter().zip(evals.iter()).enumerate() { - let mut tmp: Vec = Vec::with_capacity(points.len()); - let mut product = Vec::with_capacity(points.len() - 1); - tmp.push(F::ONE); - for (x_k, denom) in points - .iter() - .enumerate() - .filter(|&(k, _)| k != j) - .map(|a| a.1) - .zip(denoms.into_iter()) - { - product.resize(tmp.len() + 1, F::ZERO); - 
for ((a, b), product) in tmp - .iter() - .chain(std::iter::once(&F::ZERO)) - .zip(std::iter::once(&F::ZERO).chain(tmp.iter())) - .zip(product.iter_mut()) - { - *product = *a * (-denom * x_k) + *b * denom; - } - std::mem::swap(&mut tmp, &mut product); - } - assert_eq!(tmp.len(), points.len()); - assert_eq!(product.len(), points.len() - 1); - for (final_coeff, interpolation_coeff) in final_poly.iter_mut().zip(tmp.into_iter()) { - *final_coeff += interpolation_coeff * eval; - } - } - final_poly - } -} - -pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { - fn evaluate(roots: &[F], z: F) -> F { - roots.iter().fold(F::ONE, |acc, point| (z - point) * acc) - } - let n = roots.len(); - let num_threads = multicore::current_num_threads(); - if n * 2 < num_threads { - evaluate(roots, z) - } else { - let chunk_size = (n + num_threads - 1) / num_threads; - let mut parts = vec![F::ONE; num_threads]; - multicore::scope(|scope| { - for (out, roots) in parts.chunks_mut(1).zip(roots.chunks(chunk_size)) { - scope.spawn(move |_| out[0] = evaluate(roots, z)); - } - }); - parts.iter().fold(F::ONE, |acc, part| acc * part) - } -} - -pub(crate) fn powers(base: F) -> impl Iterator { - std::iter::successors(Some(F::ONE), move |power| Some(base * power)) -} - -#[cfg(test)] -use rand_core::OsRng; - -#[cfg(test)] -use crate::halo2curves::pasta::Fp; - -#[test] -fn test_lagrange_interpolate() { - let rng = OsRng; - - let points = (0..5).map(|_| Fp::random(rng)).collect::>(); - let evals = (0..5).map(|_| Fp::random(rng)).collect::>(); - - for coeffs in 0..5 { - let points = &points[0..coeffs]; - let evals = &evals[0..coeffs]; - - let poly = lagrange_interpolate(points, evals); - assert_eq!(poly.len(), points.len()); - - for (point, eval) in points.iter().zip(evals) { - assert_eq!(eval_polynomial(&poly, *point), *eval); - } - } -} diff --git a/backend/src/dev.rs b/backend/src/dev.rs deleted file mode 100644 index d848651ca0..0000000000 --- a/backend/src/dev.rs +++ /dev/null @@ 
-1,3 +0,0 @@ -//! Tools for developing circuits. - -pub mod metadata; diff --git a/backend/src/dev/metadata.rs b/backend/src/dev/metadata.rs deleted file mode 100644 index eeea0cb3d1..0000000000 --- a/backend/src/dev/metadata.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Metadata about circuits. - -// use crate::plonk::{self, Any}; -// use std::fmt::{self, Debug}; -// /// Metadata about a column within a circuit. -// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -// pub struct Column { -// /// The type of the column. -// pub(super) column_type: Any, -// /// The index of the column. -// pub(super) index: usize, -// } -// -// impl Column { -// /// Return the column type. -// pub fn column_type(&self) -> Any { -// self.column_type -// } -// /// Return the column index. -// pub fn index(&self) -> usize { -// self.index -// } -// } -// -// impl fmt::Display for Column { -// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { -// write!(f, "Column('{:?}', {})", self.column_type, self.index) -// } -// } -// -// impl From<(Any, usize)> for Column { -// fn from((column_type, index): (Any, usize)) -> Self { -// Column { column_type, index } -// } -// } -// -// impl From> for Column { -// fn from(column: plonk::Column) -> Self { -// Column { -// column_type: *column.column_type(), -// index: column.index(), -// } -// } -// } diff --git a/backend/src/helpers.rs b/backend/src/helpers.rs deleted file mode 100644 index 3b1e5769f8..0000000000 --- a/backend/src/helpers.rs +++ /dev/null @@ -1,154 +0,0 @@ -use crate::poly::Polynomial; -use halo2_middleware::ff::PrimeField; -use halo2curves::{serde::SerdeObject, CurveAffine}; -use std::io; - -/// This enum specifies how various types are serialized and deserialized. -#[derive(Clone, Copy, Debug)] -pub enum SerdeFormat { - /// Curve elements are serialized in compressed form. - /// Field elements are serialized in standard form, with endianness specified by the - /// `PrimeField` implementation. 
- Processed, - /// Curve elements are serialized in uncompressed form. Field elements are serialized - /// in their internal Montgomery representation. - /// When deserializing, checks are performed to ensure curve elements indeed lie on the curve and field elements - /// are less than modulus. - RawBytes, - /// Serialization is the same as `RawBytes`, but no checks are performed. - RawBytesUnchecked, -} - -// Keep this trait for compatibility with IPA serialization -pub(crate) trait CurveRead: CurveAffine { - /// Reads a compressed element from the buffer and attempts to parse it - /// using `from_bytes`. - fn read(reader: &mut R) -> io::Result { - let mut compressed = Self::Repr::default(); - reader.read_exact(compressed.as_mut())?; - Option::from(Self::from_bytes(&compressed)) - .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Invalid point encoding in proof")) - } -} -impl CurveRead for C {} - -pub trait SerdeCurveAffine: CurveAffine + SerdeObject { - /// Reads an element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompress it - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - match format { - SerdeFormat::Processed => ::read(reader), - SerdeFormat::RawBytes => ::read_raw(reader), - SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), - } - } - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - match format { - SerdeFormat::Processed => writer.write_all(self.to_bytes().as_ref()), - _ => self.write_raw(writer), - } - } - - /// Byte length of an affine curve element according to `format`. - fn byte_length(format: SerdeFormat) -> usize { - match format { - SerdeFormat::Processed => Self::default().to_bytes().as_ref().len(), - _ => Self::Repr::default().as_ref().len() * 2, - } - } -} -impl SerdeCurveAffine for C {} - -pub trait SerdePrimeField: PrimeField + SerdeObject { - /// Reads a field element as bytes from the buffer according to the `format`: - /// - `Processed`: Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations, - /// and checks that the element is less than the modulus. - /// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks. 
- fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - match format { - SerdeFormat::Processed => { - let mut compressed = Self::Repr::default(); - reader.read_exact(compressed.as_mut())?; - Option::from(Self::from_repr(compressed)).ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "Invalid prime field point encoding") - }) - } - SerdeFormat::RawBytes => ::read_raw(reader), - SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), - } - } - - /// Writes a field element as bytes to the buffer according to the `format`: - /// - `Processed`: Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - match format { - SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()), - _ => self.write_raw(writer), - } - } -} -impl SerdePrimeField for F {} - -/// Convert a slice of `bool` into a `u8`. -/// -/// Panics if the slice has length greater than 8. -pub fn pack(bits: &[bool]) -> u8 { - let mut value = 0u8; - assert!(bits.len() <= 8); - for (bit_index, bit) in bits.iter().enumerate() { - value |= (*bit as u8) << bit_index; - } - value -} - -/// Writes the first `bits.len()` bits of a `u8` into `bits`. 
-pub fn unpack(byte: u8, bits: &mut [bool]) { - for (bit_index, bit) in bits.iter_mut().enumerate() { - *bit = (byte >> bit_index) & 1 == 1; - } -} - -/// Reads a vector of polynomials from buffer -pub(crate) fn read_polynomial_vec( - reader: &mut R, - format: SerdeFormat, -) -> io::Result>> { - let mut len = [0u8; 4]; - reader.read_exact(&mut len)?; - let len = u32::from_be_bytes(len); - - (0..len) - .map(|_| Polynomial::::read(reader, format)) - .collect::>>() -} - -/// Writes a slice of polynomials to buffer -pub(crate) fn write_polynomial_slice( - slice: &[Polynomial], - writer: &mut W, - format: SerdeFormat, -) -> io::Result<()> { - writer.write_all(&(slice.len() as u32).to_be_bytes())?; - for poly in slice.iter() { - poly.write(writer, format)?; - } - Ok(()) -} - -/// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length -pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { - let field_len = F::default().to_repr().as_ref().len(); - 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) -} diff --git a/backend/src/lib.rs b/backend/src/lib.rs index 2d8e3e11bb..e69de29bb2 100644 --- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -1,20 +0,0 @@ -//! # halo2_backend - -#![cfg_attr(docsrs, feature(doc_cfg))] -// The actual lints we want to disable. 
-#![allow(clippy::op_ref, clippy::many_single_char_names)] -#![deny(rustdoc::broken_intra_doc_links)] -#![deny(missing_debug_implementations)] -#![deny(missing_docs)] -#![deny(unsafe_code)] - -pub mod arithmetic; -pub use halo2curves; -mod multicore; -pub mod plonk; -pub mod poly; -pub mod transcript; - -pub mod dev; -mod helpers; -pub use helpers::SerdeFormat; diff --git a/backend/src/multicore.rs b/backend/src/multicore.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs deleted file mode 100644 index 749490637b..0000000000 --- a/backend/src/plonk.rs +++ /dev/null @@ -1,504 +0,0 @@ -//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] -//! that is designed specifically for the polynomial commitment scheme described -//! in the [Halo][halo] paper. -//! -//! [halo]: https://eprint.iacr.org/2019/1021 -//! [plonk]: https://eprint.iacr.org/2019/953 - -use blake2b_simd::Params as Blake2bParams; -use group::ff::{Field, FromUniformBytes, PrimeField}; - -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, write_polynomial_slice, SerdeCurveAffine, SerdePrimeField, -}; -use crate::poly::{ - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, -}; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; -use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; -use halo2_middleware::poly::Rotation; - -mod assigned; -mod circuit; -mod error; -mod evaluation; -mod keygen; -mod lookup; -pub mod permutation; -mod shuffle; -mod vanishing; - -mod prover; -mod verifier; - -pub use assigned::*; -pub use circuit::*; -pub use error::*; -pub use keygen::*; -pub use prover::*; -pub use verifier::*; - -use evaluation::Evaluator; -use std::io; - -/// This is a verifying key which allows for the verification of proofs for a -/// particular circuit. 
-#[derive(Clone, Debug)] -pub struct VerifyingKey { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - /// Cached maximum degree of `cs` (which doesn't change after construction). - cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, - selectors: Vec>, - /// Whether selector compression is turned on or not. - compress_selectors: bool, -} - -// Current version of the VK -const VERSION: u8 = 0x03; - -impl VerifyingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a verifying key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - // Version byte that will be checked on read. 
- writer.write_all(&[VERSION])?; - let k = &self.domain.k(); - assert!(*k <= C::Scalar::S); - // k value fits in 1 byte - writer.write_all(&[*k as u8])?; - writer.write_all(&[self.compress_selectors as u8])?; - writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; - for commitment in &self.fixed_commitments { - commitment.write(writer, format)?; - } - self.permutation.write(writer, format)?; - - if !self.compress_selectors { - assert!(self.selectors.is_empty()); - } - // write self.selectors - for selector in &self.selectors { - // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write - for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; - } - } - Ok(()) - } - - // TODO: Adapt to CompiledCircuit - /* - /// Reads a verification key from a buffer. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let mut version_byte = [0u8; 1]; - reader.read_exact(&mut version_byte)?; - if VERSION != version_byte[0] { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected version byte", - )); - } - - let mut k = [0u8; 1]; - reader.read_exact(&mut k)?; - let k = u8::from_le_bytes(k); - if k as u32 > C::Scalar::S { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - format!( - "circuit size value (k): {} exceeds maxium: {}", - k, - C::Scalar::S - ), - )); - } - let mut compress_selectors = [0u8; 1]; - reader.read_exact(&mut compress_selectors)?; - if compress_selectors[0] != 0 && compress_selectors[0] != 1 { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected compress_selectors not boolean", - )); - } - let compress_selectors = compress_selectors[0] == 1; - let (domain, cs, _) = keygen::create_domain::( - k as u32, - #[cfg(feature = "circuit-params")] - params, - ); - let mut num_fixed_columns = [0u8; 4]; - reader.read_exact(&mut num_fixed_columns)?; - let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - - let fixed_commitments: Vec<_> = (0..num_fixed_columns) - .map(|_| C::read(reader, format)) - .collect::>()?; - - let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - - let (cs, selectors) = if compress_selectors { - // read selectors - let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] - .into_iter() - .map(|mut selector| { - let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; - reader.read_exact(&mut selector_bytes)?; - for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); - } - Ok(selector) - }) - .collect::>()?; - let (cs, _) 
= cs.compress_selectors(selectors.clone()); - (cs, selectors) - } else { - // we still need to replace selectors with fixed Expressions in `cs` - let fake_selectors = vec![vec![]; cs.num_selectors]; - let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); - (cs, vec![]) - }; - - Ok(Self::from_parts( - domain, - fixed_commitments, - permutation, - cs, - selectors, - compress_selectors, - )) - } - */ - - /// Writes a verifying key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - // TODO: Adapt to CompiledCircuit - /* - /// Reads a verification key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } - */ -} - -impl VerifyingKey { - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - 10 + (self.fixed_commitments.len() * C::byte_length(format)) - + self.permutation.bytes_length(format) - + self.selectors.len() - * (self - .selectors - .get(0) - .map(|selector| (selector.len() + 7) / 8) - .unwrap_or(0)) - } - - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - selectors: Vec>, - compress_selectors: bool, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. - let cs_degree = cs.degree(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - cs_degree, - // Temporary, this is not pinned. 
- transcript_repr: C::Scalar::ZERO, - selectors, - compress_selectors, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - let s = format!("{:?}", vk.pinned()); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } - - /// Obtains a pinned representation of this verification key that contains - /// the minimal information necessary to reconstruct the verification key. - pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { - PinnedVerificationKey { - base_modulus: C::Base::MODULUS, - scalar_modulus: C::Scalar::MODULUS, - domain: self.domain.pinned(), - fixed_commitments: &self.fixed_commitments, - permutation: &self.permutation, - cs: self.cs.pinned(), - } - } - - /// Returns commitments of fixed polynomials - pub fn fixed_commitments(&self) -> &Vec { - &self.fixed_commitments - } - - /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> &permutation::VerifyingKey { - &self.permutation - } - - /// Returns `ConstraintSystem` - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns representative of this `VerifyingKey` in transcripts - pub fn transcript_repr(&self) -> C::Scalar { - self.transcript_repr - } -} - -/// Minimal representation of a verification key that can be used to identify -/// its active contents. 
-#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedVerificationKey<'a, C: CurveAffine> { - base_modulus: &'static str, - scalar_modulus: &'static str, - domain: PinnedEvaluationDomain<'a, C::Scalar>, - cs: PinnedConstraintSystem<'a, C::Scalar>, - fixed_commitments: &'a Vec, - permutation: &'a permutation::VerifyingKey, -} - -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKey { - vk: VerifyingKey, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -impl ProvingKey -where - C::Scalar: FromUniformBytes<64>, -{ - /// Get the underlying [`VerifyingKey`]. - pub fn get_vk(&self) -> &VerifyingKey { - &self.vk - } - - /// Gets the total number of bytes in the serialization of `self` - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - let scalar_len = C::Scalar::default().to_repr().as_ref().len(); - self.vk.bytes_length(format) - + 12 - + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) - + polynomial_slice_byte_length(&self.fixed_values) - + polynomial_slice_byte_length(&self.fixed_polys) - + polynomial_slice_byte_length(&self.fixed_cosets) - + self.permutation.bytes_length() - } -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a proving key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. 
- /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - self.vk.write(writer, format)?; - self.l0.write(writer, format)?; - self.l_last.write(writer, format)?; - self.l_active_row.write(writer, format)?; - write_polynomial_slice(&self.fixed_values, writer, format)?; - write_polynomial_slice(&self.fixed_polys, writer, format)?; - write_polynomial_slice(&self.fixed_cosets, writer, format)?; - self.permutation.write(writer, format)?; - Ok(()) - } - - // TODO: Adapt to CompiledCircuit - /* - /// Reads a proving key from a buffer. - /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let vk = VerifyingKey::::read::( - reader, - format, - #[cfg(feature = "circuit-params")] - params, - )?; - let l0 = Polynomial::read(reader, format)?; - let l_last = Polynomial::read(reader, format)?; - let l_active_row = Polynomial::read(reader, format)?; - let fixed_values = read_polynomial_vec(reader, format)?; - let fixed_polys = read_polynomial_vec(reader, format)?; - let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = permutation::ProvingKey::read(reader, format)?; - let ev = Evaluator::new(vk.cs()); - Ok(Self { - vk, - l0, - l_last, - l_active_row, - fixed_values, - fixed_polys, - fixed_cosets, - permutation, - ev, - }) - } - */ - - /// Writes a proving key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - // TODO: Adapt to CompiledCircuit - /* - /// Reads a proving key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } - */ -} - -impl VerifyingKey { - /// Get the underlying [`EvaluationDomain`]. 
- pub fn get_domain(&self) -> &EvaluationDomain { - &self.domain - } -} - -#[derive(Clone, Copy, Debug)] -struct Theta; -type ChallengeTheta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Beta; -type ChallengeBeta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Gamma; -type ChallengeGamma = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Y; -type ChallengeY = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X; -type ChallengeX = ChallengeScalar; diff --git a/backend/src/plonk/assigned.rs b/backend/src/plonk/assigned.rs deleted file mode 100644 index 907ab22650..0000000000 --- a/backend/src/plonk/assigned.rs +++ /dev/null @@ -1,665 +0,0 @@ -// use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; -// -// use group::ff::Field; -// -// /// A value assigned to a cell within a circuit. -// /// -// /// Stored as a fraction, so the backend can use batch inversion. -// /// -// /// A denominator of zero maps to an assigned value of zero. -// #[derive(Clone, Copy, Debug)] -// pub enum Assigned { -// /// The field element zero. -// Zero, -// /// A value that does not require inversion to evaluate. -// Trivial(F), -// /// A value stored as a fraction to enable batch inversion. -// Rational(F, F), -// } -// -// impl From<&Assigned> for Assigned { -// fn from(val: &Assigned) -> Self { -// *val -// } -// } -// -// impl From<&F> for Assigned { -// fn from(numerator: &F) -> Self { -// Assigned::Trivial(*numerator) -// } -// } -// -// impl From for Assigned { -// fn from(numerator: F) -> Self { -// Assigned::Trivial(numerator) -// } -// } -// -// impl From<(F, F)> for Assigned { -// fn from((numerator, denominator): (F, F)) -> Self { -// Assigned::Rational(numerator, denominator) -// } -// } -// -// impl PartialEq for Assigned { -// fn eq(&self, other: &Self) -> bool { -// match (self, other) { -// // At least one side is directly zero. 
-// (Self::Zero, Self::Zero) => true, -// (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), -// -// // One side is x/0 which maps to zero. -// (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) -// if denominator.is_zero_vartime() => -// { -// x.is_zero_vartime() -// } -// -// // Okay, we need to do some actual math... -// (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, -// (Self::Trivial(x), Self::Rational(numerator, denominator)) -// | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { -// &(*x * denominator) == numerator -// } -// ( -// Self::Rational(lhs_numerator, lhs_denominator), -// Self::Rational(rhs_numerator, rhs_denominator), -// ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, -// } -// } -// } -// -// impl Eq for Assigned {} -// -// impl Neg for Assigned { -// type Output = Assigned; -// fn neg(self) -> Self::Output { -// match self { -// Self::Zero => Self::Zero, -// Self::Trivial(numerator) => Self::Trivial(-numerator), -// Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), -// } -// } -// } -// -// impl Neg for &Assigned { -// type Output = Assigned; -// fn neg(self) -> Self::Output { -// -*self -// } -// } -// -// impl Add for Assigned { -// type Output = Assigned; -// fn add(self, rhs: Assigned) -> Assigned { -// match (self, rhs) { -// // One side is directly zero. -// (Self::Zero, _) => rhs, -// (_, Self::Zero) => self, -// -// // One side is x/0 which maps to zero. -// (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) -// if denominator.is_zero_vartime() => -// { -// other -// } -// -// // Okay, we need to do some actual math... 
-// (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), -// (Self::Rational(numerator, denominator), Self::Trivial(other)) -// | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { -// Self::Rational(numerator + denominator * other, denominator) -// } -// ( -// Self::Rational(lhs_numerator, lhs_denominator), -// Self::Rational(rhs_numerator, rhs_denominator), -// ) => Self::Rational( -// lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, -// lhs_denominator * rhs_denominator, -// ), -// } -// } -// } -// -// impl Add for Assigned { -// type Output = Assigned; -// fn add(self, rhs: F) -> Assigned { -// self + Self::Trivial(rhs) -// } -// } -// -// impl Add for &Assigned { -// type Output = Assigned; -// fn add(self, rhs: F) -> Assigned { -// *self + rhs -// } -// } -// -// impl Add<&Assigned> for Assigned { -// type Output = Assigned; -// fn add(self, rhs: &Self) -> Assigned { -// self + *rhs -// } -// } -// -// impl Add> for &Assigned { -// type Output = Assigned; -// fn add(self, rhs: Assigned) -> Assigned { -// *self + rhs -// } -// } -// -// impl Add<&Assigned> for &Assigned { -// type Output = Assigned; -// fn add(self, rhs: &Assigned) -> Assigned { -// *self + *rhs -// } -// } -// -// impl AddAssign for Assigned { -// fn add_assign(&mut self, rhs: Self) { -// *self = *self + rhs; -// } -// } -// -// impl AddAssign<&Assigned> for Assigned { -// fn add_assign(&mut self, rhs: &Self) { -// *self = *self + rhs; -// } -// } -// -// impl Sub for Assigned { -// type Output = Assigned; -// fn sub(self, rhs: Assigned) -> Assigned { -// self + (-rhs) -// } -// } -// -// impl Sub for Assigned { -// type Output = Assigned; -// fn sub(self, rhs: F) -> Assigned { -// self + (-rhs) -// } -// } -// -// impl Sub for &Assigned { -// type Output = Assigned; -// fn sub(self, rhs: F) -> Assigned { -// *self - rhs -// } -// } -// -// impl Sub<&Assigned> for Assigned { -// type Output = Assigned; -// fn sub(self, rhs: &Self) -> 
Assigned { -// self - *rhs -// } -// } -// -// impl Sub> for &Assigned { -// type Output = Assigned; -// fn sub(self, rhs: Assigned) -> Assigned { -// *self - rhs -// } -// } -// -// impl Sub<&Assigned> for &Assigned { -// type Output = Assigned; -// fn sub(self, rhs: &Assigned) -> Assigned { -// *self - *rhs -// } -// } -// -// impl SubAssign for Assigned { -// fn sub_assign(&mut self, rhs: Self) { -// *self = *self - rhs; -// } -// } -// -// impl SubAssign<&Assigned> for Assigned { -// fn sub_assign(&mut self, rhs: &Self) { -// *self = *self - rhs; -// } -// } -// -// impl Mul for Assigned { -// type Output = Assigned; -// fn mul(self, rhs: Assigned) -> Assigned { -// match (self, rhs) { -// (Self::Zero, _) | (_, Self::Zero) => Self::Zero, -// (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), -// (Self::Rational(numerator, denominator), Self::Trivial(other)) -// | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { -// Self::Rational(numerator * other, denominator) -// } -// ( -// Self::Rational(lhs_numerator, lhs_denominator), -// Self::Rational(rhs_numerator, rhs_denominator), -// ) => Self::Rational( -// lhs_numerator * rhs_numerator, -// lhs_denominator * rhs_denominator, -// ), -// } -// } -// } -// -// impl Mul for Assigned { -// type Output = Assigned; -// fn mul(self, rhs: F) -> Assigned { -// self * Self::Trivial(rhs) -// } -// } -// -// impl Mul for &Assigned { -// type Output = Assigned; -// fn mul(self, rhs: F) -> Assigned { -// *self * rhs -// } -// } -// -// impl Mul<&Assigned> for Assigned { -// type Output = Assigned; -// fn mul(self, rhs: &Assigned) -> Assigned { -// self * *rhs -// } -// } -// -// impl MulAssign for Assigned { -// fn mul_assign(&mut self, rhs: Self) { -// *self = *self * rhs; -// } -// } -// -// impl MulAssign<&Assigned> for Assigned { -// fn mul_assign(&mut self, rhs: &Self) { -// *self = *self * rhs; -// } -// } -// -// impl Assigned { -// /// Returns the numerator. 
-// pub fn numerator(&self) -> F { -// match self { -// Self::Zero => F::ZERO, -// Self::Trivial(x) => *x, -// Self::Rational(numerator, _) => *numerator, -// } -// } -// -// /// Returns the denominator, if non-trivial. -// pub fn denominator(&self) -> Option { -// match self { -// Self::Zero => None, -// Self::Trivial(_) => None, -// Self::Rational(_, denominator) => Some(*denominator), -// } -// } -// -// /// Returns true iff this element is zero. -// pub fn is_zero_vartime(&self) -> bool { -// match self { -// Self::Zero => true, -// Self::Trivial(x) => x.is_zero_vartime(), -// // Assigned maps x/0 -> 0. -// Self::Rational(numerator, denominator) => { -// numerator.is_zero_vartime() || denominator.is_zero_vartime() -// } -// } -// } -// -// /// Doubles this element. -// #[must_use] -// pub fn double(&self) -> Self { -// match self { -// Self::Zero => Self::Zero, -// Self::Trivial(x) => Self::Trivial(x.double()), -// Self::Rational(numerator, denominator) => { -// Self::Rational(numerator.double(), *denominator) -// } -// } -// } -// -// /// Squares this element. -// #[must_use] -// pub fn square(&self) -> Self { -// match self { -// Self::Zero => Self::Zero, -// Self::Trivial(x) => Self::Trivial(x.square()), -// Self::Rational(numerator, denominator) => { -// Self::Rational(numerator.square(), denominator.square()) -// } -// } -// } -// -// /// Cubes this element. -// #[must_use] -// pub fn cube(&self) -> Self { -// self.square() * self -// } -// -// /// Inverts this assigned value (taking the inverse of zero to be zero). -// pub fn invert(&self) -> Self { -// match self { -// Self::Zero => Self::Zero, -// Self::Trivial(x) => Self::Rational(F::ONE, *x), -// Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), -// } -// } -// -// /// Evaluates this assigned value directly, performing an unbatched inversion if -// /// necessary. -// /// -// /// If the denominator is zero, this returns zero. 
-// pub fn evaluate(self) -> F { -// match self { -// Self::Zero => F::ZERO, -// Self::Trivial(x) => x, -// Self::Rational(numerator, denominator) => { -// if denominator == F::ONE { -// numerator -// } else { -// numerator * denominator.invert().unwrap_or(F::ZERO) -// } -// } -// } -// } -// } -// -// #[cfg(test)] -// mod tests { -// use halo2curves::pasta::Fp; -// -// use super::Assigned; -// // We use (numerator, denominator) in the comments below to denote a rational. -// #[test] -// fn add_trivial_to_inv0_rational() { -// // a = 2 -// // b = (1,0) -// let a = Assigned::Trivial(Fp::from(2)); -// let b = Assigned::Rational(Fp::one(), Fp::zero()); -// -// // 2 + (1,0) = 2 + 0 = 2 -// // This fails if addition is implemented using normal rules for rationals. -// assert_eq!((a + b).evaluate(), a.evaluate()); -// assert_eq!((b + a).evaluate(), a.evaluate()); -// } -// -// #[test] -// fn add_rational_to_inv0_rational() { -// // a = (1,2) -// // b = (1,0) -// let a = Assigned::Rational(Fp::one(), Fp::from(2)); -// let b = Assigned::Rational(Fp::one(), Fp::zero()); -// -// // (1,2) + (1,0) = (1,2) + 0 = (1,2) -// // This fails if addition is implemented using normal rules for rationals. -// assert_eq!((a + b).evaluate(), a.evaluate()); -// assert_eq!((b + a).evaluate(), a.evaluate()); -// } -// -// #[test] -// fn sub_trivial_from_inv0_rational() { -// // a = 2 -// // b = (1,0) -// let a = Assigned::Trivial(Fp::from(2)); -// let b = Assigned::Rational(Fp::one(), Fp::zero()); -// -// // (1,0) - 2 = 0 - 2 = -2 -// // This fails if subtraction is implemented using normal rules for rationals. 
-// assert_eq!((b - a).evaluate(), (-a).evaluate()); -// -// // 2 - (1,0) = 2 - 0 = 2 -// assert_eq!((a - b).evaluate(), a.evaluate()); -// } -// -// #[test] -// fn sub_rational_from_inv0_rational() { -// // a = (1,2) -// // b = (1,0) -// let a = Assigned::Rational(Fp::one(), Fp::from(2)); -// let b = Assigned::Rational(Fp::one(), Fp::zero()); -// -// // (1,0) - (1,2) = 0 - (1,2) = -(1,2) -// // This fails if subtraction is implemented using normal rules for rationals. -// assert_eq!((b - a).evaluate(), (-a).evaluate()); -// -// // (1,2) - (1,0) = (1,2) - 0 = (1,2) -// assert_eq!((a - b).evaluate(), a.evaluate()); -// } -// -// #[test] -// fn mul_rational_by_inv0_rational() { -// // a = (1,2) -// // b = (1,0) -// let a = Assigned::Rational(Fp::one(), Fp::from(2)); -// let b = Assigned::Rational(Fp::one(), Fp::zero()); -// -// // (1,2) * (1,0) = (1,2) * 0 = 0 -// assert_eq!((a * b).evaluate(), Fp::zero()); -// -// // (1,0) * (1,2) = 0 * (1,2) = 0 -// assert_eq!((b * a).evaluate(), Fp::zero()); -// } -// } -// -// #[cfg(test)] -// mod proptests { -// use std::{ -// cmp, -// ops::{Add, Mul, Neg, Sub}, -// }; -// -// use group::ff::Field; -// use halo2curves::pasta::Fp; -// use proptest::{collection::vec, prelude::*, sample::select}; -// -// use super::Assigned; -// -// trait UnaryOperand: Neg { -// fn double(&self) -> Self; -// fn square(&self) -> Self; -// fn cube(&self) -> Self; -// fn inv0(&self) -> Self; -// } -// -// impl UnaryOperand for F { -// fn double(&self) -> Self { -// self.double() -// } -// -// fn square(&self) -> Self { -// self.square() -// } -// -// fn cube(&self) -> Self { -// self.cube() -// } -// -// fn inv0(&self) -> Self { -// self.invert().unwrap_or(F::ZERO) -// } -// } -// -// impl UnaryOperand for Assigned { -// fn double(&self) -> Self { -// self.double() -// } -// -// fn square(&self) -> Self { -// self.square() -// } -// -// fn cube(&self) -> Self { -// self.cube() -// } -// -// fn inv0(&self) -> Self { -// self.invert() -// } -// } -// 
-// #[derive(Clone, Debug)] -// enum UnaryOperator { -// Neg, -// Double, -// Square, -// Cube, -// Inv0, -// } -// -// const UNARY_OPERATORS: &[UnaryOperator] = &[ -// UnaryOperator::Neg, -// UnaryOperator::Double, -// UnaryOperator::Square, -// UnaryOperator::Cube, -// UnaryOperator::Inv0, -// ]; -// -// impl UnaryOperator { -// fn apply(&self, a: F) -> F { -// match self { -// Self::Neg => -a, -// Self::Double => a.double(), -// Self::Square => a.square(), -// Self::Cube => a.cube(), -// Self::Inv0 => a.inv0(), -// } -// } -// } -// -// trait BinaryOperand: Sized + Add + Sub + Mul {} -// impl BinaryOperand for F {} -// impl BinaryOperand for Assigned {} -// -// #[derive(Clone, Debug)] -// enum BinaryOperator { -// Add, -// Sub, -// Mul, -// } -// -// const BINARY_OPERATORS: &[BinaryOperator] = &[ -// BinaryOperator::Add, -// BinaryOperator::Sub, -// BinaryOperator::Mul, -// ]; -// -// impl BinaryOperator { -// fn apply(&self, a: F, b: F) -> F { -// match self { -// Self::Add => a + b, -// Self::Sub => a - b, -// Self::Mul => a * b, -// } -// } -// } -// -// #[derive(Clone, Debug)] -// enum Operator { -// Unary(UnaryOperator), -// Binary(BinaryOperator), -// } -// -// prop_compose! { -// /// Use narrow that can be easily reduced. -// fn arb_element()(val in any::()) -> Fp { -// Fp::from(val) -// } -// } -// -// prop_compose! { -// fn arb_trivial()(element in arb_element()) -> Assigned { -// Assigned::Trivial(element) -// } -// } -// -// prop_compose! { -// /// Generates half of the denominators as zero to represent a deferred inversion. -// fn arb_rational()( -// numerator in arb_element(), -// denominator in prop_oneof![ -// 1 => Just(Fp::zero()), -// 2 => arb_element(), -// ], -// ) -> Assigned { -// Assigned::Rational(numerator, denominator) -// } -// } -// -// prop_compose! 
{ -// fn arb_operators(num_unary: usize, num_binary: usize)( -// unary in vec(select(UNARY_OPERATORS), num_unary), -// binary in vec(select(BINARY_OPERATORS), num_binary), -// ) -> Vec { -// unary.into_iter() -// .map(Operator::Unary) -// .chain(binary.into_iter().map(Operator::Binary)) -// .collect() -// } -// } -// -// prop_compose! { -// fn arb_testcase()( -// num_unary in 0usize..5, -// num_binary in 0usize..5, -// )( -// values in vec( -// prop_oneof![ -// 1 => Just(Assigned::Zero), -// 2 => arb_trivial(), -// 2 => arb_rational(), -// ], -// // Ensure that: -// // - we have at least one value to apply unary operators to. -// // - we can apply every binary operator pairwise sequentially. -// cmp::max(usize::from(num_unary > 0), num_binary + 1)), -// operations in arb_operators(num_unary, num_binary).prop_shuffle(), -// ) -> (Vec>, Vec) { -// (values, operations) -// } -// } -// -// proptest! { -// #[test] -// fn operation_commutativity((values, operations) in arb_testcase()) { -// // Evaluate the values at the start. -// let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); -// -// // Apply the operations to both the deferred and evaluated values. -// fn evaluate( -// items: Vec, -// operators: &[Operator], -// ) -> F { -// let mut ops = operators.iter(); -// -// // Process all binary operators. We are guaranteed to have exactly as many -// // binary operators as we need calls to the reduction closure. -// let mut res = items.into_iter().reduce(|mut a, b| loop { -// match ops.next() { -// Some(Operator::Unary(op)) => a = op.apply(a), -// Some(Operator::Binary(op)) => break op.apply(a, b), -// None => unreachable!(), -// } -// }).unwrap(); -// -// // Process any unary operators that weren't handled in the reduce() call -// // above (either if we only had one item, or there were unary operators -// // after the last binary operator). We are guaranteed to have no binary -// // operators remaining at this point. 
-// loop { -// match ops.next() { -// Some(Operator::Unary(op)) => res = op.apply(res), -// Some(Operator::Binary(_)) => unreachable!(), -// None => break res, -// } -// } -// } -// let deferred_result = evaluate(values, &operations); -// let evaluated_result = evaluate(elements, &operations); -// -// // The two should be equal, i.e. deferred inversion should commute with the -// // list of operations. -// assert_eq!(deferred_result.evaluate(), evaluated_result); -// } -// } -// } diff --git a/backend/src/plonk/circuit.rs b/backend/src/plonk/circuit.rs deleted file mode 100644 index 72fa556d11..0000000000 --- a/backend/src/plonk/circuit.rs +++ /dev/null @@ -1,187 +0,0 @@ -use super::{lookup, permutation, shuffle, Queries}; -// use crate::dev::metadata; -use core::cmp::max; -use core::ops::{Add, Mul}; -use halo2_common::plonk::{ConstraintSystem, Expression}; -use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, - Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, -}; -use halo2_middleware::ff::Field; -use halo2_middleware::metadata; -use halo2_middleware::poly::Rotation; -use std::collections::HashMap; -use std::fmt::Debug; -use std::iter::{Product, Sum}; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, -}; - -/// Represents an index into a vector where each entry corresponds to a distinct -/// point that polynomials are queried at. -#[derive(Copy, Clone, Debug)] -pub(crate) struct PointIndex(pub usize); - -/// An individual polynomial constraint. -/// -/// These are returned by the closures passed to `ConstraintSystem::create_gate`. 
-#[derive(Debug)] -pub struct Constraint { - name: String, - poly: Expression, -} - -impl From> for Constraint { - fn from(poly: Expression) -> Self { - Constraint { - name: "".to_string(), - poly, - } - } -} - -impl> From<(S, Expression)> for Constraint { - fn from((name, poly): (S, Expression)) -> Self { - Constraint { - name: name.as_ref().to_string(), - poly, - } - } -} - -impl From> for Vec> { - fn from(poly: Expression) -> Self { - vec![Constraint { - name: "".to_string(), - poly, - }] - } -} - -/// A set of polynomial constraints with a common selector. -/// -/// ``` -/// use halo2_backend::{plonk::{Constraints, Expression}, poly::Rotation}; -/// use halo2curves::pasta::Fp; -/// # use halo2_backend::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let next = meta.query_advice(a, Rotation::next()); -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s_ternary = meta.query_selector(s); -/// -/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); -/// -/// Constraints::with_selector( -/// s_ternary, -/// std::array::IntoIter::new([ -/// ("a is boolean", a.clone() * one_minus_a.clone()), -/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), -/// ]), -/// ) -/// }); -/// ``` -/// -/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to -/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, -/// you can pass an array directly. 
-#[derive(Debug)] -pub struct Constraints>, Iter: IntoIterator> { - selector: Expression, - constraints: Iter, -} - -impl>, Iter: IntoIterator> Constraints { - /// Constructs a set of constraints that are controlled by the given selector. - /// - /// Each constraint `c` in `iterator` will be converted into the constraint - /// `selector * c`. - pub fn with_selector(selector: Expression, constraints: Iter) -> Self { - Constraints { - selector, - constraints, - } - } -} - -fn apply_selector_to_constraint>>( - (selector, c): (Expression, C), -) -> Constraint { - let constraint: Constraint = c.into(); - Constraint { - name: constraint.name, - poly: selector * constraint.poly, - } -} - -type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; -type ConstraintsIterator = std::iter::Map< - std::iter::Zip>, I>, - ApplySelectorToConstraint, ->; - -impl>, Iter: IntoIterator> IntoIterator - for Constraints -{ - type Item = Constraint; - type IntoIter = ConstraintsIterator; - - fn into_iter(self) -> Self::IntoIter { - std::iter::repeat(self.selector) - .zip(self.constraints) - .map(apply_selector_to_constraint) - } -} - -#[cfg(test)] -mod tests { - use super::Expression; - use halo2curves::bn256::Fr; - - #[test] - fn iter_sum() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().sum(); - let expected: Expression = Expression::Sum( - Box::new(Expression::Sum( - Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } - - #[test] - fn iter_product() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().product(); - let expected: Expression = Expression::Product( - Box::new(Expression::Product( 
- Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } -} diff --git a/backend/src/plonk/error.rs b/backend/src/plonk/error.rs deleted file mode 100644 index 50368bfc18..0000000000 --- a/backend/src/plonk/error.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::error; -use std::fmt; -use std::io; - -use halo2_middleware::circuit::{Any, Column}; - -/// This is an error that could occur during proving or circuit synthesis. -// TODO: these errors need to be cleaned up -#[derive(Debug)] -pub enum Error { - /// This is an error that can occur during synthesis of the circuit, for - /// example, when the witness is not present. - Synthesis, - /// The provided instances do not match the circuit parameters. - InvalidInstances, - /// The constraint system is not satisfied. - ConstraintSystemFailure, - /// Out of bounds index passed to a backend - BoundsFailure, - /// Opening error - Opening, - /// Transcript error - Transcript(io::Error), - /// `k` is too small for the given circuit. - NotEnoughRowsAvailable { - /// The current value of `k` being used. - current_k: u32, - }, - /// Instance provided exceeds number of available rows - InstanceTooLarge, - /// Circuit synthesis requires global constants, but circuit configuration did not - /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. - /// - /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant - NotEnoughColumnsForConstants, - /// The instance sets up a copy constraint involving a column that has not been - /// included in the permutation. - ColumnNotInPermutation(Column), - /// Generic error not covered by previous cases - Other(String), -} - -impl From for Error { - fn from(error: io::Error) -> Self { - // The only place we can get io::Error from is the transcript. 
- Error::Transcript(error) - } -} - -impl Error { - /// Constructs an `Error::NotEnoughRowsAvailable`. - pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { - Error::NotEnoughRowsAvailable { current_k } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Error::Synthesis => write!(f, "General synthesis error"), - Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), - Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), - Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), - Error::Opening => write!(f, "Multi-opening proof was invalid"), - Error::Transcript(e) => write!(f, "Transcript error: {e}"), - Error::NotEnoughRowsAvailable { current_k } => write!( - f, - "k = {current_k} is too small for the given circuit. Try using a larger value of k", - ), - Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), - Error::NotEnoughColumnsForConstants => { - write!( - f, - "Too few fixed columns are enabled for global constants usage" - ) - } - Error::ColumnNotInPermutation(column) => write!( - f, - "Column {column:?} must be included in the permutation. 
Help: try applying `meta.enable_equalty` on the column", - ), - Error::Other(error) => write!(f, "Other: {error}"), - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match self { - Error::Transcript(e) => Some(e), - _ => None, - } - } -} diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs deleted file mode 100644 index 92646bfb8d..0000000000 --- a/backend/src/plonk/keygen.rs +++ /dev/null @@ -1,159 +0,0 @@ -#![allow(clippy::int_plus_one)] - -use halo2_middleware::ff::{Field, FromUniformBytes}; -use group::Curve; - -use super::{ - circuit::ConstraintSystem, evaluation::Evaluator, permutation, Error, Polynomial, ProvingKey, - VerifyingKey, -}; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; -use halo2_middleware::circuit::CompiledCircuitV2; - -/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. -pub fn keygen_vk_v2<'params, C, P>( - params: &P, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - C::Scalar: FromUniformBytes<64>, -{ - let cs2 = &circuit.cs; - let cs: ConstraintSystem = cs2.clone().into(); - let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs2.permutation, - &circuit.preprocessing.permutation, - )? 
- .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - params - .commit_lagrange( - &Polynomial::new_lagrange_from_vec(poly.clone()), - Blind::default(), - ) - .to_affine() - }) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - Vec::new(), - false, - )) -} - -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. -pub fn keygen_pk_v2<'params, C, P>( - params: &P, - vk: VerifyingKey, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, -{ - let cs = &circuit.cs; - - if (params.n() as usize) < vk.cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let fixed_polys: Vec<_> = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - vk.domain - .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) - }) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? - .build_pk(params, &vk.domain, &cs.permutation.clone().into()); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. 
- let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); - - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: circuit - .preprocessing - .fixed - .clone() - .into_iter() - .map(Polynomial::new_lagrange_from_vec) - .collect(), - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) -} diff --git a/backend/src/poly.rs b/backend/src/poly.rs deleted file mode 100644 index 08dcc0c455..0000000000 --- a/backend/src/poly.rs +++ /dev/null @@ -1,323 +0,0 @@ -//! Contains utilities for performing arithmetic over univariate polynomials in -//! various forms, including computing commitments to them and provably opening -//! the committed polynomials at arbitrary points. 
- -use crate::arithmetic::parallelize; -use crate::helpers::SerdePrimeField; -use crate::SerdeFormat; -use halo2_middleware::plonk::Assigned; - -use halo2_middleware::ff::{BatchInvert, Field}; -use halo2_middleware::poly::Rotation; -use std::fmt::Debug; -use std::io; -use std::marker::PhantomData; -use std::ops::{Add, Deref, DerefMut, Index, IndexMut, Mul, RangeFrom, RangeFull, Sub}; - -/// Generic commitment scheme structures -pub mod commitment; -mod domain; -mod query; -mod strategy; - -/// Inner product argument commitment scheme -pub mod ipa; - -/// KZG commitment scheme -pub mod kzg; - -#[cfg(test)] -mod multiopen_test; - -pub use domain::*; -pub use query::{ProverQuery, VerifierQuery}; -pub use strategy::{Guard, VerificationStrategy}; - -/// This is an error that could occur during proving or circuit synthesis. -// TODO: these errors need to be cleaned up -#[derive(Debug)] -pub enum Error { - /// OpeningProof is not well-formed - OpeningError, - /// Caller needs to re-sample a point - SamplingError, -} - -/// The basis over which a polynomial is described. -pub trait Basis: Copy + Debug + Send + Sync {} - -/// The polynomial is defined as coefficients -#[derive(Clone, Copy, Debug)] -pub struct Coeff; -impl Basis for Coeff {} - -/// The polynomial is defined as coefficients of Lagrange basis polynomials -#[derive(Clone, Copy, Debug)] -pub struct LagrangeCoeff; -impl Basis for LagrangeCoeff {} - -/// The polynomial is defined as coefficients of Lagrange basis polynomials in -/// an extended size domain which supports multiplication -#[derive(Clone, Copy, Debug)] -pub struct ExtendedLagrangeCoeff; -impl Basis for ExtendedLagrangeCoeff {} - -/// Represents a univariate polynomial defined over a field and a particular -/// basis. 
-#[derive(Clone, Debug)] -pub struct Polynomial { - pub(crate) values: Vec, - pub(crate) _marker: PhantomData, -} - -impl Polynomial { - pub(crate) fn new_empty(size: usize, zero: F) -> Self { - Polynomial { - values: vec![zero; size], - _marker: PhantomData, - } - } -} - -impl Polynomial { - /// Obtains a polynomial in Lagrange form when given a vector of Lagrange - /// coefficients of size `n`; panics if the provided vector is the wrong - /// length. - pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { - Polynomial { - values, - _marker: PhantomData, - } - } -} - -impl Index for Polynomial { - type Output = F; - - fn index(&self, index: usize) -> &F { - self.values.index(index) - } -} - -impl IndexMut for Polynomial { - fn index_mut(&mut self, index: usize) -> &mut F { - self.values.index_mut(index) - } -} - -impl Index> for Polynomial { - type Output = [F]; - - fn index(&self, index: RangeFrom) -> &[F] { - self.values.index(index) - } -} - -impl IndexMut> for Polynomial { - fn index_mut(&mut self, index: RangeFrom) -> &mut [F] { - self.values.index_mut(index) - } -} - -impl Index for Polynomial { - type Output = [F]; - - fn index(&self, index: RangeFull) -> &[F] { - self.values.index(index) - } -} - -impl IndexMut for Polynomial { - fn index_mut(&mut self, index: RangeFull) -> &mut [F] { - self.values.index_mut(index) - } -} - -impl Deref for Polynomial { - type Target = [F]; - - fn deref(&self) -> &[F] { - &self.values[..] - } -} - -impl DerefMut for Polynomial { - fn deref_mut(&mut self) -> &mut [F] { - &mut self.values[..] - } -} - -impl Polynomial { - /// Iterate over the values, which are either in coefficient or evaluation - /// form depending on the basis `B`. - pub fn iter(&self) -> impl Iterator { - self.values.iter() - } - - /// Iterate over the values mutably, which are either in coefficient or - /// evaluation form depending on the basis `B`. 
- pub fn iter_mut(&mut self) -> impl Iterator { - self.values.iter_mut() - } - - /// Gets the size of this polynomial in terms of the number of - /// coefficients used to describe it. - pub fn num_coeffs(&self) -> usize { - self.values.len() - } -} - -impl Polynomial { - /// Reads polynomial from buffer using `SerdePrimeField::read`. - pub(crate) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - let mut poly_len = [0u8; 4]; - reader.read_exact(&mut poly_len)?; - let poly_len = u32::from_be_bytes(poly_len); - - (0..poly_len) - .map(|_| F::read(reader, format)) - .collect::>>() - .map(|values| Self { - values, - _marker: PhantomData, - }) - } - - /// Writes polynomial to buffer using `SerdePrimeField::write`. - pub(crate) fn write( - &self, - writer: &mut W, - format: SerdeFormat, - ) -> io::Result<()> { - writer.write_all(&(self.values.len() as u32).to_be_bytes())?; - for value in self.values.iter() { - value.write(writer, format)?; - } - Ok(()) - } -} - -pub(crate) fn batch_invert_assigned( - assigned: Vec, LagrangeCoeff>>, -) -> Vec> { - let mut assigned_denominators: Vec<_> = assigned - .iter() - .map(|f| { - f.iter() - .map(|value| value.denominator()) - .collect::>() - }) - .collect(); - - assigned_denominators - .iter_mut() - .flat_map(|f| { - f.iter_mut() - // If the denominator is trivial, we can skip it, reducing the - // size of the batch inversion. 
- .filter_map(|d| d.as_mut()) - }) - .batch_invert(); - - assigned - .iter() - .zip(assigned_denominators) - .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) - .collect() -} - -impl Polynomial, LagrangeCoeff> { - pub(crate) fn invert( - &self, - inv_denoms: impl Iterator + ExactSizeIterator, - ) -> Polynomial { - assert_eq!(inv_denoms.len(), self.values.len()); - Polynomial { - values: self - .values - .iter() - .zip(inv_denoms) - .map(|(a, inv_den)| a.numerator() * inv_den) - .collect(), - _marker: self._marker, - } - } -} - -impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { - type Output = Polynomial; - - fn add(mut self, rhs: &'a Polynomial) -> Polynomial { - parallelize(&mut self.values, |lhs, start| { - for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { - *lhs += *rhs; - } - }); - - self - } -} - -impl<'a, F: Field, B: Basis> Sub<&'a Polynomial> for Polynomial { - type Output = Polynomial; - - fn sub(mut self, rhs: &'a Polynomial) -> Polynomial { - parallelize(&mut self.values, |lhs, start| { - for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { - *lhs -= *rhs; - } - }); - - self - } -} - -impl Polynomial { - /// Rotates the values in a Lagrange basis polynomial by `Rotation` - pub fn rotate(&self, rotation: Rotation) -> Polynomial { - let mut values = self.values.clone(); - if rotation.0 < 0 { - values.rotate_right((-rotation.0) as usize); - } else { - values.rotate_left(rotation.0 as usize); - } - Polynomial { - values, - _marker: PhantomData, - } - } -} - -impl Mul for Polynomial { - type Output = Polynomial; - - fn mul(mut self, rhs: F) -> Polynomial { - if rhs == F::ZERO { - return Polynomial { - values: vec![F::ZERO; self.len()], - _marker: PhantomData, - }; - } - if rhs == F::ONE { - return self; - } - - parallelize(&mut self.values, |lhs, _| { - for lhs in lhs.iter_mut() { - *lhs *= rhs; - } - }); - - self - } -} - -impl<'a, F: Field, B: Basis> Sub for &'a 
Polynomial { - type Output = Polynomial; - - fn sub(self, rhs: F) -> Polynomial { - let mut res = self.clone(); - res.values[0] -= rhs; - res - } -} diff --git a/common/src/arithmetic.rs b/common/src/arithmetic.rs index 3ff8e76c76..0163e355eb 100644 --- a/common/src/arithmetic.rs +++ b/common/src/arithmetic.rs @@ -1,4 +1,385 @@ -use crate::multicore; +//! This module provides common utilities, traits and structures for group, +//! field and polynomial arithmetic. + +use super::multicore; +pub use ff::Field; +use group::{ + ff::{BatchInvert, PrimeField}, + Curve, Group, GroupOpsOwned, ScalarMulOwned, +}; + +pub use halo2curves::{CurveAffine, CurveExt}; + +/// This represents an element of a group with basic operations that can be +/// performed. This allows an FFT implementation (for example) to operate +/// generically over either a field or elliptic curve group. +pub trait FftGroup: + Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned +{ +} + +impl FftGroup for T +where + Scalar: Field, + T: Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned, +{ +} + +fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) { + let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); + + let c = if bases.len() < 4 { + 1 + } else if bases.len() < 32 { + 3 + } else { + (f64::from(bases.len() as u32)).ln().ceil() as usize + }; + + fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize { + let skip_bits = segment * c; + let skip_bytes = skip_bits / 8; + + if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 { + return 0; + } + + let mut v = [0; 8]; + for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) { + *v = *o; + } + + let mut tmp = u64::from_le_bytes(v); + tmp >>= skip_bits - (skip_bytes * 8); + tmp %= 1 << c; + + tmp as usize + } + + let segments = (C::Scalar::NUM_BITS as usize / c) + 1; + + for current_segment in (0..segments).rev() { + for _ in 0..c { + *acc = acc.double(); + } + + #[derive(Clone, Copy)] + enum 
Bucket { + None, + Affine(C), + Projective(C::Curve), + } + + impl Bucket { + fn add_assign(&mut self, other: &C) { + *self = match *self { + Bucket::None => Bucket::Affine(*other), + Bucket::Affine(a) => Bucket::Projective(a + *other), + Bucket::Projective(mut a) => { + a += *other; + Bucket::Projective(a) + } + } + } + + fn add(self, mut other: C::Curve) -> C::Curve { + match self { + Bucket::None => other, + Bucket::Affine(a) => { + other += a; + other + } + Bucket::Projective(a) => other + &a, + } + } + } + + let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1]; + + for (coeff, base) in coeffs.iter().zip(bases.iter()) { + let coeff = get_at::(current_segment, c, coeff); + if coeff != 0 { + buckets[coeff - 1].add_assign(base); + } + } + + // Summation by parts + // e.g. 3a + 2b + 1c = a + + // (a) + b + + // ((a) + b) + c + let mut running_sum = C::Curve::identity(); + for exp in buckets.into_iter().rev() { + running_sum = exp.add(running_sum); + *acc += &running_sum; + } + } +} + +/// Performs a small multi-exponentiation operation. +/// Uses the double-and-add algorithm with doublings shared across points. +pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { + let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); + let mut acc = C::Curve::identity(); + + // for byte idx + for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() { + // for bit idx + for bit_idx in (0..8).rev() { + acc = acc.double(); + // for each coeff + for coeff_idx in 0..coeffs.len() { + let byte = coeffs[coeff_idx].as_ref()[byte_idx]; + if ((byte >> bit_idx) & 1) != 0 { + acc += bases[coeff_idx]; + } + } + } + } + + acc +} + +/// Performs a multi-exponentiation operation. +/// +/// This function will panic if coeffs and bases have a different length. +/// +/// This will use multithreading if beneficial. 
+pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { + assert_eq!(coeffs.len(), bases.len()); + + let num_threads = multicore::current_num_threads(); + if coeffs.len() > num_threads { + let chunk = coeffs.len() / num_threads; + let num_chunks = coeffs.chunks(chunk).len(); + let mut results = vec![C::Curve::identity(); num_chunks]; + multicore::scope(|scope| { + let chunk = coeffs.len() / num_threads; + + for ((coeffs, bases), acc) in coeffs + .chunks(chunk) + .zip(bases.chunks(chunk)) + .zip(results.iter_mut()) + { + scope.spawn(move |_| { + multiexp_serial(coeffs, bases, acc); + }); + } + }); + results.iter().fold(C::Curve::identity(), |a, b| a + b) + } else { + let mut acc = C::Curve::identity(); + multiexp_serial(coeffs, bases, &mut acc); + acc + } +} + +/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size +/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative +/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when +/// interpreted as the coefficients of a polynomial of degree $n - 1$, is +/// transformed into the evaluations of this polynomial at each of the $n$ +/// distinct powers of $\omega$. This transformation is invertible by providing +/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element +/// by $n$. +/// +/// This will use multithreading if beneficial. 
+pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) { + fn bitreverse(mut n: usize, l: usize) -> usize { + let mut r = 0; + for _ in 0..l { + r = (r << 1) | (n & 1); + n >>= 1; + } + r + } + + let threads = multicore::current_num_threads(); + let log_threads = log2_floor(threads); + let n = a.len(); + assert_eq!(n, 1 << log_n); + + for k in 0..n { + let rk = bitreverse(k, log_n as usize); + if k < rk { + a.swap(rk, k); + } + } + + // precompute twiddle factors + let twiddles: Vec<_> = (0..(n / 2)) + .scan(Scalar::ONE, |w, _| { + let tw = *w; + *w *= ω + Some(tw) + }) + .collect(); + + if log_n <= log_threads { + let mut chunk = 2_usize; + let mut twiddle_chunk = n / 2; + for _ in 0..log_n { + a.chunks_mut(chunk).for_each(|coeffs| { + let (left, right) = coeffs.split_at_mut(chunk / 2); + + // case when twiddle factor is one + let (a, left) = left.split_at_mut(1); + let (b, right) = right.split_at_mut(1); + let t = b[0]; + b[0] = a[0]; + a[0] += &t; + b[0] -= &t; + + left.iter_mut() + .zip(right.iter_mut()) + .enumerate() + .for_each(|(i, (a, b))| { + let mut t = *b; + t *= &twiddles[(i + 1) * twiddle_chunk]; + *b = *a; + *a += &t; + *b -= &t; + }); + }); + chunk *= 2; + twiddle_chunk /= 2; + } + } else { + recursive_butterfly_arithmetic(a, n, 1, &twiddles) + } +} + +/// This perform recursive butterfly arithmetic +pub fn recursive_butterfly_arithmetic>( + a: &mut [G], + n: usize, + twiddle_chunk: usize, + twiddles: &[Scalar], +) { + if n == 2 { + let t = a[1]; + a[1] = a[0]; + a[0] += &t; + a[1] -= &t; + } else { + let (left, right) = a.split_at_mut(n / 2); + multicore::join( + || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles), + || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles), + ); + + // case when twiddle factor is one + let (a, left) = left.split_at_mut(1); + let (b, right) = right.split_at_mut(1); + let t = b[0]; + b[0] = a[0]; + a[0] += &t; + b[0] -= &t; + + left.iter_mut() + .zip(right.iter_mut()) + 
.enumerate() + .for_each(|(i, (a, b))| { + let mut t = *b; + t *= &twiddles[(i + 1) * twiddle_chunk]; + *b = *a; + *a += &t; + *b -= &t; + }); + } +} + +/// Convert coefficient bases group elements to lagrange basis by inverse FFT. +pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { + let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]); + let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV; + for _ in k..C::Scalar::S { + omega_inv = omega_inv.square(); + } + + let mut g_lagrange_projective = g_projective; + best_fft(&mut g_lagrange_projective, omega_inv, k); + parallelize(&mut g_lagrange_projective, |g, _| { + for g in g.iter_mut() { + *g *= n_inv; + } + }); + + let mut g_lagrange = vec![C::identity(); 1 << k]; + parallelize(&mut g_lagrange, |g_lagrange, starts| { + C::Curve::batch_normalize( + &g_lagrange_projective[starts..(starts + g_lagrange.len())], + g_lagrange, + ); + }); + + g_lagrange +} + +/// This evaluates a provided polynomial (in coefficient form) at `point`. +pub fn eval_polynomial(poly: &[F], point: F) -> F { + fn evaluate(poly: &[F], point: F) -> F { + poly.iter() + .rev() + .fold(F::ZERO, |acc, coeff| acc * point + coeff) + } + let n = poly.len(); + let num_threads = multicore::current_num_threads(); + if n * 2 < num_threads { + evaluate(poly, point) + } else { + let chunk_size = (n + num_threads - 1) / num_threads; + let mut parts = vec![F::ZERO; num_threads]; + multicore::scope(|scope| { + for (chunk_idx, (out, poly)) in + parts.chunks_mut(1).zip(poly.chunks(chunk_size)).enumerate() + { + scope.spawn(move |_| { + let start = chunk_idx * chunk_size; + out[0] = evaluate(poly, point) * point.pow_vartime([start as u64, 0, 0, 0]); + }); + } + }); + parts.iter().fold(F::ZERO, |acc, coeff| acc + coeff) + } +} + +/// This computes the inner product of two vectors `a` and `b`. +/// +/// This function will panic if the two vectors are not the same size. +pub fn compute_inner_product(a: &[F], b: &[F]) -> F { + // TODO: parallelize? 
+ assert_eq!(a.len(), b.len()); + + let mut acc = F::ZERO; + for (a, b) in a.iter().zip(b.iter()) { + acc += (*a) * (*b); + } + + acc +} + +/// Divides polynomial `a` in `X` by `X - b` with +/// no remainder. +pub fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec +where + I::IntoIter: DoubleEndedIterator + ExactSizeIterator, +{ + b = -b; + let a = a.into_iter(); + + let mut q = vec![F::ZERO; a.len() - 1]; + + let mut tmp = F::ZERO; + for (q, r) in q.iter_mut().rev().zip(a.rev()) { + let mut lead_coeff = *r; + lead_coeff.sub_assign(&tmp); + *q = lead_coeff; + tmp = lead_coeff; + tmp.mul_assign(&b); + } + + q +} /// This utility function will parallelize an operation that is to be /// performed over a mutable slice. @@ -51,3 +432,123 @@ pub fn parallelize(v: &mu } }); } + +fn log2_floor(num: usize) -> u32 { + assert!(num > 0); + + let mut pow = 0; + + while (1 << (pow + 1)) <= num { + pow += 1; + } + + pow +} + +/// Returns coefficients of an n - 1 degree polynomial given a set of n points +/// and their evaluations. This function will panic if two values in `points` +/// are the same. 
+pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { + assert_eq!(points.len(), evals.len()); + if points.len() == 1 { + // Constant polynomial + vec![evals[0]] + } else { + let mut denoms = Vec::with_capacity(points.len()); + for (j, x_j) in points.iter().enumerate() { + let mut denom = Vec::with_capacity(points.len() - 1); + for x_k in points + .iter() + .enumerate() + .filter(|&(k, _)| k != j) + .map(|a| a.1) + { + denom.push(*x_j - x_k); + } + denoms.push(denom); + } + // Compute (x_j - x_k)^(-1) for each j != i + denoms.iter_mut().flat_map(|v| v.iter_mut()).batch_invert(); + + let mut final_poly = vec![F::ZERO; points.len()]; + for (j, (denoms, eval)) in denoms.into_iter().zip(evals.iter()).enumerate() { + let mut tmp: Vec = Vec::with_capacity(points.len()); + let mut product = Vec::with_capacity(points.len() - 1); + tmp.push(F::ONE); + for (x_k, denom) in points + .iter() + .enumerate() + .filter(|&(k, _)| k != j) + .map(|a| a.1) + .zip(denoms.into_iter()) + { + product.resize(tmp.len() + 1, F::ZERO); + for ((a, b), product) in tmp + .iter() + .chain(std::iter::once(&F::ZERO)) + .zip(std::iter::once(&F::ZERO).chain(tmp.iter())) + .zip(product.iter_mut()) + { + *product = *a * (-denom * x_k) + *b * denom; + } + std::mem::swap(&mut tmp, &mut product); + } + assert_eq!(tmp.len(), points.len()); + assert_eq!(product.len(), points.len() - 1); + for (final_coeff, interpolation_coeff) in final_poly.iter_mut().zip(tmp.into_iter()) { + *final_coeff += interpolation_coeff * eval; + } + } + final_poly + } +} + +pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { + fn evaluate(roots: &[F], z: F) -> F { + roots.iter().fold(F::ONE, |acc, point| (z - point) * acc) + } + let n = roots.len(); + let num_threads = multicore::current_num_threads(); + if n * 2 < num_threads { + evaluate(roots, z) + } else { + let chunk_size = (n + num_threads - 1) / num_threads; + let mut parts = vec![F::ONE; num_threads]; + multicore::scope(|scope| { + for (out, 
roots) in parts.chunks_mut(1).zip(roots.chunks(chunk_size)) { + scope.spawn(move |_| out[0] = evaluate(roots, z)); + } + }); + parts.iter().fold(F::ONE, |acc, part| acc * part) + } +} + +pub(crate) fn powers(base: F) -> impl Iterator { + std::iter::successors(Some(F::ONE), move |power| Some(base * power)) +} + +#[cfg(test)] +use rand_core::OsRng; + +#[cfg(test)] +use crate::halo2curves::pasta::Fp; + +#[test] +fn test_lagrange_interpolate() { + let rng = OsRng; + + let points = (0..5).map(|_| Fp::random(rng)).collect::>(); + let evals = (0..5).map(|_| Fp::random(rng)).collect::>(); + + for coeffs in 0..5 { + let points = &points[0..coeffs]; + let evals = &evals[0..coeffs]; + + let poly = lagrange_interpolate(points, evals); + assert_eq!(poly.len(), points.len()); + + for (point, eval) in points.iter().zip(evals) { + assert_eq!(eval_polynomial(&poly, *point), *eval); + } + } +} diff --git a/frontend/src/circuit.rs b/common/src/circuit.rs similarity index 98% rename from frontend/src/circuit.rs rename to common/src/circuit.rs index bd08c27c67..546325edd4 100644 --- a/frontend/src/circuit.rs +++ b/common/src/circuit.rs @@ -1,12 +1,12 @@ //! Traits and structs for implementing circuit components. 
-use crate::error::Error; -use crate::plonk::{Selector, TableColumn}; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; -use halo2_middleware::ff::Field; -use halo2_middleware::plonk::Assigned; use std::{fmt, marker::PhantomData}; +use ff::Field; + +use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector, TableColumn}; +use halo2_middleware::circuit::Challenge; + mod value; pub use value::Value; diff --git a/frontend/src/circuit/floor_planner.rs b/common/src/circuit/floor_planner.rs similarity index 100% rename from frontend/src/circuit/floor_planner.rs rename to common/src/circuit/floor_planner.rs diff --git a/frontend/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs similarity index 98% rename from frontend/src/circuit/floor_planner/single_pass.rs rename to common/src/circuit/floor_planner/single_pass.rs index 665b882a66..2c3bb2437a 100644 --- a/frontend/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -5,17 +5,18 @@ use std::marker::PhantomData; use ff::Field; -use crate::error::Error; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assignment, Circuit, FloorPlanner, Selector, TableColumn}, + plonk::{ + Advice, Any, Assigned, Assignment, Circuit, Column, Error, Fixed, FloorPlanner, Instance, + Selector, TableColumn, + }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use halo2_middleware::circuit::Challenge; /// A simple [`FloorPlanner`] that performs minimal optimizations. 
/// diff --git a/frontend/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs similarity index 98% rename from frontend/src/circuit/floor_planner/v1.rs rename to common/src/circuit/floor_planner/v1.rs index a8d52fe0ce..32af27d182 100644 --- a/frontend/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -2,18 +2,18 @@ use std::fmt; use ff::Field; -use crate::error::Error; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assignment, Circuit, FloorPlanner, Selector, TableColumn}, + plonk::{ + Advice, Any, Assigned, Assignment, Circuit, Column, Error, Fixed, FloorPlanner, Instance, + Selector, TableColumn, + }, }; use halo2_middleware::circuit::Challenge; -use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; mod strategy; diff --git a/frontend/src/circuit/floor_planner/v1/strategy.rs b/common/src/circuit/floor_planner/v1/strategy.rs similarity index 99% rename from frontend/src/circuit/floor_planner/v1/strategy.rs rename to common/src/circuit/floor_planner/v1/strategy.rs index 86db63124d..71745de245 100644 --- a/frontend/src/circuit/floor_planner/v1/strategy.rs +++ b/common/src/circuit/floor_planner/v1/strategy.rs @@ -5,8 +5,7 @@ use std::{ }; use super::{RegionColumn, RegionShape}; -use crate::circuit::RegionStart; -use halo2_middleware::circuit::Any; +use crate::{circuit::RegionStart, plonk::Any}; /// A region allocated within a column. 
#[derive(Clone, Default, Debug, PartialEq, Eq)] diff --git a/frontend/src/circuit/layouter.rs b/common/src/circuit/layouter.rs similarity index 98% rename from frontend/src/circuit/layouter.rs rename to common/src/circuit/layouter.rs index 9cd2dd57a2..f939c3fca5 100644 --- a/frontend/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -8,10 +8,7 @@ use ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; -use crate::error::Error; -use crate::plonk::Selector; -use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector}; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. #[cfg(feature = "thread-safe-region")] diff --git a/frontend/src/circuit/table_layouter.rs b/common/src/circuit/table_layouter.rs similarity index 98% rename from frontend/src/circuit/table_layouter.rs rename to common/src/circuit/table_layouter.rs index ce0330ec5c..7189621067 100644 --- a/frontend/src/circuit/table_layouter.rs +++ b/common/src/circuit/table_layouter.rs @@ -7,9 +7,7 @@ use std::{ use ff::Field; -use crate::error::{Error, TableError}; -use crate::plonk::{Assignment, TableColumn}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; use super::Value; @@ -161,8 +159,8 @@ mod tests { circuit::{Layouter, SimpleFloorPlanner}, dev::MockProver, plonk::{Circuit, ConstraintSystem}, - poly::Rotation, }; + use halo2_middleware::poly::Rotation; use super::*; diff --git a/frontend/src/circuit/value.rs b/common/src/circuit/value.rs similarity index 99% rename from frontend/src/circuit/value.rs rename to common/src/circuit/value.rs index 54111f3ba1..f3ea6a39ea 100644 --- a/frontend/src/circuit/value.rs +++ b/common/src/circuit/value.rs @@ -3,8 +3,7 @@ use std::ops::{Add, Mul, Neg, Sub}; use 
group::ff::Field; -use crate::error::Error; -use halo2_middleware::plonk::Assigned; +use crate::plonk::{Assigned, Error}; /// A value that might exist within a circuit. /// diff --git a/common/src/dev.rs b/common/src/dev.rs new file mode 100644 index 0000000000..dc42abfa20 --- /dev/null +++ b/common/src/dev.rs @@ -0,0 +1,1855 @@ +//! Tools for developing circuits. + +use std::collections::HashMap; +use std::collections::HashSet; +use std::iter; +use std::ops::{Add, Mul, Neg, Range}; + +use blake2b_simd::blake2b; +use ff::Field; +use ff::FromUniformBytes; + +use crate::plonk::permutation::keygen::Assembly; +use crate::{ + circuit, + plonk::{ + permutation, + sealed::{self, SealedPhase}, + Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Expression, + FirstPhase, Fixed, FloorPlanner, Instance, Phase, Selector, + }, +}; +use halo2_middleware::circuit::Challenge; + +use crate::multicore::{ + IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, + ParallelSliceMut, +}; + +pub mod metadata; +use metadata::Column as ColumnMetadata; +mod util; + +mod failure; +pub use failure::{FailureLocation, VerifyFailure}; + +pub mod cost; +pub use cost::CircuitCost; + +#[cfg(feature = "cost-estimator")] +pub mod cost_model; + +mod gates; +pub use gates::CircuitGates; + +mod tfp; +pub use tfp::TracingFloorPlanner; + +#[cfg(feature = "dev-graph")] +mod graph; + +#[cfg(feature = "dev-graph")] +#[cfg_attr(docsrs, doc(cfg(feature = "dev-graph")))] +pub use graph::{circuit_dot_graph, layout::CircuitLayout}; + +#[derive(Debug)] +struct Region { + /// The name of the region. Not required to be unique. + name: String, + /// The columns involved in this region. + columns: HashSet>, + /// The rows that this region starts and ends on, if known. + rows: Option<(usize, usize)>, + /// The selectors that have been enabled in this region. All other selectors are by + /// construction not enabled. 
+ enabled_selectors: HashMap>, + /// Annotations given to Advice, Fixed or Instance columns within a region context. + annotations: HashMap, + /// The cells assigned in this region. We store this as a `Vec` so that if any cells + /// are double-assigned, they will be visibly darker. + cells: HashMap<(Column, usize), usize>, +} + +impl Region { + fn update_extent(&mut self, column: Column, row: usize) { + self.columns.insert(column); + + // The region start is the earliest row assigned to. + // The region end is the latest row assigned to. + let (mut start, mut end) = self.rows.unwrap_or((row, row)); + if row < start { + // The first row assigned was not at start 0 within the region. + start = row; + } + if row > end { + end = row; + } + self.rows = Some((start, end)); + } +} + +/// The value of a particular cell within the circuit. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CellValue { + /// An unassigned cell. + Unassigned, + /// A cell that has been assigned a value. + Assigned(F), + /// A unique poisoned cell. + Poison(usize), +} + +/// A value within an expression. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)] +enum Value { + Real(F), + Poison, +} + +impl From> for Value { + fn from(value: CellValue) -> Self { + match value { + // Cells that haven't been explicitly assigned to, default to zero. 
+ CellValue::Unassigned => Value::Real(F::ZERO), + CellValue::Assigned(v) => Value::Real(v), + CellValue::Poison(_) => Value::Poison, + } + } +} + +impl Neg for Value { + type Output = Self; + + fn neg(self) -> Self::Output { + match self { + Value::Real(a) => Value::Real(-a), + _ => Value::Poison, + } + } +} + +impl Add for Value { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + match (self, rhs) { + (Value::Real(a), Value::Real(b)) => Value::Real(a + b), + _ => Value::Poison, + } + } +} + +impl Mul for Value { + type Output = Self; + + fn mul(self, rhs: Self) -> Self::Output { + match (self, rhs) { + (Value::Real(a), Value::Real(b)) => Value::Real(a * b), + // If poison is multiplied by zero, then we treat the poison as unconstrained + // and we don't propagate it. + (Value::Real(x), Value::Poison) | (Value::Poison, Value::Real(x)) + if x.is_zero_vartime() => + { + Value::Real(F::ZERO) + } + _ => Value::Poison, + } + } +} + +impl Mul for Value { + type Output = Self; + + fn mul(self, rhs: F) -> Self::Output { + match self { + Value::Real(lhs) => Value::Real(lhs * rhs), + // If poison is multiplied by zero, then we treat the poison as unconstrained + // and we don't propagate it. + Value::Poison if rhs.is_zero_vartime() => Value::Real(F::ZERO), + _ => Value::Poison, + } + } +} + +/// A test prover for debugging circuits. +/// +/// The normal proving process, when applied to a buggy circuit implementation, might +/// return proofs that do not validate when they should, but it can't indicate anything +/// other than "something is invalid". `MockProver` can be used to figure out _why_ these +/// are invalid: it stores all the private inputs along with the circuit internals, and +/// then checks every constraint manually. 
+/// +/// # Examples +/// +/// ``` +/// use halo2_proofs::{ +/// circuit::{Layouter, SimpleFloorPlanner, Value}, +/// dev::{FailureLocation, MockProver, VerifyFailure}, +/// plonk::{Advice, Any, Circuit, Column, ConstraintSystem, Error, Selector}, +/// poly::Rotation, +/// }; +/// use ff::PrimeField; +/// use halo2curves::pasta::Fp; +/// const K: u32 = 5; +/// +/// #[derive(Copy, Clone)] +/// struct MyConfig { +/// a: Column, +/// b: Column, +/// c: Column, +/// s: Selector, +/// } +/// +/// #[derive(Clone, Default)] +/// struct MyCircuit { +/// a: Value, +/// b: Value, +/// } +/// +/// impl Circuit for MyCircuit { +/// type Config = MyConfig; +/// type FloorPlanner = SimpleFloorPlanner; +/// #[cfg(feature = "circuit-params")] +/// type Params = (); +/// +/// fn without_witnesses(&self) -> Self { +/// Self::default() +/// } +/// +/// fn configure(meta: &mut ConstraintSystem) -> MyConfig { +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("R1CS constraint", |meta| { +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s = meta.query_selector(s); +/// +/// // BUG: Should be a * b - c +/// Some(("buggy R1CS", s * (a * b + c))) +/// }); +/// +/// MyConfig { a, b, c, s } +/// } +/// +/// fn synthesize(&self, config: MyConfig, mut layouter: impl Layouter) -> Result<(), Error> { +/// layouter.assign_region(|| "Example region", |mut region| { +/// config.s.enable(&mut region, 0)?; +/// region.assign_advice(|| "a", config.a, 0, || { +/// self.a.map(F::from) +/// })?; +/// region.assign_advice(|| "b", config.b, 0, || { +/// self.b.map(F::from) +/// })?; +/// region.assign_advice(|| "c", config.c, 0, || { +/// (self.a * self.b).map(F::from) +/// })?; +/// Ok(()) +/// }) +/// } +/// } +/// +/// // Assemble the private inputs to the circuit. 
+/// let circuit = MyCircuit { +/// a: Value::known(2), +/// b: Value::known(4), +/// }; +/// +/// // This circuit has no public inputs. +/// let instance = vec![]; +/// +/// let prover = MockProver::::run(K, &circuit, instance).unwrap(); +/// assert_eq!( +/// prover.verify(), +/// Err(vec![VerifyFailure::ConstraintNotSatisfied { +/// constraint: ((0, "R1CS constraint").into(), 0, "buggy R1CS").into(), +/// location: FailureLocation::InRegion { +/// region: (0, "Example region").into(), +/// offset: 0, +/// }, +/// cell_values: vec![ +/// (((Any::advice(), 0).into(), 0).into(), "0x2".to_string()), +/// (((Any::advice(), 1).into(), 0).into(), "0x4".to_string()), +/// (((Any::advice(), 2).into(), 0).into(), "0x8".to_string()), +/// ], +/// }]) +/// ); +/// +/// // If we provide a too-small K, we get a panic. +/// use std::panic; +/// let result = panic::catch_unwind(|| { +/// MockProver::::run(2, &circuit, vec![]).unwrap_err() +/// }); +/// assert_eq!( +/// result.unwrap_err().downcast_ref::().unwrap(), +/// "n=4, minimum_rows=8, k=2" +/// ); +/// ``` +#[derive(Debug)] +pub struct MockProver { + k: u32, + n: u32, + cs: ConstraintSystem, + + /// The regions in the circuit. + regions: Vec, + /// The current region being assigned to. Will be `None` after the circuit has been + /// synthesized. + current_region: Option, + + // The fixed cells in the circuit, arranged as [column][row]. + fixed: Vec>>, + // The advice cells in the circuit, arranged as [column][row]. + advice: Vec>>, + // The instance cells in the circuit, arranged as [column][row]. + instance: Vec>>, + + selectors: Vec>, + + challenges: Vec, + + permutation: permutation::keygen::Assembly, + + // A range of available rows for assignment and copies. 
+ usable_rows: Range, + + current_phase: sealed::Phase, +} + +/// Instance Value +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum InstanceValue { + /// Assigned instance value + Assigned(F), + /// Padding + Padding, +} + +impl InstanceValue { + fn value(&self) -> F { + match self { + InstanceValue::Assigned(v) => *v, + InstanceValue::Padding => F::ZERO, + } + } +} + +impl MockProver { + fn in_phase(&self, phase: P) -> bool { + self.current_phase == phase.to_sealed() + } +} + +impl Assignment for MockProver { + fn enter_region(&mut self, name: N) + where + NR: Into, + N: FnOnce() -> NR, + { + if !self.in_phase(FirstPhase) { + return; + } + + assert!(self.current_region.is_none()); + self.current_region = Some(Region { + name: name().into(), + columns: HashSet::default(), + rows: None, + annotations: HashMap::default(), + enabled_selectors: HashMap::default(), + cells: HashMap::default(), + }); + } + + fn exit_region(&mut self) { + if !self.in_phase(FirstPhase) { + return; + } + + self.regions.push(self.current_region.take().unwrap()); + } + + fn annotate_column(&mut self, annotation: A, column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.in_phase(FirstPhase) { + return; + } + + if let Some(region) = self.current_region.as_mut() { + region + .annotations + .insert(ColumnMetadata::from(column), annotation().into()); + } + } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.in_phase(FirstPhase) { + return Ok(()); + } + + assert!( + self.usable_rows.contains(&row), + "row={} not in usable_rows={:?}, k={}", + row, + self.usable_rows, + self.k, + ); + + // Track that this selector was enabled. We require that all selectors are enabled + // inside some region (i.e. no floating selectors). 
+ self.current_region + .as_mut() + .unwrap() + .enabled_selectors + .entry(*selector) + .or_default() + .push(row); + + self.selectors[selector.0][row] = true; + + Ok(()) + } + + fn query_instance( + &self, + column: Column, + row: usize, + ) -> Result, Error> { + assert!( + self.usable_rows.contains(&row), + "row={}, usable_rows={:?}, k={}", + row, + self.usable_rows, + self.k, + ); + + Ok(self + .instance + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| circuit::Value::known(v.value())) + .expect("bound failure")) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> circuit::Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if self.in_phase(FirstPhase) { + assert!( + self.usable_rows.contains(&row), + "row={}, usable_rows={:?}, k={}", + row, + self.usable_rows, + self.k, + ); + + if let Some(region) = self.current_region.as_mut() { + region.update_extent(column.into(), row); + region + .cells + .entry((column.into(), row)) + .and_modify(|count| *count += 1) + .or_default(); + } + } + + match to().into_field().evaluate().assign() { + Ok(to) => { + let value = self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .expect("bounds failure"); + *value = CellValue::Assigned(to); + } + Err(err) => { + // Propagate `assign` error if the column is in current phase. 
+ if self.in_phase(column.column_type().phase) { + return Err(err); + } + } + } + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> circuit::Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if !self.in_phase(FirstPhase) { + return Ok(()); + } + + assert!( + self.usable_rows.contains(&row), + "row={}, usable_rows={:?}, k={}", + row, + self.usable_rows, + self.k, + ); + + if let Some(region) = self.current_region.as_mut() { + region.update_extent(column.into(), row); + region + .cells + .entry((column.into(), row)) + .and_modify(|count| *count += 1) + .or_default(); + } + + *self + .fixed + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .expect("bounds failure") = CellValue::Assigned(to().into_field().evaluate().assign()?); + + Ok(()) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), crate::plonk::Error> { + if !self.in_phase(FirstPhase) { + return Ok(()); + } + + assert!( + self.usable_rows.contains(&left_row) && self.usable_rows.contains(&right_row), + "left_row={}, right_row={}, usable_rows={:?}, k={}", + left_row, + right_row, + self.usable_rows, + self.k, + ); + + self.permutation + .copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + col: Column, + from_row: usize, + to: circuit::Value>, + ) -> Result<(), Error> { + if !self.in_phase(FirstPhase) { + return Ok(()); + } + + assert!( + self.usable_rows.contains(&from_row), + "row={}, usable_rows={:?}, k={}", + from_row, + self.usable_rows, + self.k, + ); + + for row in self.usable_rows.clone().skip(from_row) { + self.assign_fixed(|| "", col, row, || to)?; + } + + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> circuit::Value { + if self.current_phase.0 <= challenge.phase() { + return circuit::Value::unknown(); + } + + 
circuit::Value::known(self.challenges[challenge.index()]) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // TODO: Do something with namespaces :) + } + + fn pop_namespace(&mut self, _: Option) { + // TODO: Do something with namespaces :) + } +} + +impl + Ord> MockProver { + /// Runs a synthetic keygen-and-prove operation on the given circuit, collecting data + /// about the constraints and their assignments. + pub fn run>( + k: u32, + circuit: &ConcreteCircuit, + instance: Vec>, + ) -> Result { + let n = 1 << k; + + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + assert!( + n >= cs.minimum_rows(), + "n={}, minimum_rows={}, k={}", + n, + cs.minimum_rows(), + k, + ); + + assert_eq!(instance.len(), cs.num_instance_columns); + + let instance = instance + .into_iter() + .map(|instance| { + assert!( + instance.len() <= n - (cs.blinding_factors() + 1), + "instance.len={}, n={}, cs.blinding_factors={}", + instance.len(), + n, + cs.blinding_factors() + ); + + let mut instance_values = vec![InstanceValue::Padding; n]; + for (idx, value) in instance.into_iter().enumerate() { + instance_values[idx] = InstanceValue::Assigned(value); + } + + instance_values + }) + .collect::>(); + + // Fixed columns contain no blinding factors. + let fixed = vec![vec![CellValue::Unassigned; n]; cs.num_fixed_columns]; + let selectors = vec![vec![false; n]; cs.num_selectors]; + // Advice columns contain blinding factors. + let blinding_factors = cs.blinding_factors(); + let usable_rows = n - (blinding_factors + 1); + let advice = vec![ + { + let mut column = vec![CellValue::Unassigned; n]; + // Poison unusable rows. 
+ for (i, cell) in column.iter_mut().enumerate().skip(usable_rows) { + *cell = CellValue::Poison(i); + } + column + }; + cs.num_advice_columns + ]; + let permutation = permutation::keygen::Assembly::new(n, &cs.permutation); + let constants = cs.constants.clone(); + + // Use hash chain to derive deterministic challenges for testing + let challenges = { + let mut hash: [u8; 64] = blake2b(b"Halo2-MockProver").as_bytes().try_into().unwrap(); + iter::repeat_with(|| { + hash = blake2b(&hash).as_bytes().try_into().unwrap(); + F::from_uniform_bytes(&hash) + }) + .take(cs.num_challenges) + .collect() + }; + + let mut prover = MockProver { + k, + n: n as u32, + cs, + regions: vec![], + current_region: None, + fixed, + advice, + instance, + selectors, + challenges, + permutation, + usable_rows: 0..usable_rows, + current_phase: FirstPhase.to_sealed(), + }; + + for current_phase in prover.cs.phases() { + prover.current_phase = current_phase; + ConcreteCircuit::FloorPlanner::synthesize( + &mut prover, + circuit, + config.clone(), + constants.clone(), + )?; + } + + let (cs, selector_polys) = prover.cs.compress_selectors(prover.selectors.clone()); + prover.cs = cs; + prover.fixed.extend(selector_polys.into_iter().map(|poly| { + let mut v = vec![CellValue::Unassigned; n]; + for (v, p) in v.iter_mut().zip(&poly[..]) { + *v = CellValue::Assigned(*p); + } + v + })); + + #[cfg(feature = "thread-safe-region")] + prover.permutation.build_ordered_mapping(); + + Ok(prover) + } + + /// Return the content of an advice column as assigned by the circuit. + pub fn advice_values(&self, column: Column) -> &[CellValue] { + &self.advice[column.index()] + } + + /// Return the content of a fixed column as assigned by the circuit. + pub fn fixed_values(&self, column: Column) -> &[CellValue] { + &self.fixed[column.index()] + } + + /// Returns `Ok(())` if this `MockProver` is satisfied, or a list of errors indicating + /// the reasons that the circuit is not satisfied. 
+ /// Constraints and lookup are checked at `usable_rows`, parallelly. + pub fn verify(&self) -> Result<(), Vec> { + self.verify_at_rows(self.usable_rows.clone(), self.usable_rows.clone()) + } + + /// Returns `Ok(())` if this `MockProver` is satisfied, or a list of errors indicating + /// the reasons that the circuit is not satisfied. + /// Constraints are only checked at `gate_row_ids`, and lookup inputs are only checked at `lookup_input_row_ids`, parallelly. + pub fn verify_at_rows>( + &self, + gate_row_ids: I, + lookup_input_row_ids: I, + ) -> Result<(), Vec> { + let n = self.n as i32; + + let gate_row_ids = gate_row_ids.collect::>(); + let lookup_input_row_ids = lookup_input_row_ids.collect::>(); + + // check all the row ids are valid + gate_row_ids.par_iter().for_each(|row_id| { + if !self.usable_rows.contains(row_id) { + panic!("invalid gate row id {row_id}"); + } + }); + lookup_input_row_ids.par_iter().for_each(|row_id| { + if !self.usable_rows.contains(row_id) { + panic!("invalid gate row id {row_id}"); + } + }); + + // Check that within each region, all cells used in instantiated gates have been + // assigned to. + let selector_errors = self.regions.iter().enumerate().flat_map(|(r_i, r)| { + r.enabled_selectors.iter().flat_map(move |(selector, at)| { + // Find the gates enabled by this selector + self.cs + .gates + .iter() + // Assume that if a queried selector is enabled, the user wants to use the + // corresponding gate in some way. + // + // TODO: This will trip up on the reverse case, where leaving a selector + // un-enabled keeps a gate enabled. We could alternatively require that + // every selector is explicitly enabled or disabled on every row? But that + // seems messy and confusing. + .enumerate() + .filter(move |(_, g)| g.queried_selectors().contains(selector)) + .flat_map(move |(gate_index, gate)| { + at.par_iter() + .flat_map(move |selector_row| { + // Selectors are queried with no rotation. 
+ let gate_row = *selector_row as i32; + + gate.queried_cells() + .iter() + .filter_map(move |cell| { + // Determine where this cell should have been assigned. + let cell_row = + ((gate_row + n + cell.rotation.0) % n) as usize; + + match cell.column.column_type() { + Any::Instance => { + // Handle instance cells, which are not in the region. + let instance_value = + &self.instance[cell.column.index()][cell_row]; + match instance_value { + InstanceValue::Assigned(_) => None, + _ => Some( + VerifyFailure::InstanceCellNotAssigned { + gate: (gate_index, gate.name()).into(), + region: (r_i, r.name.clone()).into(), + gate_offset: *selector_row, + column: cell.column.try_into().unwrap(), + row: cell_row, + }, + ), + } + } + _ => { + // Check that it was assigned! + if r.cells.contains_key(&(cell.column, cell_row)) { + None + } else { + Some(VerifyFailure::CellNotAssigned { + gate: (gate_index, gate.name()).into(), + region: ( + r_i, + r.name.clone(), + r.annotations.clone(), + ) + .into(), + gate_offset: *selector_row, + column: cell.column, + offset: cell_row as isize + - r.rows.unwrap().0 as isize, + }) + } + } + } + }) + .collect::>() + }) + .collect::>() + }) + }) + }); + + // Check that all gates are satisfied for all rows. 
+ let gate_errors = self + .cs + .gates + .iter() + .enumerate() + .flat_map(|(gate_index, gate)| { + let blinding_rows = + (self.n as usize - (self.cs.blinding_factors() + 1))..(self.n as usize); + (gate_row_ids + .clone() + .into_par_iter() + .chain(blinding_rows.into_par_iter())) + .flat_map(move |row| { + let row = row as i32 + n; + gate.polynomials() + .iter() + .enumerate() + .filter_map(move |(poly_index, poly)| { + match poly.evaluate_lazy( + &|scalar| Value::Real(scalar), + &|_| panic!("virtual selectors are removed during optimization"), + &util::load(n, row, &self.cs.fixed_queries, &self.fixed), + &util::load(n, row, &self.cs.advice_queries, &self.advice), + &util::load_instance( + n, + row, + &self.cs.instance_queries, + &self.instance, + ), + &|challenge| Value::Real(self.challenges[challenge.index()]), + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + &|a, scalar| a * scalar, + &Value::Real(F::ZERO), + ) { + Value::Real(x) if x.is_zero_vartime() => None, + Value::Real(_) => Some(VerifyFailure::ConstraintNotSatisfied { + constraint: ( + (gate_index, gate.name()).into(), + poly_index, + gate.constraint_name(poly_index), + ) + .into(), + location: FailureLocation::find_expressions( + &self.cs, + &self.regions, + (row - n) as usize, + Some(poly).into_iter(), + ), + cell_values: util::cell_values( + gate, + poly, + &util::load(n, row, &self.cs.fixed_queries, &self.fixed), + &util::load(n, row, &self.cs.advice_queries, &self.advice), + &util::load_instance( + n, + row, + &self.cs.instance_queries, + &self.instance, + ), + ), + }), + Value::Poison => Some(VerifyFailure::ConstraintPoisoned { + constraint: ( + (gate_index, gate.name()).into(), + poly_index, + gate.constraint_name(poly_index), + ) + .into(), + }), + } + }) + .collect::>() + }) + .collect::>() + }); + + let load = |expression: &Expression, row| { + expression.evaluate_lazy( + &|scalar| Value::Real(scalar), + &|_| panic!("virtual selectors are removed during optimization"), + &|query| { + 
self.fixed[query.column_index] + [(row as i32 + n + query.rotation.0) as usize % n as usize] + .into() + }, + &|query| { + self.advice[query.column_index] + [(row as i32 + n + query.rotation.0) as usize % n as usize] + .into() + }, + &|query| { + Value::Real( + self.instance[query.column_index] + [(row as i32 + n + query.rotation.0) as usize % n as usize] + .value(), + ) + }, + &|challenge| Value::Real(self.challenges[challenge.index()]), + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + &|a, scalar| a * scalar, + &Value::Real(F::ZERO), + ) + }; + + let mut cached_table = Vec::new(); + let mut cached_table_identifier = Vec::new(); + // Check that all lookups exist in their respective tables. + let lookup_errors = + self.cs + .lookups + .iter() + .enumerate() + .flat_map(|(lookup_index, lookup)| { + assert!(lookup.table_expressions.len() == lookup.input_expressions.len()); + assert!(self.usable_rows.end > 0); + + // We optimize on the basis that the table might have been filled so that the last + // usable row now has the fill contents (it doesn't matter if there was no filling). + // Note that this "fill row" necessarily exists in the table, and we use that fact to + // slightly simplify the optimization: we're only trying to check that all input rows + // are contained in the table, and so we can safely just drop input rows that + // match the fill row. + let fill_row: Vec<_> = lookup + .table_expressions + .iter() + .map(move |c| load(c, self.usable_rows.end - 1)) + .collect(); + + let table_identifier = lookup + .table_expressions + .iter() + .map(Expression::identifier) + .collect::>(); + if table_identifier != cached_table_identifier { + cached_table_identifier = table_identifier; + + // In the real prover, the lookup expressions are never enforced on + // unusable rows, due to the (1 - (l_last(X) + l_blind(X))) term. 
+ cached_table = self + .usable_rows + .clone() + .into_par_iter() + .filter_map(|table_row| { + let t = lookup + .table_expressions + .iter() + .map(move |c| load(c, table_row)) + .collect(); + + if t != fill_row { + Some(t) + } else { + None + } + }) + .collect(); + cached_table.par_sort_unstable(); + } + let table = &cached_table; + + let mut inputs: Vec<(Vec<_>, usize)> = lookup_input_row_ids + .clone() + .into_par_iter() + .filter_map(|input_row| { + let t = lookup + .input_expressions + .iter() + .map(move |c| load(c, input_row)) + .collect(); + + if t != fill_row { + // Also keep track of the original input row, since we're going to sort. + Some((t, input_row)) + } else { + None + } + }) + .collect(); + inputs.par_sort_unstable(); + + inputs + .par_iter() + .filter_map(move |(input, input_row)| { + if table.binary_search(input).is_err() { + Some(VerifyFailure::Lookup { + name: lookup.name.clone(), + lookup_index, + location: FailureLocation::find_expressions( + &self.cs, + &self.regions, + *input_row, + lookup.input_expressions.iter(), + ), + }) + } else { + None + } + }) + .collect::>() + }); + + let shuffle_errors = + self.cs + .shuffles + .iter() + .enumerate() + .flat_map(|(shuffle_index, shuffle)| { + assert!(shuffle.shuffle_expressions.len() == shuffle.input_expressions.len()); + assert!(self.usable_rows.end > 0); + + let mut shuffle_rows: Vec>> = self + .usable_rows + .clone() + .map(|row| { + let t = shuffle + .shuffle_expressions + .iter() + .map(move |c| load(c, row)) + .collect(); + t + }) + .collect(); + shuffle_rows.sort(); + + let mut input_rows: Vec<(Vec>, usize)> = self + .usable_rows + .clone() + .map(|input_row| { + let t = shuffle + .input_expressions + .iter() + .map(move |c| load(c, input_row)) + .collect(); + + (t, input_row) + }) + .collect(); + input_rows.sort(); + + input_rows + .iter() + .zip(shuffle_rows.iter()) + .filter_map(|((input_value, row), shuffle_value)| { + if shuffle_value != input_value { + Some(VerifyFailure::Shuffle { 
+ name: shuffle.name.clone(), + shuffle_index, + location: FailureLocation::find_expressions( + &self.cs, + &self.regions, + *row, + shuffle.input_expressions.iter(), + ), + }) + } else { + None + } + }) + .collect::>() + }); + + let mapping = self.permutation.mapping(); + // Check that permutations preserve the original values of the cells. + let perm_errors = { + // Original values of columns involved in the permutation. + let original = |column, row| { + self.cs + .permutation + .get_columns() + .get(column) + .map(|c: &Column| match c.column_type() { + Any::Advice(_) => self.advice[c.index()][row], + Any::Fixed => self.fixed[c.index()][row], + Any::Instance => { + let cell: &InstanceValue = &self.instance[c.index()][row]; + CellValue::Assigned(cell.value()) + } + }) + .unwrap() + }; + + // Iterate over each column of the permutation + mapping.enumerate().flat_map(move |(column, values)| { + // Iterate over each row of the column to check that the cell's + // value is preserved by the mapping. + values + .enumerate() + .filter_map(move |(row, cell)| { + let original_cell = original(column, row); + let permuted_cell = original(cell.0, cell.1); + if original_cell == permuted_cell { + None + } else { + let columns = self.cs.permutation.get_columns(); + let column = columns.get(column).unwrap(); + Some(VerifyFailure::Permutation { + column: (*column).into(), + location: FailureLocation::find( + &self.regions, + row, + Some(column).into_iter().cloned().collect(), + ), + }) + } + }) + .collect::>() + }) + }; + + let mut errors: Vec<_> = iter::empty() + .chain(selector_errors) + .chain(gate_errors) + .chain(lookup_errors) + .chain(perm_errors) + .chain(shuffle_errors) + .collect(); + if errors.is_empty() { + Ok(()) + } else { + // Remove any duplicate `ConstraintPoisoned` errors (we check all unavailable + // rows in case the trigger is row-specific, but the error message only points + // at the constraint). 
+ errors.dedup_by(|a, b| match (a, b) { + ( + a @ VerifyFailure::ConstraintPoisoned { .. }, + b @ VerifyFailure::ConstraintPoisoned { .. }, + ) => a == b, + _ => false, + }); + Err(errors) + } + } + + /// Panics if the circuit being checked by this `MockProver` is not satisfied. + /// + /// Any verification failures will be pretty-printed to stderr before the function + /// panics. + /// + /// Apart from the stderr output, this method is equivalent to: + /// ```ignore + /// assert_eq!(prover.verify(), Ok(())); + /// ``` + pub fn assert_satisfied(&self) { + if let Err(errs) = self.verify() { + for err in errs { + err.emit(self); + eprintln!(); + } + panic!("circuit was not satisfied"); + } + } + + /// Panics if the circuit being checked by this `MockProver` is not satisfied. + /// + /// Any verification failures will be pretty-printed to stderr before the function + /// panics. + /// + /// Constraints are only checked at `gate_row_ids`, and lookup inputs are only checked at `lookup_input_row_ids`, parallelly. + /// + /// Apart from the stderr output, this method is equivalent to: + /// ```ignore + /// assert_eq!(prover.verify_at_rows(), Ok(())); + /// ``` + pub fn assert_satisfied_at_rows>( + &self, + gate_row_ids: I, + lookup_input_row_ids: I, + ) { + if let Err(errs) = self.verify_at_rows(gate_row_ids, lookup_input_row_ids) { + for err in errs { + err.emit(self); + eprintln!(); + } + panic!("circuit was not satisfied"); + } + } + + /// Returns the constraint system + pub fn cs(&self) -> &ConstraintSystem { + &self.cs + } + + /// Returns the usable rows + pub fn usable_rows(&self) -> &Range { + &self.usable_rows + } + + /// Returns the list of Advice Columns used within a MockProver instance and the associated values contained on each Cell. + pub fn advice(&self) -> &Vec>> { + &self.advice + } + + /// Returns the list of Fixed Columns used within a MockProver instance and the associated values contained on each Cell. 
+ pub fn fixed(&self) -> &Vec>> { + &self.fixed + } + + /// Returns the list of Selector Columns used within a MockProver instance and the associated values contained on each Cell. + pub fn selectors(&self) -> &Vec> { + &self.selectors + } + + /// Returns the list of Instance Columns used within a MockProver instance and the associated values contained on each Cell. + pub fn instance(&self) -> &Vec>> { + &self.instance + } + + /// Returns the permutation argument (`Assembly`) used within a MockProver instance. + pub fn permutation(&self) -> &Assembly { + &self.permutation + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::{FailureLocation, MockProver, VerifyFailure}; + use crate::{ + circuit::{Layouter, SimpleFloorPlanner, Value}, + plonk::{ + sealed::SealedPhase, Advice, Any, Circuit, Column, ConstraintSystem, Error, Expression, + FirstPhase, Fixed, Instance, Selector, TableColumn, + }, + }; + use halo2_middleware::poly::Rotation; + + #[test] + fn unassigned_cell() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + a: Column, + b: Column, + q: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let b = meta.advice_column(); + let q = meta.selector(); + + meta.create_gate("Equality check", |cells| { + let a = cells.query_advice(a, Rotation::prev()); + let b = cells.query_advice(b, Rotation::cur()); + let q = cells.query_selector(q); + + // If q is enabled, a and b must be assigned to. 
+ vec![q * (a - b)] + }); + + FaultyCircuitConfig { a, b, q } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "Faulty synthesis", + |mut region| { + // Enable the equality gate. + config.q.enable(&mut region, 1)?; + + // Assign a = 0. + region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::zero()))?; + + // Name Column a + region.name_column(|| "This is annotated!", config.a); + + // Name Column b + region.name_column(|| "This is also annotated!", config.b); + + // BUG: Forget to assign b = 0! This could go unnoticed during + // development, because cell values default to zero, which in this + // case is fine, but for other assignments would be broken. + Ok(()) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); + assert_eq!( + prover.verify(), + Err(vec![VerifyFailure::CellNotAssigned { + gate: (0, "Equality check").into(), + region: (0, "Faulty synthesis".to_owned()).into(), + gate_offset: 1, + column: Column::new( + 1, + Any::Advice(Advice { + phase: FirstPhase.to_sealed() + }) + ), + offset: 1, + }]) + ); + } + + #[test] + fn bad_lookup_any() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + a: Column, + table: Column, + advice_table: Column, + q: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let table = meta.instance_column(); + let advice_table = meta.advice_column(); + + meta.annotate_lookup_any_column(table, || "Inst-Table"); + meta.enable_equality(table); + meta.annotate_lookup_any_column(advice_table, || "Adv-Table"); + 
meta.enable_equality(advice_table); + + meta.lookup_any("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + let advice_table = cells.query_advice(advice_table, Rotation::cur()); + let table = cells.query_instance(table, Rotation::cur()); + + // If q is enabled, a must be in the table. + // When q is not enabled, lookup the default value instead. + let not_q = Expression::Constant(Fp::one()) - q.clone(); + let default = Expression::Constant(Fp::from(2)); + vec![ + ( + q.clone() * a.clone() + not_q.clone() * default.clone(), + table, + ), + (q * a + not_q * default, advice_table), + ] + }); + + FaultyCircuitConfig { + a, + q, + table, + advice_table, + } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + // No assignment needed for the table as is an Instance Column. + + layouter.assign_region( + || "Good synthesis", + |mut region| { + // Enable the lookup on rows 0 and 1. + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + for i in 0..4 { + // Load Advice lookup table with Instance lookup table values. + region.assign_advice_from_instance( + || "Advice from instance tables", + config.table, + i, + config.advice_table, + i, + )?; + } + + // Assign a = 2 and a = 6. + region.assign_advice( + || "a = 2", + config.a, + 0, + || Value::known(Fp::from(2)), + )?; + region.assign_advice( + || "a = 6", + config.a, + 1, + || Value::known(Fp::from(6)), + )?; + + Ok(()) + }, + )?; + + layouter.assign_region( + || "Faulty synthesis", + |mut region| { + // Enable the lookup on rows 0 and 1. + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + for i in 0..4 { + // Load Advice lookup table with Instance lookup table values. 
+ region.assign_advice_from_instance( + || "Advice from instance tables", + config.table, + i, + config.advice_table, + i, + )?; + } + + // Assign a = 4. + region.assign_advice( + || "a = 4", + config.a, + 0, + || Value::known(Fp::from(4)), + )?; + + // BUG: Assign a = 5, which doesn't exist in the table! + region.assign_advice( + || "a = 5", + config.a, + 1, + || Value::known(Fp::from(5)), + )?; + + region.name_column(|| "Witness example", config.a); + + Ok(()) + }, + ) + } + } + + let prover = MockProver::run( + K, + &FaultyCircuit {}, + // This is our "lookup table". + vec![vec![ + Fp::from(1u64), + Fp::from(2u64), + Fp::from(4u64), + Fp::from(6u64), + ]], + ) + .unwrap(); + assert_eq!( + prover.verify(), + Err(vec![VerifyFailure::Lookup { + name: "lookup".to_string(), + lookup_index: 0, + location: FailureLocation::InRegion { + region: (1, "Faulty synthesis").into(), + offset: 1, + } + }]) + ); + } + + #[test] + fn bad_fixed_lookup() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + a: Column, + q: Selector, + table: TableColumn, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let table = meta.lookup_table_column(); + meta.annotate_lookup_column(table, || "Table1"); + + meta.lookup("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + + // If q is enabled, a must be in the table. + // When q is not enabled, lookup the default value instead. 
+ let not_q = Expression::Constant(Fp::one()) - q.clone(); + let default = Expression::Constant(Fp::from(2)); + vec![(q * a + not_q * default, table)] + }); + + FaultyCircuitConfig { a, q, table } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "Doubling table", + |mut table| { + (1..(1 << (K - 1))) + .map(|i| { + table.assign_cell( + || format!("table[{}] = {}", i, 2 * i), + config.table, + i - 1, + || Value::known(Fp::from(2 * i as u64)), + ) + }) + .try_fold((), |_, res| res) + }, + )?; + + layouter.assign_region( + || "Good synthesis", + |mut region| { + // Enable the lookup on rows 0 and 1. + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + // Assign a = 2 and a = 6. + region.assign_advice( + || "a = 2", + config.a, + 0, + || Value::known(Fp::from(2)), + )?; + region.assign_advice( + || "a = 6", + config.a, + 1, + || Value::known(Fp::from(6)), + )?; + + Ok(()) + }, + )?; + + layouter.assign_region( + || "Faulty synthesis", + |mut region| { + // Enable the lookup on rows 0 and 1. + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + // Assign a = 4. + region.assign_advice( + || "a = 4", + config.a, + 0, + || Value::known(Fp::from(4)), + )?; + + // BUG: Assign a = 5, which doesn't exist in the table! 
+ region.assign_advice( + || "a = 5", + config.a, + 1, + || Value::known(Fp::from(5)), + )?; + + region.name_column(|| "Witness example", config.a); + + Ok(()) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); + assert_eq!( + prover.verify(), + Err(vec![VerifyFailure::Lookup { + name: "lookup".to_string(), + lookup_index: 0, + location: FailureLocation::InRegion { + region: (2, "Faulty synthesis").into(), + offset: 1, + } + }]) + ); + } + + #[test] + fn contraint_unsatisfied() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + a: Column, + b: Column, + c: Column, + d: Column, + q: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let b = meta.advice_column(); + let c = meta.advice_column(); + let d = meta.fixed_column(); + let q = meta.selector(); + + meta.create_gate("Equality check", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let b = cells.query_advice(b, Rotation::cur()); + let c = cells.query_advice(c, Rotation::cur()); + let d = cells.query_fixed(d, Rotation::cur()); + let q = cells.query_selector(q); + + // If q is enabled, a and b must be assigned to. + vec![q * (a - b) * (c - d)] + }); + + FaultyCircuitConfig { a, b, c, d, q } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "Correct synthesis", + |mut region| { + // Enable the equality gate. + config.q.enable(&mut region, 0)?; + + // Assign a = 1. + region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::one()))?; + + // Assign b = 1. 
+ region.assign_advice(|| "b", config.b, 0, || Value::known(Fp::one()))?; + + // Assign c = 5. + region.assign_advice( + || "c", + config.c, + 0, + || Value::known(Fp::from(5u64)), + )?; + // Assign d = 7. + region.assign_fixed( + || "d", + config.d, + 0, + || Value::known(Fp::from(7u64)), + )?; + Ok(()) + }, + )?; + layouter.assign_region( + || "Wrong synthesis", + |mut region| { + // Enable the equality gate. + config.q.enable(&mut region, 0)?; + + // Assign a = 1. + region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::one()))?; + + // Assign b = 0. + region.assign_advice(|| "b", config.b, 0, || Value::known(Fp::zero()))?; + + // Name Column a + region.name_column(|| "This is Advice!", config.a); + // Name Column b + region.name_column(|| "This is Advice too!", config.b); + + // Assign c = 5. + region.assign_advice( + || "c", + config.c, + 0, + || Value::known(Fp::from(5u64)), + )?; + // Assign d = 7. + region.assign_fixed( + || "d", + config.d, + 0, + || Value::known(Fp::from(7u64)), + )?; + + // Name Column c + region.name_column(|| "Another one!", config.c); + // Name Column d + region.name_column(|| "This is a Fixed!", config.d); + + // Note that none of the terms cancel eachother. Therefore we will have a constraint that is non satisfied for + // the `Equalty check` gate. 
+ Ok(()) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); + assert_eq!( + prover.verify(), + Err(vec![VerifyFailure::ConstraintNotSatisfied { + constraint: ((0, "Equality check").into(), 0, "").into(), + location: FailureLocation::InRegion { + region: (1, "Wrong synthesis").into(), + offset: 0, + }, + cell_values: vec![ + ( + ( + ( + Any::Advice(Advice { + phase: FirstPhase.to_sealed() + }), + 0 + ) + .into(), + 0 + ) + .into(), + "1".to_string() + ), + ( + ( + ( + Any::Advice(Advice { + phase: FirstPhase.to_sealed() + }), + 1 + ) + .into(), + 0 + ) + .into(), + "0".to_string() + ), + ( + ( + ( + Any::Advice(Advice { + phase: FirstPhase.to_sealed() + }), + 2 + ) + .into(), + 0 + ) + .into(), + "0x5".to_string() + ), + (((Any::Fixed, 0).into(), 0).into(), "0x7".to_string()), + ], + },]) + ) + } +} diff --git a/common/src/dev/cost.rs b/common/src/dev/cost.rs new file mode 100644 index 0000000000..57a2191574 --- /dev/null +++ b/common/src/dev/cost.rs @@ -0,0 +1,562 @@ +//! Developer tools for investigating the cost of a circuit. + +use std::{ + cmp, + collections::{HashMap, HashSet}, + iter, + marker::PhantomData, + ops::{Add, Mul}, +}; + +use ff::{Field, PrimeField}; +use group::prime::PrimeGroup; +use halo2_middleware::poly::Rotation; + +use crate::{ + circuit::{layouter::RegionColumn, Value}, + plonk::{ + Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Fixed, + FloorPlanner, Instance, Selector, + }, +}; +use halo2_middleware::circuit::Challenge; + +/// Measures a circuit to determine its costs, and explain what contributes to them. +#[allow(dead_code)] +#[derive(Debug)] +pub struct CircuitCost> { + /// Power-of-2 bound on the number of rows in the circuit. + k: u32, + /// Maximum degree of the circuit. + max_deg: usize, + /// Number of advice columns. + advice_columns: usize, + /// Number of direct queries for each column type. 
+ instance_queries: usize, + advice_queries: usize, + fixed_queries: usize, + /// Number of lookup arguments. + lookups: usize, + /// Number of columns in the global permutation. + permutation_cols: usize, + /// Number of distinct sets of points in the multiopening argument. + point_sets: usize, + /// Maximum rows used over all columns + max_rows: usize, + /// Maximum rows used over all advice columns + max_advice_rows: usize, + /// Maximum rows used over all fixed columns + max_fixed_rows: usize, + num_fixed_columns: usize, + num_advice_columns: usize, + num_instance_columns: usize, + num_total_columns: usize, + + _marker: PhantomData<(G, ConcreteCircuit)>, +} + +/// Region implementation used by Layout +#[allow(dead_code)] +#[derive(Debug)] +pub(crate) struct LayoutRegion { + /// The name of the region. Not required to be unique. + pub(crate) name: String, + /// The columns used by this region. + pub(crate) columns: HashSet, + /// The row that this region starts on, if known. + pub(crate) offset: Option, + /// The number of rows that this region takes up. + pub(crate) rows: usize, + /// The cells assigned in this region. + pub(crate) cells: Vec<(RegionColumn, usize)>, +} + +/// Cost and graphing layouter +#[derive(Default, Debug)] +pub(crate) struct Layout { + /// k = 1 << n + pub(crate) k: u32, + /// Regions of the layout + pub(crate) regions: Vec, + current_region: Option, + /// Total row count + pub(crate) total_rows: usize, + /// Total advice rows + pub(crate) total_advice_rows: usize, + /// Total fixed rows + pub(crate) total_fixed_rows: usize, + /// Any cells assigned outside of a region. + pub(crate) loose_cells: Vec<(RegionColumn, usize)>, + /// Pairs of cells between which we have equality constraints. 
+ pub(crate) equality: Vec<(Column, usize, Column, usize)>, + /// Selector assignments used for optimization pass + pub(crate) selectors: Vec>, +} + +impl Layout { + /// Creates a empty layout + pub fn new(k: u32, n: usize, num_selectors: usize) -> Self { + Layout { + k, + regions: vec![], + current_region: None, + total_rows: 0, + total_advice_rows: 0, + total_fixed_rows: 0, + // Any cells assigned outside of a region. + loose_cells: vec![], + // Pairs of cells between which we have equality constraints. + equality: vec![], + // Selector assignments used for optimization pass + selectors: vec![vec![false; n]; num_selectors], + } + } + + /// Update layout metadata + pub fn update(&mut self, column: RegionColumn, row: usize) { + self.total_rows = cmp::max(self.total_rows, row + 1); + + if let RegionColumn::Column(col) = column { + match col.column_type() { + Any::Advice(_) => { + self.total_advice_rows = cmp::max(self.total_advice_rows, row + 1) + } + Any::Fixed => self.total_fixed_rows = cmp::max(self.total_fixed_rows, row + 1), + _ => {} + } + } + + if let Some(region) = self.current_region { + let region = &mut self.regions[region]; + region.columns.insert(column); + + // The region offset is the earliest row assigned to. + let mut offset = region.offset.unwrap_or(row); + if row < offset { + // The first row assigned was not at offset 0 within the region. + region.rows += offset - row; + offset = row; + } + // The number of rows in this region is the gap between the earliest and + // latest rows assigned. 
+ region.rows = cmp::max(region.rows, row - offset + 1); + region.offset = Some(offset); + + region.cells.push((column, row)); + } else { + self.loose_cells.push((column, row)); + } + } +} + +impl Assignment for Layout { + fn enter_region(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + assert!(self.current_region.is_none()); + self.current_region = Some(self.regions.len()); + self.regions.push(LayoutRegion { + name: name_fn().into(), + columns: HashSet::default(), + offset: None, + rows: 0, + cells: vec![], + }) + } + + fn annotate_column(&mut self, _: A, _: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + } + + fn exit_region(&mut self) { + assert!(self.current_region.is_some()); + self.current_region = None; + } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if let Some(cell) = self.selectors[selector.0].get_mut(row) { + *cell = true; + } else { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.update((*selector).into(), row); + Ok(()) + } + + fn query_instance(&self, _: Column, _: usize) -> Result, Error> { + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + self.update(Column::::from(column).into(), row); + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + self.update(Column::::from(column).into(), row); + Ok(()) + } + + fn copy( + &mut self, + l_col: Column, + l_row: usize, + r_col: Column, + r_row: usize, + ) -> Result<(), crate::plonk::Error> { + self.equality.push((l_col, l_row, r_col, r_row)); + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { 
+ Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +impl> CircuitCost { + /// Measures a circuit with parameter constant `k`. + /// + /// Panics if `k` is not large enough for the circuit. + pub fn measure(k: u32, circuit: &ConcreteCircuit) -> Self { + // Collect the layout details. + let mut cs = ConstraintSystem::default(); + let config = ConcreteCircuit::configure(&mut cs); + let mut layout = Layout::new(k, 1 << k, cs.num_selectors); + ConcreteCircuit::FloorPlanner::synthesize( + &mut layout, + circuit, + config, + cs.constants.clone(), + ) + .unwrap(); + let (cs, _) = cs.compress_selectors(layout.selectors); + + assert!((1 << k) >= cs.minimum_rows()); + + // Figure out how many point sets we have due to queried cells. + let mut column_queries: HashMap, HashSet> = HashMap::new(); + for (c, r) in iter::empty() + .chain( + cs.advice_queries + .iter() + .map(|(c, r)| (Column::::from(*c), *r)), + ) + .chain(cs.instance_queries.iter().map(|(c, r)| ((*c).into(), *r))) + .chain(cs.fixed_queries.iter().map(|(c, r)| ((*c).into(), *r))) + .chain( + cs.permutation + .get_columns() + .into_iter() + .map(|c| (c, Rotation::cur())), + ) + { + column_queries.entry(c).or_default().insert(r.0); + } + let mut point_sets: HashSet> = HashSet::new(); + for (_, r) in column_queries { + // Sort the query sets so we merge duplicates. 
+ let mut query_set: Vec<_> = r.into_iter().collect(); + query_set.sort_unstable(); + point_sets.insert(query_set); + } + + // Include lookup polynomials in point sets: + point_sets.insert(vec![0, 1]); // product_poly + point_sets.insert(vec![-1, 0]); // permuted_input_poly + point_sets.insert(vec![0]); // permuted_table_poly + + // Include permutation polynomials in point sets. + point_sets.insert(vec![0, 1]); // permutation_product_poly + let max_deg = cs.degree(); + let permutation_cols = cs.permutation.get_columns().len(); + if permutation_cols > max_deg - 2 { + // permutation_product_poly for chaining chunks. + point_sets.insert(vec![-((cs.blinding_factors() + 1) as i32), 0, 1]); + } + + CircuitCost { + k, + max_deg, + advice_columns: cs.num_advice_columns, + instance_queries: cs.instance_queries.len(), + advice_queries: cs.advice_queries.len(), + fixed_queries: cs.fixed_queries.len(), + lookups: cs.lookups.len(), + permutation_cols, + point_sets: point_sets.len(), + max_rows: layout.total_rows, + max_advice_rows: layout.total_advice_rows, + max_fixed_rows: layout.total_fixed_rows, + num_advice_columns: cs.num_advice_columns, + num_fixed_columns: cs.num_fixed_columns, + num_instance_columns: cs.num_instance_columns, + num_total_columns: cs.num_instance_columns + + cs.num_advice_columns + + cs.num_fixed_columns, + _marker: PhantomData, + } + } + + fn permutation_chunks(&self) -> usize { + let chunk_size = self.max_deg - 2; + (self.permutation_cols + chunk_size - 1) / chunk_size + } + + /// Returns the marginal proof size per instance of this circuit. 
+ pub fn marginal_proof_size(&self) -> MarginalProofSize { + let chunks = self.permutation_chunks(); + + MarginalProofSize { + // Cells: + // - 1 commitment per advice column per instance + // - 1 eval per instance column query per instance + // - 1 eval per advice column query per instance + instance: ProofContribution::new(0, self.instance_queries), + advice: ProofContribution::new(self.advice_columns, self.advice_queries), + + // Lookup arguments: + // - 3 commitments per lookup argument per instance + // - 5 evals per lookup argument per instance + lookups: ProofContribution::new(3 * self.lookups, 5 * self.lookups), + + // Global permutation argument: + // - chunks commitments per instance + // - 2 * chunks + (chunks - 1) evals per instance + equality: ProofContribution::new( + chunks, + if chunks == 0 { chunks } else { 3 * chunks - 1 }, + ), + + _marker: PhantomData, + } + } + + /// Returns the proof size for the given number of instances of this circuit. + pub fn proof_size(&self, instances: usize) -> ProofSize { + let marginal = self.marginal_proof_size(); + + ProofSize { + // Cells: + // - marginal cost per instance + // - 1 eval per fixed column query + instance: marginal.instance * instances, + advice: marginal.advice * instances, + fixed: ProofContribution::new(0, self.fixed_queries), + + // Lookup arguments: + // - marginal cost per instance + lookups: marginal.lookups * instances, + + // Global permutation argument: + // - marginal cost per instance + // - 1 eval per column + equality: marginal.equality * instances + + ProofContribution::new(0, self.permutation_cols), + + // Vanishing argument: + // - 1 + (max_deg - 1) commitments + // - 1 random_poly eval + vanishing: ProofContribution::new(self.max_deg, 1), + + // Multiopening argument: + // - f_commitment + // - 1 eval per set of points in multiopen argument + multiopen: ProofContribution::new(1, self.point_sets), + + // Polycommit: + // - s_poly commitment + // - inner product argument (2 * k round 
commitments) + // - a + // - xi + polycomm: ProofContribution::new((1 + 2 * self.k).try_into().unwrap(), 2), + + _marker: PhantomData, + } + } +} + +/// (commitments, evaluations) +#[derive(Debug)] +struct ProofContribution { + commitments: usize, + evaluations: usize, +} + +impl ProofContribution { + fn new(commitments: usize, evaluations: usize) -> Self { + ProofContribution { + commitments, + evaluations, + } + } + + fn len(&self, point: usize, scalar: usize) -> usize { + self.commitments * point + self.evaluations * scalar + } +} + +impl Add for ProofContribution { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self { + commitments: self.commitments + rhs.commitments, + evaluations: self.evaluations + rhs.evaluations, + } + } +} + +impl Mul for ProofContribution { + type Output = Self; + + fn mul(self, instances: usize) -> Self::Output { + Self { + commitments: self.commitments * instances, + evaluations: self.evaluations * instances, + } + } +} + +/// The marginal size of a Halo 2 proof, broken down into its contributing factors. +#[derive(Debug)] +pub struct MarginalProofSize { + instance: ProofContribution, + advice: ProofContribution, + lookups: ProofContribution, + equality: ProofContribution, + _marker: PhantomData, +} + +impl From> for usize { + fn from(proof: MarginalProofSize) -> Self { + let point = G::Repr::default().as_ref().len(); + let scalar = ::Repr::default().as_ref().len(); + + proof.instance.len(point, scalar) + + proof.advice.len(point, scalar) + + proof.lookups.len(point, scalar) + + proof.equality.len(point, scalar) + } +} + +/// The size of a Halo 2 proof, broken down into its contributing factors. 
+#[derive(Debug)] +pub struct ProofSize { + instance: ProofContribution, + advice: ProofContribution, + fixed: ProofContribution, + lookups: ProofContribution, + equality: ProofContribution, + vanishing: ProofContribution, + multiopen: ProofContribution, + polycomm: ProofContribution, + _marker: PhantomData, +} + +impl From> for usize { + fn from(proof: ProofSize) -> Self { + let point = G::Repr::default().as_ref().len(); + let scalar = ::Repr::default().as_ref().len(); + + proof.instance.len(point, scalar) + + proof.advice.len(point, scalar) + + proof.fixed.len(point, scalar) + + proof.lookups.len(point, scalar) + + proof.equality.len(point, scalar) + + proof.vanishing.len(point, scalar) + + proof.multiopen.len(point, scalar) + + proof.polycomm.len(point, scalar) + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::{Eq, Fp}; + + use crate::circuit::SimpleFloorPlanner; + + use super::*; + + #[test] + fn circuit_cost_without_permutation() { + const K: u32 = 4; + + struct MyCircuit; + impl Circuit for MyCircuit { + type Config = (); + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} + + fn synthesize( + &self, + _config: Self::Config, + _layouter: impl crate::circuit::Layouter, + ) -> Result<(), Error> { + Ok(()) + } + } + CircuitCost::::measure(K, &MyCircuit).proof_size(1); + } +} diff --git a/common/src/dev/cost_model.rs b/common/src/dev/cost_model.rs new file mode 100644 index 0000000000..51b3a1ad76 --- /dev/null +++ b/common/src/dev/cost_model.rs @@ -0,0 +1,323 @@ +//! The cost estimator takes high-level parameters for a circuit design, and estimates the +//! verification cost, as well as resulting proof size. 
+ +use std::collections::HashSet; +use std::{iter, num::ParseIntError, str::FromStr}; + +use crate::plonk::Circuit; +use ff::{Field, FromUniformBytes}; +use serde::Deserialize; +use serde_derive::Serialize; + +use super::MockProver; + +/// Supported commitment schemes +#[derive(Debug, Eq, PartialEq)] +pub enum CommitmentScheme { + /// Inner Product Argument commitment scheme + IPA, + /// KZG with GWC19 mutli-open strategy + KZGGWC, + /// KZG with BDFG20 mutli-open strategy + KZGSHPLONK, +} + +/// Options to build a circuit specification to measure the cost model of. +#[derive(Debug)] +pub struct CostOptions { + /// An advice column with the given rotations. May be repeated. + pub advice: Vec, + + /// An instance column with the given rotations. May be repeated. + pub instance: Vec, + + /// A fixed column with the given rotations. May be repeated. + pub fixed: Vec, + + /// Maximum degree of the custom gates. + pub gate_degree: usize, + + /// Maximum degree of the constraint system. + pub max_degree: usize, + + /// A lookup over N columns with max input degree I and max table degree T. May be repeated. + pub lookup: Vec, + + /// A permutation over N columns. May be repeated. + pub permutation: Permutation, + + /// A shuffle over N columns with max input degree I and max shuffle degree T. May be repeated. + pub shuffle: Vec, + + /// 2^K bound on the number of rows. + pub k: usize, +} + +/// Structure holding polynomial related data for benchmarks +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Poly { + /// Rotations for the given polynomial + pub rotations: Vec, +} + +impl FromStr for Poly { + type Err = ParseIntError; + + fn from_str(s: &str) -> Result { + let mut rotations: Vec = + s.split(',').map(|r| r.parse()).collect::>()?; + rotations.sort_unstable(); + Ok(Poly { rotations }) + } +} + +/// Structure holding the Lookup related data for circuit benchmarks. 
+#[derive(Debug, Clone)] +pub struct Lookup; + +impl Lookup { + fn queries(&self) -> impl Iterator { + // - product commitments at x and \omega x + // - input commitments at x and x_inv + // - table commitments at x + let product = "0,1".parse().unwrap(); + let input = "0,-1".parse().unwrap(); + let table = "0".parse().unwrap(); + + iter::empty() + .chain(Some(product)) + .chain(Some(input)) + .chain(Some(table)) + } +} + +/// Number of permutation enabled columns +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Permutation { + columns: usize, +} + +impl Permutation { + fn queries(&self) -> impl Iterator { + // - product commitments at x and x_inv + // - polynomial commitments at x + let product = "0,-1".parse().unwrap(); + let poly = "0".parse().unwrap(); + + iter::empty() + .chain(Some(product)) + .chain(iter::repeat(poly).take(self.columns)) + } +} + +/// Structure holding the [Shuffle] related data for circuit benchmarks. +#[derive(Debug, Clone)] +pub struct Shuffle; + +impl Shuffle { + fn queries(&self) -> impl Iterator { + // Open shuffle product commitment at x and \omega x + let shuffle = "0, 1".parse().unwrap(); + + iter::empty().chain(Some(shuffle)) + } +} + +/// High-level specifications of an abstract circuit. +#[derive(Debug, Deserialize, Serialize)] +pub struct ModelCircuit { + /// Power-of-2 bound on the number of rows in the circuit. + pub k: usize, + /// Maximum degree of the circuit. + pub max_deg: usize, + /// Number of advice columns. + pub advice_columns: usize, + /// Number of lookup arguments. + pub lookups: usize, + /// Equality constraint enabled columns. + pub permutations: usize, + /// Number of shuffle arguments + pub shuffles: usize, + /// Number of distinct column queries across all gates. + pub column_queries: usize, + /// Number of distinct sets of points in the multiopening argument. 
+ pub point_sets: usize, + /// Size of the proof for the circuit + pub size: usize, +} + +impl CostOptions { + /// Convert [CostOptions] to [ModelCircuit]. The proof sizè is computed depending on the base + /// and scalar field size of the curve used, together with the [CommitmentScheme]. + pub fn into_model_circuit( + &self, + comm_scheme: CommitmentScheme, + ) -> ModelCircuit { + let mut queries: Vec<_> = iter::empty() + .chain(self.advice.iter()) + .chain(self.instance.iter()) + .chain(self.fixed.iter()) + .cloned() + .chain(self.lookup.iter().flat_map(|l| l.queries())) + .chain(self.permutation.queries()) + .chain(self.shuffle.iter().flat_map(|s| s.queries())) + .chain(iter::repeat("0".parse().unwrap()).take(self.max_degree - 1)) + .collect(); + + let column_queries = queries.len(); + queries.sort_unstable(); + queries.dedup(); + let point_sets = queries.len(); + + let comp_bytes = |points: usize, scalars: usize| points * COMM + scalars * SCALAR; + + // PLONK: + // - COMM bytes (commitment) per advice column + // - 3 * COMM bytes (commitments) + 5 * SCALAR bytes (evals) per lookup column + // - COMM bytes (commitment) + 2 * SCALAR bytes (evals) per permutation argument + // - COMM bytes (eval) per column per permutation argument + let plonk = comp_bytes(1, 0) * self.advice.len() + + comp_bytes(3, 5) * self.lookup.len() + + comp_bytes(1, 2 + self.permutation.columns); + + // Vanishing argument: + // - (max_deg - 1) * COMM bytes (commitments) + (max_deg - 1) * SCALAR bytes (h_evals) + // for quotient polynomial + // - SCALAR bytes (eval) per column query + let vanishing = + comp_bytes(self.max_degree - 1, self.max_degree - 1) + comp_bytes(0, column_queries); + + // Multiopening argument: + // - f_commitment (COMM bytes) + // - SCALAR bytes (evals) per set of points in multiopen argument + let multiopen = comp_bytes(1, point_sets); + + let polycomm = match comm_scheme { + CommitmentScheme::IPA => { + // Polycommit IPA: + // - s_poly commitment (COMM bytes) + // - 
inner product argument (k rounds * 2 * COMM bytes) + // - a (SCALAR bytes) + // - xi (SCALAR bytes) + comp_bytes(1 + 2 * self.k, 2) + } + CommitmentScheme::KZGGWC => { + let mut nr_rotations = HashSet::new(); + for poly in self.advice.iter() { + nr_rotations.extend(poly.rotations.clone()); + } + for poly in self.fixed.iter() { + nr_rotations.extend(poly.rotations.clone()); + } + for poly in self.instance.iter() { + nr_rotations.extend(poly.rotations.clone()); + } + + // Polycommit GWC: + // - number_rotations * COMM bytes + comp_bytes(nr_rotations.len(), 0) + } + CommitmentScheme::KZGSHPLONK => { + // Polycommit SHPLONK: + // - quotient polynomial commitment (COMM bytes) + comp_bytes(1, 0) + } + }; + + let size = plonk + vanishing + multiopen + polycomm; + + ModelCircuit { + k: self.k, + max_deg: self.max_degree, + advice_columns: self.advice.len(), + lookups: self.lookup.len(), + permutations: self.permutation.columns, + shuffles: self.shuffle.len(), + column_queries, + point_sets, + size, + } + } +} + +/// Given a Plonk circuit, this function returns a [ModelCircuit] +pub fn from_circuit_to_model_circuit< + F: Ord + Field + FromUniformBytes<64>, + C: Circuit, + const COMM: usize, + const SCALAR: usize, +>( + k: u32, + circuit: &C, + instances: Vec>, + comm_scheme: CommitmentScheme, +) -> ModelCircuit { + let options = from_circuit_to_cost_model_options(k, circuit, instances); + options.into_model_circuit::(comm_scheme) +} + +/// Given a Plonk circuit, this function returns [CostOptions] +pub fn from_circuit_to_cost_model_options, C: Circuit>( + k: u32, + circuit: &C, + instances: Vec>, +) -> CostOptions { + let prover = MockProver::run(k, circuit, instances).unwrap(); + let cs = prover.cs; + + let fixed = { + // init the fixed polynomials with no rotations + let mut fixed = vec![Poly { rotations: vec![] }; cs.num_fixed_columns()]; + for (col, rot) in cs.fixed_queries() { + fixed[col.index()].rotations.push(rot.0 as isize); + } + fixed + }; + + let advice = { + // 
init the advice polynomials with no rotations + let mut advice = vec![Poly { rotations: vec![] }; cs.num_advice_columns()]; + for (col, rot) in cs.advice_queries() { + advice[col.index()].rotations.push(rot.0 as isize); + } + advice + }; + + let instance = { + // init the instance polynomials with no rotations + let mut instance = vec![Poly { rotations: vec![] }; cs.num_instance_columns()]; + for (col, rot) in cs.instance_queries() { + instance[col.index()].rotations.push(rot.0 as isize); + } + instance + }; + + let lookup = { cs.lookups().iter().map(|_| Lookup).collect::>() }; + + let permutation = Permutation { + columns: cs.permutation().get_columns().len(), + }; + + let shuffle = { cs.shuffles.iter().map(|_| Shuffle).collect::>() }; + + let gate_degree = cs + .gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0); + + let k = prover.k.try_into().unwrap(); + + CostOptions { + advice, + instance, + fixed, + gate_degree, + max_degree: cs.degree(), + lookup, + permutation, + shuffle, + k, + } +} diff --git a/common/src/dev/failure.rs b/common/src/dev/failure.rs new file mode 100644 index 0000000000..f9f5c27ded --- /dev/null +++ b/common/src/dev/failure.rs @@ -0,0 +1,873 @@ +use std::collections::{BTreeMap, HashSet}; +use std::fmt::{self, Debug}; + +use group::ff::Field; + +use super::metadata::{DebugColumn, DebugVirtualCell}; +use super::MockProver; +use super::{ + metadata, + util::{self, AnyQuery}, + Region, +}; +use crate::dev::metadata::Constraint; +use crate::{ + dev::{Instance, Value}, + plonk::{Any, Column, ConstraintSystem, Expression, Gate}, +}; + +mod emitter; + +/// The location within the circuit at which a particular [`VerifyFailure`] occurred. +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum FailureLocation { + /// A location inside a region. + InRegion { + /// The region in which the failure occurred. 
+ region: metadata::Region, + /// The offset (relative to the start of the region) at which the failure + /// occurred. + offset: usize, + }, + /// A location outside of a region. + OutsideRegion { + /// The circuit row on which the failure occurred. + row: usize, + }, +} + +impl fmt::Display for FailureLocation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::InRegion { region, offset } => write!(f, "in {region} at offset {offset}"), + Self::OutsideRegion { row } => { + write!(f, "outside any region, on row {row}") + } + } + } +} + +impl FailureLocation { + /// Returns a `DebugColumn` from Column metadata and `&self`. + pub(super) fn get_debug_column(&self, metadata: metadata::Column) -> DebugColumn { + match self { + Self::InRegion { region, .. } => { + DebugColumn::from((metadata, region.column_annotations.as_ref())) + } + _ => DebugColumn::from((metadata, None)), + } + } + + pub(super) fn find_expressions<'a, F: Field>( + cs: &ConstraintSystem, + regions: &[Region], + failure_row: usize, + failure_expressions: impl Iterator>, + ) -> Self { + let failure_columns: HashSet> = failure_expressions + .flat_map(|expression| { + expression.evaluate( + &|_| vec![], + &|_| panic!("virtual selectors are removed during optimization"), + &|query| vec![cs.fixed_queries[query.index.unwrap()].0.into()], + &|query| vec![cs.advice_queries[query.index.unwrap()].0.into()], + &|query| vec![cs.instance_queries[query.index.unwrap()].0.into()], + &|_| vec![], + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ) + }) + .collect(); + + Self::find(regions, failure_row, failure_columns) + } + + /// Figures out whether the given row and columns overlap an assigned region. 
+ pub(super) fn find( + regions: &[Region], + failure_row: usize, + failure_columns: HashSet>, + ) -> Self { + regions + .iter() + .enumerate() + .find(|(_, r)| { + if let Some((start, end)) = r.rows { + // We match the region if any input columns overlap, rather than all of + // them, because matching complex selector columns is hard. As long as + // regions are rectangles, and failures occur due to assignments entirely + // within single regions, "any" will be equivalent to "all". If these + // assumptions change, we'll start getting bug reports from users :) + (start..=end).contains(&failure_row) && !failure_columns.is_disjoint(&r.columns) + } else { + // Zero-area region + false + } + }) + .map(|(r_i, r)| FailureLocation::InRegion { + region: (r_i, r.name.clone(), r.annotations.clone()).into(), + offset: failure_row - r.rows.unwrap().0, + }) + .unwrap_or_else(|| FailureLocation::OutsideRegion { row: failure_row }) + } +} + +/// The reasons why a particular circuit is not satisfied. +#[derive(PartialEq, Eq)] +pub enum VerifyFailure { + /// A cell used in an active gate was not assigned to. + CellNotAssigned { + /// The index of the active gate. + gate: metadata::Gate, + /// The region in which this cell should be assigned. + region: metadata::Region, + /// The offset (relative to the start of the region) at which the active gate + /// queries this cell. + gate_offset: usize, + /// The column in which this cell should be assigned. + column: Column, + /// The offset (relative to the start of the region) at which this cell should be + /// assigned. This may be negative (for example, if a selector enables a gate at + /// offset 0, but the gate uses `Rotation::prev()`). + offset: isize, + }, + /// An instance cell used in an active gate was not assigned to. + InstanceCellNotAssigned { + /// The index of the active gate. + gate: metadata::Gate, + /// The region in which this gate was activated. 
+ region: metadata::Region, + /// The offset (relative to the start of the region) at which the active gate + /// queries this cell. + gate_offset: usize, + /// The column in which this cell should be assigned. + column: Column, + /// The absolute row at which this cell should be assigned. + row: usize, + }, + /// A constraint was not satisfied for a particular row. + ConstraintNotSatisfied { + /// The polynomial constraint that is not satisfied. + constraint: metadata::Constraint, + /// The location at which this constraint is not satisfied. + /// + /// `FailureLocation::OutsideRegion` is usually caused by a constraint that does + /// not contain a selector, and as a result is active on every row. + location: FailureLocation, + /// The values of the virtual cells used by this constraint. + cell_values: Vec<(metadata::VirtualCell, String)>, + }, + /// A constraint was active on an unusable row, and is likely missing a selector. + ConstraintPoisoned { + /// The polynomial constraint that is not satisfied. + constraint: metadata::Constraint, + }, + /// A lookup input did not exist in its corresponding table. + Lookup { + /// The name of the lookup that is not satisfied. + name: String, + /// The index of the lookup that is not satisfied. These indices are assigned in + /// the order in which `ConstraintSystem::lookup` is called during + /// `Circuit::configure`. + lookup_index: usize, + /// The location at which the lookup is not satisfied. + /// + /// `FailureLocation::InRegion` is most common, and may be due to the intentional + /// use of a lookup (if its inputs are conditional on a complex selector), or an + /// unintentional lookup constraint that overlaps the region (indicating that the + /// lookup's inputs should be made conditional). + /// + /// `FailureLocation::OutsideRegion` is uncommon, and could mean that: + /// - The input expressions do not correctly constrain a default value that exists + /// in the table when the lookup is not being used. 
+ /// - The input expressions use a column queried at a non-zero `Rotation`, and the + /// lookup is active on a row adjacent to an unrelated region. + location: FailureLocation, + }, + /// A shuffle input did not exist in its corresponding map. + Shuffle { + /// The name of the lookup that is not satisfied. + name: String, + /// The index of the lookup that is not satisfied. These indices are assigned in + /// the order in which `ConstraintSystem::lookup` is called during + /// `Circuit::configure`. + shuffle_index: usize, + /// The location at which the lookup is not satisfied. + /// + /// `FailureLocation::InRegion` is most common, and may be due to the intentional + /// use of a lookup (if its inputs are conditional on a complex selector), or an + /// unintentional lookup constraint that overlaps the region (indicating that the + /// lookup's inputs should be made conditional). + /// + /// `FailureLocation::OutsideRegion` is uncommon, and could mean that: + /// - The input expressions do not correctly constrain a default value that exists + /// in the table when the lookup is not being used. + /// - The input expressions use a column queried at a non-zero `Rotation`, and the + /// lookup is active on a row adjacent to an unrelated region. + location: FailureLocation, + }, + /// A permutation did not preserve the original value of a cell. + Permutation { + /// The column in which this permutation is not satisfied. + column: metadata::Column, + /// The location at which the permutation is not satisfied. 
+ location: FailureLocation, + }, +} + +impl fmt::Display for VerifyFailure { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CellNotAssigned { + gate, + region, + gate_offset, + column, + offset, + } => { + write!( + f, + "{} uses {} at offset {}, which requires cell in column {:?} at offset {} with annotation {:?} to be assigned.", + region, gate, gate_offset, column, offset, region.get_column_annotation((*column).into()) + ) + } + Self::InstanceCellNotAssigned { + gate, + region, + gate_offset, + column, + row, + } => { + write!( + f, + "{region} uses {gate} at offset {gate_offset}, which requires cell in instance column {column:?} at row {row} to be assigned.", + ) + } + Self::ConstraintNotSatisfied { + constraint, + location, + cell_values, + } => { + writeln!(f, "{constraint} is not satisfied {location}")?; + for (dvc, value) in cell_values.iter().map(|(vc, string)| { + let ann_map = match location { + FailureLocation::InRegion { region, offset: _ } => { + ®ion.column_annotations + } + _ => &None, + }; + + (DebugVirtualCell::from((vc, ann_map.as_ref())), string) + }) { + writeln!(f, "- {dvc} = {value}")?; + } + Ok(()) + } + Self::ConstraintPoisoned { constraint } => { + write!( + f, + "{constraint} is active on an unusable row - missing selector?" 
+ ) + } + Self::Lookup { + name, + lookup_index, + location, + } => { + write!( + f, + "Lookup {name}(index: {lookup_index}) is not satisfied {location}", + ) + } + Self::Shuffle { + name, + shuffle_index, + location, + } => { + write!( + f, + "Shuffle {name}(index: {shuffle_index}) is not satisfied {location}" + ) + } + Self::Permutation { column, location } => { + write!( + f, + "Equality constraint not satisfied by cell ({}, {})", + location.get_debug_column(*column), + location + ) + } + } + } +} + +impl Debug for VerifyFailure { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + VerifyFailure::ConstraintNotSatisfied { + constraint, + location, + cell_values, + } => { + #[allow(dead_code)] + #[derive(Debug)] + struct ConstraintCaseDebug { + constraint: Constraint, + location: FailureLocation, + cell_values: Vec<(DebugVirtualCell, String)>, + } + + let ann_map = match location { + FailureLocation::InRegion { region, offset: _ } => { + region.column_annotations.clone() + } + _ => None, + }; + + let debug = ConstraintCaseDebug { + constraint: constraint.clone(), + location: location.clone(), + cell_values: cell_values + .iter() + .map(|(vc, value)| { + ( + DebugVirtualCell::from((vc, ann_map.as_ref())), + value.clone(), + ) + }) + .collect(), + }; + + write!(f, "{debug:#?}") + } + _ => write!(f, "{self:#}"), + } + } +} + +/// Renders `VerifyFailure::CellNotAssigned`. +/// +/// ```text +/// error: cell not assigned +/// Cell layout in region 'Faulty synthesis': +/// | Offset | A0 | A1 | +/// +--------+----+----+ +/// | 0 | x0 | | +/// | 1 | | X | <--{ X marks the spot! 🦜 +/// +/// Gate 'Equality check' (applied at offset 1) queries these cells. +/// ``` +fn render_cell_not_assigned( + gates: &[Gate], + gate: &metadata::Gate, + region: &metadata::Region, + gate_offset: usize, + column: Column, + offset: isize, +) { + // Collect the necessary rendering information: + // - The columns involved in this gate. 
+ // - How many cells are in each column. + // - The grid of cell values, indexed by rotation. + let mut columns = BTreeMap::::default(); + let mut layout = BTreeMap::>::default(); + for (i, cell) in gates[gate.index].queried_cells().iter().enumerate() { + let cell_column = cell.column.into(); + *columns.entry(cell_column).or_default() += 1; + layout + .entry(cell.rotation.0) + .or_default() + .entry(cell_column) + .or_insert_with(|| { + if cell.column == column && gate_offset as i32 + cell.rotation.0 == offset as i32 { + "X".to_string() + } else { + format!("x{i}") + } + }); + } + + eprintln!("error: cell not assigned"); + emitter::render_cell_layout( + " ", + &FailureLocation::InRegion { + region: region.clone(), + offset: gate_offset, + }, + &columns, + &layout, + |row_offset, rotation| { + if (row_offset.unwrap() + rotation) as isize == offset { + eprint!(" <--{{ X marks the spot! 🦜"); + } + }, + ); + eprintln!(); + eprintln!( + " Gate '{}' (applied at offset {}) queries these cells.", + gate.name, gate_offset + ); +} + +/// Renders `VerifyFailure::ConstraintNotSatisfied`. +/// +/// ```text +/// error: constraint not satisfied +/// Cell layout in region 'somewhere': +/// | Offset | A0 | +/// +--------+----+ +/// | 0 | x0 | <--{ Gate 'foo' applied here +/// | 1 | x1 | +/// +/// Constraint 'bar': +/// x1 + x1 * 0x100 + x1 * 0x10000 + x1 * 0x100_0000 - x0 = 0 +/// +/// Assigned cell values: +/// x0 = 0x5 +/// x1 = 0x5 +/// ``` +fn render_constraint_not_satisfied( + gates: &[Gate], + constraint: &metadata::Constraint, + location: &FailureLocation, + cell_values: &[(metadata::VirtualCell, String)], +) { + // Collect the necessary rendering information: + // - The columns involved in this constraint. + // - How many cells are in each column. + // - The grid of cell values, indexed by rotation. 
+ let mut columns = BTreeMap::::default(); + let mut layout = BTreeMap::>::default(); + for (i, (cell, _)) in cell_values.iter().enumerate() { + *columns.entry(cell.column).or_default() += 1; + layout + .entry(cell.rotation) + .or_default() + .entry(cell.column) + .or_insert(format!("x{i}")); + } + + eprintln!("error: constraint not satisfied"); + emitter::render_cell_layout(" ", location, &columns, &layout, |_, rotation| { + if rotation == 0 { + eprint!(" <--{{ Gate '{}' applied here", constraint.gate.name); + } + }); + + // Print the unsatisfied constraint, in terms of the local variables. + eprintln!(); + eprintln!(" Constraint '{}':", constraint.name); + eprintln!( + " {} = 0", + emitter::expression_to_string( + &gates[constraint.gate.index].polynomials()[constraint.index], + &layout + ) + ); + + // Print the map from local variables to assigned values. + eprintln!(); + eprintln!(" Assigned cell values:"); + for (i, (_, value)) in cell_values.iter().enumerate() { + eprintln!(" x{i} = {value}"); + } +} + +/// Renders `VerifyFailure::Lookup`. +/// +/// ```text +/// error: lookup input does not exist in table +/// (L0) ∉ (F0) +/// +/// Lookup inputs: +/// L0 = x1 * x0 + (1 - x1) * 0x2 +/// ^ +/// | Cell layout in region 'Faulty synthesis': +/// | | Offset | A0 | F1 | +/// | +--------+----+----+ +/// | | 1 | x0 | x1 | <--{ Lookup inputs queried here +/// | +/// | Assigned cell values: +/// | x0 = 0x5 +/// | x1 = 1 +/// ``` +fn render_lookup( + prover: &MockProver, + name: &str, + lookup_index: usize, + location: &FailureLocation, +) { + let n = prover.n as i32; + let cs = &prover.cs; + let lookup = &cs.lookups[lookup_index]; + + // Get the absolute row on which the lookup's inputs are being queried, so we can + // fetch the input values. 
+ let row = match location { + FailureLocation::InRegion { region, offset } => { + prover.regions[region.index].rows.unwrap().0 + offset + } + FailureLocation::OutsideRegion { row } => *row, + } as i32; + + // Recover the fixed columns from the table expressions. We don't allow composite + // expressions for the table side of lookups. + let lookup_columns = lookup.table_expressions.iter().map(|expr| { + expr.evaluate( + &|f| format! {"Const: {f:#?}"}, + &|s| format! {"S{}", s.0}, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::Fixed, query.column_index))) + .cloned() + .unwrap_or_else(|| format!("F{}", query.column_index())) + ) + }, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::advice(), query.column_index))) + .cloned() + .unwrap_or_else(|| format!("A{}", query.column_index())) + ) + }, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::Instance, query.column_index))) + .cloned() + .unwrap_or_else(|| format!("I{}", query.column_index())) + ) + }, + &|challenge| format! {"C{}", challenge.index()}, + &|query| format! {"-{query}"}, + &|a, b| format! {"{a} + {b}"}, + &|a, b| format! {"{a} * {b}"}, + &|a, b| format! {"{a} * {b:?}"}, + ) + }); + + fn cell_value<'a, F: Field, Q: Into + Copy>( + load: impl Fn(Q) -> Value + 'a, + ) -> impl Fn(Q) -> BTreeMap + 'a { + move |query| { + let AnyQuery { + column_type, + column_index, + rotation, + .. 
+ } = query.into(); + Some(( + ((column_type, column_index).into(), rotation.0).into(), + match load(query) { + Value::Real(v) => util::format_value(v), + Value::Poison => unreachable!(), + }, + )) + .into_iter() + .collect() + } + } + + eprintln!("error: lookup input does not exist in table"); + eprint!(" ("); + for i in 0..lookup.input_expressions.len() { + eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); + } + + eprint!(") ∉ ("); + for (i, column) in lookup_columns.enumerate() { + eprint!("{}{}", if i == 0 { "" } else { ", " }, column); + } + eprintln!(")"); + + eprintln!(); + eprintln!(" Lookup '{name}' inputs:"); + for (i, input) in lookup.input_expressions.iter().enumerate() { + // Fetch the cell values (since we don't store them in VerifyFailure::Lookup). + let cell_values = input.evaluate( + &|_| BTreeMap::default(), + &|_| panic!("virtual selectors are removed during optimization"), + &cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)), + &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), + &cell_value(&util::load_instance( + n, + row, + &cs.instance_queries, + &prover.instance, + )), + &|_| BTreeMap::default(), + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ); + + // Collect the necessary rendering information: + // - The columns involved in this constraint. + // - How many cells are in each column. + // - The grid of cell values, indexed by rotation. 
+ let mut columns = BTreeMap::::default(); + let mut layout = BTreeMap::>::default(); + for (i, (cell, _)) in cell_values.iter().enumerate() { + *columns.entry(cell.column).or_default() += 1; + layout + .entry(cell.rotation) + .or_default() + .entry(cell.column) + .or_insert(format!("x{i}")); + } + + if i != 0 { + eprintln!(); + } + eprintln!( + " L{} = {}", + i, + emitter::expression_to_string(input, &layout) + ); + eprintln!(" ^"); + + emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { + if rotation == 0 { + eprint!(" <--{{ Lookup '{name}' inputs queried here"); + } + }); + + // Print the map from local variables to assigned values. + eprintln!(" |"); + eprintln!(" | Assigned cell values:"); + for (i, (_, value)) in cell_values.iter().enumerate() { + eprintln!(" | x{i} = {value}"); + } + } +} + +fn render_shuffle( + prover: &MockProver, + name: &str, + shuffle_index: usize, + location: &FailureLocation, +) { + let n = prover.n as i32; + let cs = &prover.cs; + let shuffle = &cs.shuffles[shuffle_index]; + + // Get the absolute row on which the shuffle's inputs are being queried, so we can + // fetch the input values. + let row = match location { + FailureLocation::InRegion { region, offset } => { + prover.regions[region.index].rows.unwrap().0 + offset + } + FailureLocation::OutsideRegion { row } => *row, + } as i32; + + let shuffle_columns = shuffle.shuffle_expressions.iter().map(|expr| { + expr.evaluate( + &|f| format! {"Const: {f:#?}"}, + &|s| format! 
{"S{}", s.0}, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::Fixed, query.column_index))) + .cloned() + .unwrap_or_else(|| format!("F{}", query.column_index())) + ) + }, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::advice(), query.column_index))) + .cloned() + .unwrap_or_else(|| format!("A{}", query.column_index())) + ) + }, + &|query| { + format!( + "{:?}", + prover + .cs + .general_column_annotations + .get(&metadata::Column::from((Any::Instance, query.column_index))) + .cloned() + .unwrap_or_else(|| format!("I{}", query.column_index())) + ) + }, + &|challenge| format! {"C{}", challenge.index()}, + &|query| format! {"-{query}"}, + &|a, b| format! {"{a} + {b}"}, + &|a, b| format! {"{a} * {b}"}, + &|a, b| format! {"{a} * {b:?}"}, + ) + }); + + fn cell_value<'a, F: Field, Q: Into + Copy>( + load: impl Fn(Q) -> Value + 'a, + ) -> impl Fn(Q) -> BTreeMap + 'a { + move |query| { + let AnyQuery { + column_type, + column_index, + rotation, + .. + } = query.into(); + Some(( + ((column_type, column_index).into(), rotation.0).into(), + match load(query) { + Value::Real(v) => util::format_value(v), + Value::Poison => unreachable!(), + }, + )) + .into_iter() + .collect() + } + } + + eprintln!("error: input does not exist in shuffle"); + eprint!(" ("); + for i in 0..shuffle.input_expressions.len() { + eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); + } + eprint!(") <-> ("); + for (i, column) in shuffle_columns.enumerate() { + eprint!("{}{}", if i == 0 { "" } else { ", " }, column); + } + eprintln!(")"); + + eprintln!(); + eprintln!(" Shuffle '{name}' inputs:"); + for (i, input) in shuffle.input_expressions.iter().enumerate() { + // Fetch the cell values (since we don't store them in VerifyFailure::Shuffle). 
+ let cell_values = input.evaluate( + &|_| BTreeMap::default(), + &|_| panic!("virtual selectors are removed during optimization"), + &cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)), + &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), + &cell_value(&util::load_instance( + n, + row, + &cs.instance_queries, + &prover.instance, + )), + &|_| BTreeMap::default(), + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ); + + // Collect the necessary rendering information: + // - The columns involved in this constraint. + // - How many cells are in each column. + // - The grid of cell values, indexed by rotation. + let mut columns = BTreeMap::::default(); + let mut layout = BTreeMap::>::default(); + for (i, (cell, _)) in cell_values.iter().enumerate() { + *columns.entry(cell.column).or_default() += 1; + layout + .entry(cell.rotation) + .or_default() + .entry(cell.column) + .or_insert(format!("x{i}")); + } + + if i != 0 { + eprintln!(); + } + eprintln!( + " Sh{} = {}", + i, + emitter::expression_to_string(input, &layout) + ); + eprintln!(" ^"); + + emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { + if rotation == 0 { + eprint!(" <--{{ Shuffle '{name}' inputs queried here"); + } + }); + + // Print the map from local variables to assigned values. + eprintln!(" |"); + eprintln!(" | Assigned cell values:"); + for (i, (_, value)) in cell_values.iter().enumerate() { + eprintln!(" | x{i} = {value}"); + } + } +} + +impl VerifyFailure { + /// Emits this failure in pretty-printed format to stderr. 
+ pub(super) fn emit(&self, prover: &MockProver) { + match self { + Self::CellNotAssigned { + gate, + region, + gate_offset, + column, + offset, + } => render_cell_not_assigned( + &prover.cs.gates, + gate, + region, + *gate_offset, + *column, + *offset, + ), + Self::ConstraintNotSatisfied { + constraint, + location, + cell_values, + } => { + render_constraint_not_satisfied(&prover.cs.gates, constraint, location, cell_values) + } + Self::Lookup { + name, + lookup_index, + location, + } => render_lookup(prover, name, *lookup_index, location), + Self::Shuffle { + name, + shuffle_index, + location, + } => render_shuffle(prover, name, *shuffle_index, location), + _ => eprintln!("{self}"), + } + } +} diff --git a/common/src/dev/failure/emitter.rs b/common/src/dev/failure/emitter.rs new file mode 100644 index 0000000000..24109d599b --- /dev/null +++ b/common/src/dev/failure/emitter.rs @@ -0,0 +1,214 @@ +use std::collections::BTreeMap; +use std::iter; + +use group::ff::Field; + +use super::FailureLocation; +use crate::{ + dev::{metadata, util}, + plonk::{Advice, Any, Expression}, +}; + +fn padded(p: char, width: usize, text: &str) -> String { + let pad = width - text.len(); + + format!( + "{}{}{}", + iter::repeat(p).take(pad - pad / 2).collect::(), + text, + iter::repeat(p).take(pad / 2).collect::(), + ) +} + +fn column_type_and_idx(column: &metadata::Column) -> String { + format!( + "{}{}", + match column.column_type { + Any::Advice(_) => "A", + Any::Fixed => "F", + Any::Instance => "I", + }, + column.index + ) +} + +/// Renders a cell layout around a given failure location. +/// +/// `highlight_row` is called at the end of each row, with the offset of the active row +/// (if `location` is in a region), and the rotation of the current row relative to the +/// active row. 
+pub(super) fn render_cell_layout( + prefix: &str, + location: &FailureLocation, + columns: &BTreeMap, + layout: &BTreeMap>, + highlight_row: impl Fn(Option, i32), +) { + let col_width = |cells: usize| cells.to_string().len() + 3; + let mut col_headers = String::new(); + + // If we are in a region, show rows at offsets relative to it. Otherwise, just show + // the rotations directly. + let offset = match location { + FailureLocation::InRegion { region, offset } => { + col_headers + .push_str(format!("{}Cell layout in region '{}':\n", prefix, region.name).as_str()); + col_headers.push_str(format!("{prefix} | Offset |").as_str()); + Some(*offset as i32) + } + FailureLocation::OutsideRegion { row } => { + col_headers.push_str(format!("{prefix}Cell layout at row {row}:\n").as_str()); + col_headers.push_str(format!("{prefix} |Rotation|").as_str()); + None + } + }; + eprint!("\n{col_headers}"); + + let widths: Vec = columns + .iter() + .map(|(col, _)| { + let size = match location { + FailureLocation::InRegion { region, offset: _ } => { + if let Some(column_ann) = region.column_annotations.as_ref() { + if let Some(ann) = column_ann.get(col) { + ann.len() + } else { + col_width(column_type_and_idx(col).as_str().len()) + } + } else { + col_width(column_type_and_idx(col).as_str().len()) + } + } + FailureLocation::OutsideRegion { row: _ } => { + col_width(column_type_and_idx(col).as_str().len()) + } + }; + size + }) + .collect(); + + // Print the assigned cells, and their region offset or rotation + the column name at which they're assigned to. 
+ for ((column, _), &width) in columns.iter().zip(widths.iter()) { + eprint!( + "{}|", + padded( + ' ', + width, + &match location { + FailureLocation::InRegion { region, offset: _ } => { + region + .column_annotations + .as_ref() + .and_then(|column_ann| column_ann.get(column).cloned()) + .unwrap_or_else(|| column_type_and_idx(column)) + } + FailureLocation::OutsideRegion { row: _ } => { + column_type_and_idx(column) + } + } + .to_string() + ) + ); + } + + eprintln!(); + eprint!("{prefix} +--------+"); + for &width in widths.iter() { + eprint!("{}+", padded('-', width, "")); + } + eprintln!(); + for (rotation, row) in layout { + eprint!( + "{} |{}|", + prefix, + padded(' ', 8, &(offset.unwrap_or(0) + rotation).to_string()) + ); + for ((col, _), &width) in columns.iter().zip(widths.iter()) { + eprint!( + "{}|", + padded( + ' ', + width, + row.get(col).map(|s| s.as_str()).unwrap_or_default() + ) + ); + } + highlight_row(offset, *rotation); + eprintln!(); + } +} + +pub(super) fn expression_to_string( + expr: &Expression, + layout: &BTreeMap>, +) -> String { + expr.evaluate( + &util::format_value, + &|_| panic!("virtual selectors are removed during optimization"), + &|query| { + if let Some(label) = layout + .get(&query.rotation.0) + .and_then(|row| row.get(&(Any::Fixed, query.column_index).into())) + { + label.clone() + } else if query.rotation.0 == 0 { + // This is most likely a merged selector + format!("S{}", query.index.unwrap()) + } else { + // No idea how we'd get here... 
+ format!("F{}@{}", query.column_index, query.rotation.0) + } + }, + &|query| { + layout + .get(&query.rotation.0) + .and_then(|map| { + map.get( + &( + Any::Advice(Advice { phase: query.phase }), + query.column_index, + ) + .into(), + ) + }) + .cloned() + .unwrap_or_default() + }, + &|query| { + layout + .get(&query.rotation.0) + .unwrap() + .get(&(Any::Instance, query.column_index).into()) + .unwrap() + .clone() + }, + &|challenge| format!("C{}({})", challenge.index(), challenge.phase()), + &|a| { + if a.contains(' ') { + format!("-({a})") + } else { + format!("-{a}") + } + }, + &|a, b| { + if let Some(b) = b.strip_prefix('-') { + format!("{a} - {b}") + } else { + format!("{a} + {b}") + } + }, + &|a, b| match (a.contains(' '), b.contains(' ')) { + (false, false) => format!("{a} * {b}"), + (false, true) => format!("{a} * ({b})"), + (true, false) => format!("({a}) * {b}"), + (true, true) => format!("({a}) * ({b})"), + }, + &|a, s| { + if a.contains(' ') { + format!("({}) * {}", a, util::format_value(s)) + } else { + format!("{} * {}", a, util::format_value(s)) + } + }, + ) +} diff --git a/common/src/dev/gates.rs b/common/src/dev/gates.rs new file mode 100644 index 0000000000..4421c0967f --- /dev/null +++ b/common/src/dev/gates.rs @@ -0,0 +1,314 @@ +use std::{ + collections::BTreeSet, + fmt::{self, Write}, +}; + +use ff::PrimeField; + +use crate::{ + dev::util, + plonk::{sealed::SealedPhase, Circuit, ConstraintSystem, FirstPhase}, +}; + +#[derive(Debug)] +struct Constraint { + name: String, + expression: String, + queries: BTreeSet, +} + +#[derive(Debug)] +struct Gate { + name: String, + constraints: Vec, +} + +/// A struct for collecting and displaying the gates within a circuit. 
+/// +/// # Examples +/// +/// ``` +/// use ff::Field; +/// use halo2_proofs::{ +/// circuit::{Layouter, SimpleFloorPlanner}, +/// dev::CircuitGates, +/// plonk::{Circuit, ConstraintSystem, Error}, +/// poly::Rotation, +/// }; +/// use halo2curves::pasta::pallas; +/// +/// #[derive(Copy, Clone)] +/// struct MyConfig {} +/// +/// #[derive(Clone, Default)] +/// struct MyCircuit {} +/// +/// impl Circuit for MyCircuit { +/// type Config = MyConfig; +/// type FloorPlanner = SimpleFloorPlanner; +/// #[cfg(feature = "circuit-params")] +/// type Params = (); +/// +/// fn without_witnesses(&self) -> Self { +/// Self::default() +/// } +/// +/// fn configure(meta: &mut ConstraintSystem) -> MyConfig { +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("R1CS constraint", |meta| { +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s = meta.query_selector(s); +/// +/// Some(("R1CS", s * (a * b - c))) +/// }); +/// +/// // We aren't using this circuit for anything in this example. +/// MyConfig {} +/// } +/// +/// fn synthesize(&self, _: MyConfig, _: impl Layouter) -> Result<(), Error> { +/// // Gates are known at configure time; it doesn't matter how we use them. 
+/// Ok(()) +/// } +/// } +/// +/// #[cfg(feature = "circuit-params")] +/// let gates = CircuitGates::collect::(()); +/// #[cfg(not(feature = "circuit-params"))] +/// let gates = CircuitGates::collect::(); +/// assert_eq!( +/// format!("{}", gates), +/// r#####"R1CS constraint: +/// - R1CS: +/// S0 * (A0@0 * A1@0 - A2@0) +/// Total gates: 1 +/// Total custom constraint polynomials: 1 +/// Total negations: 1 +/// Total additions: 1 +/// Total multiplications: 2 +/// "#####, +/// ); +/// ``` +#[derive(Debug)] +pub struct CircuitGates { + gates: Vec, + total_negations: usize, + total_additions: usize, + total_multiplications: usize, +} + +impl CircuitGates { + /// Collects the gates from within the circuit. + pub fn collect>( + #[cfg(feature = "circuit-params")] params: C::Params, + ) -> Self { + // Collect the graph details. + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let _ = C::configure_with_params(&mut cs, params); + #[cfg(not(feature = "circuit-params"))] + let _ = C::configure(&mut cs); + + let gates = cs + .gates + .iter() + .map(|gate| Gate { + name: gate.name().to_string(), + constraints: gate + .polynomials() + .iter() + .enumerate() + .map(|(i, constraint)| Constraint { + name: gate.constraint_name(i).to_string(), + expression: constraint.evaluate( + &util::format_value, + &|selector| format!("S{}", selector.0), + &|query| format!("F{}@{}", query.column_index, query.rotation.0), + &|query| { + if query.phase == FirstPhase.to_sealed() { + format!("A{}@{}", query.column_index, query.rotation.0) + } else { + format!( + "A{}({})@{}", + query.column_index, + query.phase(), + query.rotation.0 + ) + } + }, + &|query| format!("I{}@{}", query.column_index, query.rotation.0), + &|challenge| format!("C{}({})", challenge.index(), challenge.phase()), + &|a| { + if a.contains(' ') { + format!("-({a})") + } else { + format!("-{a}") + } + }, + &|a, b| { + if let Some(b) = b.strip_prefix('-') { + format!("{a} - {b}") + } else { + 
format!("{a} + {b}") + } + }, + &|a, b| match (a.contains(' '), b.contains(' ')) { + (false, false) => format!("{a} * {b}"), + (false, true) => format!("{a} * ({b})"), + (true, false) => format!("({a}) * {b}"), + (true, true) => format!("({a}) * ({b})"), + }, + &|a, s| { + if a.contains(' ') { + format!("({}) * {}", a, util::format_value(s)) + } else { + format!("{} * {}", a, util::format_value(s)) + } + }, + ), + queries: constraint.evaluate( + &|_| BTreeSet::default(), + &|selector| vec![format!("S{}", selector.0)].into_iter().collect(), + &|query| { + vec![format!("F{}@{}", query.column_index, query.rotation.0)] + .into_iter() + .collect() + }, + &|query| { + let query = if query.phase == FirstPhase.to_sealed() { + format!("A{}@{}", query.column_index, query.rotation.0) + } else { + format!( + "A{}({})@{}", + query.column_index, + query.phase(), + query.rotation.0 + ) + }; + vec![query].into_iter().collect() + }, + &|query| { + vec![format!("I{}@{}", query.column_index, query.rotation.0)] + .into_iter() + .collect() + }, + &|challenge| { + vec![format!("C{}({})", challenge.index(), challenge.phase())] + .into_iter() + .collect() + }, + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ), + }) + .collect(), + }) + .collect(); + + let (total_negations, total_additions, total_multiplications) = cs + .gates + .iter() + .flat_map(|gate| { + gate.polynomials().iter().map(|poly| { + poly.evaluate( + &|_| (0, 0, 0), + &|_| (0, 0, 0), + &|_| (0, 0, 0), + &|_| (0, 0, 0), + &|_| (0, 0, 0), + &|_| (0, 0, 0), + &|(a_n, a_a, a_m)| (a_n + 1, a_a, a_m), + &|(a_n, a_a, a_m), (b_n, b_a, b_m)| (a_n + b_n, a_a + b_a + 1, a_m + b_m), + &|(a_n, a_a, a_m), (b_n, b_a, b_m)| (a_n + b_n, a_a + b_a, a_m + b_m + 1), + &|(a_n, a_a, a_m), _| (a_n, a_a, a_m + 1), + ) + }) + }) + .fold((0, 0, 0), |(acc_n, acc_a, acc_m), (n, a, m)| { + (acc_n + n, acc_a + a, acc_m + m) + }); + + CircuitGates { + gates, + total_negations, + 
total_additions, + total_multiplications, + } + } + + /// Prints the queries in this circuit to a CSV grid. + pub fn queries_to_csv(&self) -> String { + let mut queries = BTreeSet::new(); + for gate in &self.gates { + for constraint in &gate.constraints { + for query in &constraint.queries { + queries.insert(query); + } + } + } + + let mut ret = String::new(); + let w = &mut ret; + for query in &queries { + write!(w, "{query},").unwrap(); + } + writeln!(w, "Name").unwrap(); + + for gate in &self.gates { + for constraint in &gate.constraints { + for query in &queries { + if constraint.queries.contains(*query) { + write!(w, "1").unwrap(); + } else { + write!(w, "0").unwrap(); + } + write!(w, ",").unwrap(); + } + writeln!(w, "{}/{}", gate.name, constraint.name).unwrap(); + } + } + ret + } +} + +impl fmt::Display for CircuitGates { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { + for gate in &self.gates { + writeln!(f, "{}:", gate.name)?; + for constraint in &gate.constraints { + if constraint.name.is_empty() { + writeln!(f, "- {}", constraint.expression)?; + } else { + writeln!(f, "- {}:", constraint.name)?; + writeln!(f, " {}", constraint.expression)?; + } + } + } + writeln!(f, "Total gates: {}", self.gates.len())?; + writeln!( + f, + "Total custom constraint polynomials: {}", + self.gates + .iter() + .map(|gate| gate.constraints.len()) + .sum::() + )?; + writeln!(f, "Total negations: {}", self.total_negations)?; + writeln!(f, "Total additions: {}", self.total_additions)?; + writeln!(f, "Total multiplications: {}", self.total_multiplications) + } +} diff --git a/common/src/dev/graph.rs b/common/src/dev/graph.rs new file mode 100644 index 0000000000..11654fe415 --- /dev/null +++ b/common/src/dev/graph.rs @@ -0,0 +1,204 @@ +use ff::Field; +use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; + +use crate::{ + circuit::Value, + plonk::{ + Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, 
Error, + Fixed, FloorPlanner, Instance, Selector, + }, +}; + +pub mod layout; + +/// Builds a dot graph string representing the given circuit. +/// +/// The graph is built from calls to [`Layouter::namespace`] both within the circuit, and +/// inside the gadgets and chips that it uses. +/// +/// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace +pub fn circuit_dot_graph>( + circuit: &ConcreteCircuit, +) -> String { + // Collect the graph details. + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let mut graph = Graph::default(); + ConcreteCircuit::FloorPlanner::synthesize(&mut graph, circuit, config, cs.constants).unwrap(); + + // Construct the node labels. We need to store these, because tabbycat operates on + // string references, and we need those references to live long enough. + let node_labels: Vec<_> = graph + .nodes + .into_iter() + .map(|(name, gadget_name)| { + if let Some(gadget_name) = gadget_name { + format!("[{gadget_name}] {name}") + } else { + name + } + }) + .collect(); + + // Construct the dot graph statements. + let mut stmts = StmtList::new(); + for (id, label) in node_labels.iter().enumerate() { + stmts = stmts.add_node( + id.into(), + None, + Some(AttrList::new().add_pair(tabbycat::attributes::label(label))), + ); + } + for (parent, child) in graph.edges { + stmts = + stmts.add_edge(Edge::head_node(parent.into(), None).arrow_to_node(child.into(), None)) + } + + // Build the graph! + GraphBuilder::default() + .graph_type(GraphType::DiGraph) + .strict(false) + .id(Identity::id("circuit").unwrap()) + .stmts(stmts) + .build() + .unwrap() + .to_string() +} + +#[derive(Default)] +struct Graph { + /// Graph nodes in the namespace, structured as `(name, gadget_name)`. 
+ nodes: Vec<(String, Option)>, + + /// Directed edges in the graph, as pairs of indices into `nodes`. + edges: Vec<(usize, usize)>, + + /// The current namespace, as indices into `nodes`. + current_namespace: Vec, +} + +impl Assignment for Graph { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing; we don't care about cells in this context. + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, _: Column, _: usize) -> Result, Error> { + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing; we don't care about cells in this context. + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing; we don't care about cells in this context. + Ok(()) + } + + fn copy( + &mut self, + _: Column, + _: usize, + _: Column, + _: usize, + ) -> Result<(), crate::plonk::Error> { + // Do nothing; we don't care about permutations in this context. + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Store the new node. 
+ let new_node = self.nodes.len(); + self.nodes.push((name_fn().into(), None)); + + // Create an edge from the parent, if any. + if let Some(parent) = self.current_namespace.last() { + self.edges.push((*parent, new_node)); + } + + // Push the new namespace. + self.current_namespace.push(new_node); + } + + fn pop_namespace(&mut self, gadget_name: Option) { + // Store the gadget name that was extracted, if any. + let node = self + .current_namespace + .last() + .expect("pop_namespace should never be called on the root"); + self.nodes[*node].1 = gadget_name; + + // Pop the namespace. + self.current_namespace.pop(); + } +} diff --git a/common/src/dev/graph/layout.rs b/common/src/dev/graph/layout.rs new file mode 100644 index 0000000000..94bd7eea14 --- /dev/null +++ b/common/src/dev/graph/layout.rs @@ -0,0 +1,323 @@ +use ff::Field; +use plotters::{ + coord::Shift, + prelude::{DrawingArea, DrawingAreaErrorKind, DrawingBackend}, +}; +use std::collections::HashSet; +use std::ops::Range; + +use crate::{ + circuit::layouter::RegionColumn, + dev::cost::Layout, + plonk::{Any, Circuit, Column, ConstraintSystem, FloorPlanner}, +}; + +/// Graphical renderer for circuit layouts. +/// +/// Cells that have been assigned to by the circuit will be shaded. If any cells are +/// assigned to more than once (which is usually a mistake), they will be shaded darker +/// than the surrounding cells. 
/// Graphical renderer for circuit layouts.
///
/// Cells that have been assigned to by the circuit will be shaded. If any cells are
/// assigned to more than once (which is usually a mistake), they will be shaded darker
/// than the surrounding cells.
///
/// # Examples
///
/// ```ignore
/// use halo2_proofs::dev::CircuitLayout;
/// use plotters::prelude::*;
///
/// let drawing_area = BitMapBackend::new("example-circuit-layout.png", (1024, 768))
///     .into_drawing_area();
/// drawing_area.fill(&WHITE).unwrap();
/// let drawing_area = drawing_area
///     .titled("Example Circuit Layout", ("sans-serif", 60))
///     .unwrap();
///
/// let circuit = MyCircuit::default();
/// let k = 5; // Suitable size for MyCircuit
/// CircuitLayout::default().render(k, &circuit, &drawing_area).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct CircuitLayout {
    // True when region labels should be suppressed in the rendering.
    hide_labels: bool,
    // True when equality-constrained cells should be shaded red.
    mark_equality_cells: bool,
    // True when lines should be drawn between equality-constrained cells.
    show_equality_constraints: bool,
    // Optional horizontal window, as a range of columns.
    view_width: Option<Range<usize>>,
    // Optional vertical window, as a range of rows.
    view_height: Option<Range<usize>>,
}

impl CircuitLayout {
    /// Sets the visibility of region labels.
    ///
    /// The default is to show labels.
    pub fn show_labels(mut self, show: bool) -> Self {
        self.hide_labels = !show;
        self
    }

    /// Marks cells involved in equality constraints, in red.
    ///
    /// The default is to not mark these cells.
    pub fn mark_equality_cells(mut self, show: bool) -> Self {
        self.mark_equality_cells = show;
        self
    }

    /// Draws red lines between equality-constrained cells.
    ///
    /// The default is to not show these, as they can get _very_ messy.
    pub fn show_equality_constraints(mut self, show: bool) -> Self {
        self.show_equality_constraints = show;
        self
    }

    /// Sets the view width for this layout, as a number of columns.
    pub fn view_width(mut self, width: Range<usize>) -> Self {
        self.view_width = Some(width);
        self
    }

    /// Sets the view height for this layout, as a number of rows.
    pub fn view_height(mut self, height: Range<usize>) -> Self {
        self.view_height = Some(height);
        self
    }
}
+ pub fn render, DB: DrawingBackend>( + self, + k: u32, + circuit: &ConcreteCircuit, + drawing_area: &DrawingArea, + ) -> Result<(), DrawingAreaErrorKind> { + use plotters::coord::types::RangedCoordusize; + use plotters::prelude::*; + + let n = 1 << k; + // Collect the layout details. + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let mut layout = Layout::new(k, n, cs.num_selectors); + ConcreteCircuit::FloorPlanner::synthesize( + &mut layout, + circuit, + config, + cs.constants.clone(), + ) + .unwrap(); + let (cs, selector_polys) = cs.compress_selectors(layout.selectors); + let non_selector_fixed_columns = cs.num_fixed_columns - selector_polys.len(); + + // Figure out what order to render the columns in. + // TODO: For now, just render them in the order they were configured. + let total_columns = cs.num_instance_columns + cs.num_advice_columns + cs.num_fixed_columns; + let column_index = |cs: &ConstraintSystem, column: RegionColumn| { + let column: Column = match column { + RegionColumn::Column(col) => col, + RegionColumn::Selector(selector) => cs.selector_map[selector.0].into(), + }; + column.index() + + match column.column_type() { + Any::Instance => 0, + Any::Advice(_) => cs.num_instance_columns, + Any::Fixed => cs.num_instance_columns + cs.num_advice_columns, + } + }; + + let view_width = self.view_width.unwrap_or(0..total_columns); + let view_height = self.view_height.unwrap_or(0..n); + let view_bottom = view_height.end; + + // Prepare the grid layout. We render a red background for advice columns, white for + // instance columns, and blue for fixed columns (with a darker blue for selectors). 
+ let root = + drawing_area.apply_coord_spec(Cartesian2d::::new( + view_width, + view_height, + drawing_area.get_pixel_range(), + )); + root.draw(&Rectangle::new( + [(0, 0), (total_columns, view_bottom)], + ShapeStyle::from(&WHITE).filled(), + ))?; + root.draw(&Rectangle::new( + [ + (cs.num_instance_columns, 0), + (cs.num_instance_columns + cs.num_advice_columns, view_bottom), + ], + ShapeStyle::from(&RED.mix(0.2)).filled(), + ))?; + root.draw(&Rectangle::new( + [ + (cs.num_instance_columns + cs.num_advice_columns, 0), + (total_columns, view_bottom), + ], + ShapeStyle::from(&BLUE.mix(0.2)).filled(), + ))?; + { + root.draw(&Rectangle::new( + [ + ( + cs.num_instance_columns + + cs.num_advice_columns + + non_selector_fixed_columns, + 0, + ), + (total_columns, view_bottom), + ], + ShapeStyle::from(&BLUE.mix(0.1)).filled(), + ))?; + } + + // Mark the unusable rows of the circuit. + let usable_rows = n - (cs.blinding_factors() + 1); + if view_bottom > usable_rows { + root.draw(&Rectangle::new( + [(0, usable_rows), (total_columns, view_bottom)], + ShapeStyle::from(&RED.mix(0.4)).filled(), + ))?; + } + + root.draw(&Rectangle::new( + [(0, 0), (total_columns, view_bottom)], + BLACK, + ))?; + + let draw_region = |root: &DrawingArea<_, _>, top_left, bottom_right| { + root.draw(&Rectangle::new( + [top_left, bottom_right], + ShapeStyle::from(&WHITE).filled(), + ))?; + root.draw(&Rectangle::new( + [top_left, bottom_right], + ShapeStyle::from(&RED.mix(0.2)).filled(), + ))?; + root.draw(&Rectangle::new( + [top_left, bottom_right], + ShapeStyle::from(&GREEN.mix(0.2)).filled(), + ))?; + root.draw(&Rectangle::new([top_left, bottom_right], BLACK))?; + Ok(()) + }; + + let draw_cell = |root: &DrawingArea<_, _>, column, row| { + root.draw(&Rectangle::new( + [(column, row), (column + 1, row + 1)], + ShapeStyle::from(&BLACK.mix(0.1)).filled(), + )) + }; + + // Render the regions! 
+ let mut labels = if self.hide_labels { None } else { Some(vec![]) }; + for region in &layout.regions { + if let Some(offset) = region.offset { + // Sort the region's columns according to the defined ordering. + let mut columns: Vec<_> = region.columns.iter().cloned().collect(); + columns.sort_unstable_by_key(|a| column_index(&cs, *a)); + + // Render contiguous parts of the same region as a single box. + let mut width = None; + for column in columns { + let column = column_index(&cs, column); + match width { + Some((start, end)) if end == column => width = Some((start, end + 1)), + Some((start, end)) => { + draw_region(&root, (start, offset), (end, offset + region.rows))?; + if let Some(labels) = &mut labels { + labels.push((region.name.clone(), (start, offset))); + } + width = Some((column, column + 1)); + } + None => width = Some((column, column + 1)), + } + } + + // Render the last part of the region. + if let Some((start, end)) = width { + draw_region(&root, (start, offset), (end, offset + region.rows))?; + if let Some(labels) = &mut labels { + labels.push((region.name.clone(), (start, offset))); + } + } + } + } + + // Darken the cells of the region that have been assigned to. + for region in layout.regions { + for (column, row) in region.cells { + draw_cell(&root, column_index(&cs, column), row)?; + } + } + + // Darken any loose cells that have been assigned to. + for (column, row) in layout.loose_cells { + draw_cell(&root, column_index(&cs, column), row)?; + } + + // Mark equality-constrained cells. + if self.mark_equality_cells { + let mut cells = HashSet::new(); + for (l_col, l_row, r_col, r_row) in &layout.equality { + let l_col = column_index(&cs, (*l_col).into()); + let r_col = column_index(&cs, (*r_col).into()); + + // Deduplicate cells. 
+ cells.insert((l_col, *l_row)); + cells.insert((r_col, *r_row)); + } + + for (col, row) in cells { + root.draw(&Rectangle::new( + [(col, row), (col + 1, row + 1)], + ShapeStyle::from(&RED.mix(0.5)).filled(), + ))?; + } + } + + // Draw lines between equality-constrained cells. + if self.show_equality_constraints { + for (l_col, l_row, r_col, r_row) in &layout.equality { + let l_col = column_index(&cs, (*l_col).into()); + let r_col = column_index(&cs, (*r_col).into()); + root.draw(&PathElement::new( + [(l_col, *l_row), (r_col, *r_row)], + ShapeStyle::from(&RED), + ))?; + } + } + + // Add a line showing the total used rows. + root.draw(&PathElement::new( + [(0, layout.total_rows), (total_columns, layout.total_rows)], + ShapeStyle::from(&BLACK), + ))?; + + // Render labels last, on top of everything else. + if let Some(labels) = labels { + for (label, top_left) in labels { + root.draw( + &(EmptyElement::at(top_left) + + Text::new(label, (10, 10), ("sans-serif", 15.0).into_font())), + )?; + } + root.draw( + &(EmptyElement::at((0, layout.total_rows)) + + Text::new( + format!("{} used rows", layout.total_rows), + (10, 10), + ("sans-serif", 15.0).into_font(), + )), + )?; + root.draw( + &(EmptyElement::at((0, usable_rows)) + + Text::new( + format!("{usable_rows} usable rows"), + (10, 10), + ("sans-serif", 15.0).into_font(), + )), + )?; + } + Ok(()) + } +} diff --git a/common/src/dev/metadata.rs b/common/src/dev/metadata.rs new file mode 100644 index 0000000000..f81bfa67a7 --- /dev/null +++ b/common/src/dev/metadata.rs @@ -0,0 +1,313 @@ +//! Metadata about circuits. + +use super::metadata::Column as ColumnMetadata; +use crate::plonk::{self, Any}; +use std::{ + collections::HashMap, + fmt::{self, Debug}, +}; +/// Metadata about a column within a circuit. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Column { + /// The type of the column. + pub(super) column_type: Any, + /// The index of the column. 
+ pub(super) index: usize, +} + +impl Column { + /// Return the column type. + pub fn column_type(&self) -> Any { + self.column_type + } + /// Return the column index. + pub fn index(&self) -> usize { + self.index + } +} + +impl fmt::Display for Column { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Column('{:?}', {})", self.column_type, self.index) + } +} + +impl From<(Any, usize)> for Column { + fn from((column_type, index): (Any, usize)) -> Self { + Column { column_type, index } + } +} + +impl From> for Column { + fn from(column: plonk::Column) -> Self { + Column { + column_type: *column.column_type(), + index: column.index(), + } + } +} + +/// A helper structure that allows to print a Column with it's annotation as a single structure. +#[derive(Debug, Clone)] +pub(super) struct DebugColumn { + /// The type of the column. + column_type: Any, + /// The index of the column. + index: usize, + /// Annotation of the column + annotation: String, +} + +impl From<(Column, Option<&HashMap>)> for DebugColumn { + fn from(info: (Column, Option<&HashMap>)) -> Self { + DebugColumn { + column_type: info.0.column_type, + index: info.0.index, + annotation: info + .1 + .and_then(|map| map.get(&info.0)) + .cloned() + .unwrap_or_default(), + } + } +} + +impl fmt::Display for DebugColumn { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Column('{:?}', {} - {})", + self.column_type, self.index, self.annotation + ) + } +} + +/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset +/// within a custom gate. 
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct VirtualCell { + name: String, + pub(super) column: Column, + pub(super) rotation: i32, +} + +impl From<(Column, i32)> for VirtualCell { + fn from((column, rotation): (Column, i32)) -> Self { + VirtualCell { + name: "".to_string(), + column, + rotation, + } + } +} + +impl> From<(S, Column, i32)> for VirtualCell { + fn from((name, column, rotation): (S, Column, i32)) -> Self { + VirtualCell { + name: name.as_ref().to_string(), + column, + rotation, + } + } +} + +impl From for VirtualCell { + fn from(c: plonk::VirtualCell) -> Self { + VirtualCell { + name: "".to_string(), + column: c.column.into(), + rotation: c.rotation.0, + } + } +} + +impl fmt::Display for VirtualCell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}@{}", self.column, self.rotation)?; + if !self.name.is_empty() { + write!(f, "({})", self.name.as_str())?; + } + Ok(()) + } +} + +/// Helper structure used to be able to inject Column annotations inside a `Display` or `Debug` call. +#[derive(Clone, Debug)] +pub(super) struct DebugVirtualCell { + name: String, + column: DebugColumn, + rotation: i32, +} + +impl From<(&VirtualCell, Option<&HashMap>)> for DebugVirtualCell { + fn from(info: (&VirtualCell, Option<&HashMap>)) -> Self { + DebugVirtualCell { + name: info.0.name.clone(), + column: DebugColumn::from((info.0.column, info.1)), + rotation: info.0.rotation, + } + } +} + +impl fmt::Display for DebugVirtualCell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}@{}", self.column, self.rotation)?; + if !self.name.is_empty() { + write!(f, "({})", self.name)?; + } + Ok(()) + } +} + +/// Metadata about a configured gate within a circuit. +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Gate { + /// The index of the active gate. These indices are assigned in the order in which + /// `ConstraintSystem::create_gate` is called during `Circuit::configure`. 
+ pub(super) index: usize, + /// The name of the active gate. These are specified by the gate creator (such as + /// a chip implementation), and is not enforced to be unique. + pub(super) name: String, +} + +impl fmt::Display for Gate { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Gate {} ('{}')", self.index, self.name.as_str()) + } +} + +impl> From<(usize, S)> for Gate { + fn from((index, name): (usize, S)) -> Self { + Gate { + index, + name: name.as_ref().to_string(), + } + } +} + +/// Metadata about a configured constraint within a circuit. +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Constraint { + /// The gate containing the constraint. + pub(super) gate: Gate, + /// The index of the polynomial constraint within the gate. These indices correspond + /// to the order in which the constraints are returned from the closure passed to + /// `ConstraintSystem::create_gate` during `Circuit::configure`. + pub(super) index: usize, + /// The name of the constraint. This is specified by the gate creator (such as a chip + /// implementation), and is not enforced to be unique. + pub(super) name: String, +} + +impl fmt::Display for Constraint { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Constraint {}{} in gate {} ('{}')", + self.index, + if self.name.is_empty() { + String::new() + } else { + format!(" ('{}')", self.name.as_str()) + }, + self.gate.index, + self.gate.name, + ) + } +} + +impl> From<(Gate, usize, S)> for Constraint { + fn from((gate, index, name): (Gate, usize, S)) -> Self { + Constraint { + gate, + index, + name: name.as_ref().to_string(), + } + } +} + +/// Metadata about an assigned region within a circuit. +#[derive(Clone)] +pub struct Region { + /// The index of the region. These indices are assigned in the order in which + /// `Layouter::assign_region` is called during `Circuit::synthesize`. + pub(super) index: usize, + /// The name of the region. 
This is specified by the region creator (such as a chip + /// implementation), and is not enforced to be unique. + pub(super) name: String, + /// A reference to the annotations of the Columns that exist within this `Region`. + pub(super) column_annotations: Option>, +} + +impl Region { + /// Fetch the annotation of a `Column` within a `Region` providing it's associated metadata. + /// + /// This function will return `None` if: + /// - There's no annotation map generated for this `Region`. + /// - There's no entry on the annotation map corresponding to the metadata provided. + pub(crate) fn get_column_annotation(&self, metadata: ColumnMetadata) -> Option { + self.column_annotations + .as_ref() + .and_then(|map| map.get(&metadata).cloned()) + } +} + +impl PartialEq for Region { + fn eq(&self, other: &Self) -> bool { + self.index == other.index && self.name == other.name + } +} + +impl Eq for Region {} + +impl Debug for Region { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Region {} ('{}')", self.index, self.name) + } +} + +impl fmt::Display for Region { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Region {} ('{}')", self.index, self.name.as_str()) + } +} + +impl From<(usize, String)> for Region { + fn from((index, name): (usize, String)) -> Self { + Region { + index, + name, + column_annotations: None, + } + } +} + +impl From<(usize, &str)> for Region { + fn from((index, name): (usize, &str)) -> Self { + Region { + index, + name: name.to_owned(), + column_annotations: None, + } + } +} + +impl From<(usize, String, HashMap)> for Region { + fn from((index, name, annotations): (usize, String, HashMap)) -> Self { + Region { + index, + name, + column_annotations: Some(annotations), + } + } +} + +impl From<(usize, &str, HashMap)> for Region { + fn from((index, name, annotations): (usize, &str, HashMap)) -> Self { + Region { + index, + name: name.to_owned(), + column_annotations: Some(annotations), + } + } +} diff 
--git a/common/src/dev/tfp.rs b/common/src/dev/tfp.rs new file mode 100644 index 0000000000..f5960ff4d9 --- /dev/null +++ b/common/src/dev/tfp.rs @@ -0,0 +1,509 @@ +use std::{fmt, marker::PhantomData}; + +use ff::Field; +use tracing::{debug, debug_span, span::EnteredSpan}; + +use crate::{ + circuit::{ + layouter::{RegionLayouter, SyncDeps}, + AssignedCell, Cell, Layouter, Region, Table, Value, + }, + plonk::{ + Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Fixed, + FloorPlanner, Instance, Selector, + }, +}; +use halo2_middleware::circuit::Challenge; + +/// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. +/// +/// `TracingFloorPlanner` can be used to instrument your circuit and determine exactly +/// what is happening during a particular run of keygen or proving. This can be useful for +/// identifying unexpected non-determinism or changes to a circuit. +/// +/// # No stability guarantees +/// +/// The `tracing` output is intended for use during circuit development. It should not be +/// considered production-stable, and the precise format or data exposed may change at any +/// time. +/// +/// # Examples +/// +/// ``` +/// use ff::Field; +/// use halo2_proofs::{ +/// circuit::{floor_planner, Layouter, Value}, +/// dev::TracingFloorPlanner, +/// plonk::{Circuit, ConstraintSystem, Error}, +/// }; +/// +/// # struct MyCircuit { +/// # some_witness: Value, +/// # }; +/// # #[derive(Clone)] +/// # struct MyConfig; +/// impl Circuit for MyCircuit { +/// // Wrap `TracingFloorPlanner` around your existing floor planner of choice. +/// //type FloorPlanner = floor_planner::V1; +/// type FloorPlanner = TracingFloorPlanner; +/// +/// // The rest of your `Circuit` implementation is unchanged. 
+/// type Config = MyConfig; +/// +/// #[cfg(feature = "circuit-params")] +/// type Params = (); +/// +/// fn without_witnesses(&self) -> Self { +/// Self { some_witness: Value::unknown() } +/// } +/// +/// fn configure(meta: &mut ConstraintSystem) -> Self::Config { +/// // .. +/// # todo!() +/// } +/// +/// fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error> { +/// // .. +/// # todo!() +/// } +/// } +/// +/// #[test] +/// fn some_circuit_test() { +/// // At the start of your test, enable tracing. +/// tracing_subscriber::fmt() +/// .with_max_level(tracing::Level::DEBUG) +/// .with_ansi(false) +/// .without_time() +/// .init(); +/// +/// // Now when the rest of the test runs, you will get `tracing` output for every +/// // operation that the circuit performs under the hood! +/// } +/// ``` +#[derive(Debug)] +pub struct TracingFloorPlanner { + _phantom: PhantomData

, +} + +impl FloorPlanner for TracingFloorPlanner

{ + fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error> { + P::synthesize( + &mut TracingAssignment::new(cs), + &TracingCircuit::borrowed(circuit), + config, + constants, + ) + } +} + +/// A helper type that augments a [`Circuit`] with [`tracing`] spans and events. +enum TracingCircuit<'c, F: Field, C: Circuit> { + Borrowed(&'c C, PhantomData), + Owned(C, PhantomData), +} + +impl<'c, F: Field, C: Circuit> TracingCircuit<'c, F, C> { + fn borrowed(circuit: &'c C) -> Self { + Self::Borrowed(circuit, PhantomData) + } + + fn owned(circuit: C) -> Self { + Self::Owned(circuit, PhantomData) + } + + fn inner_ref(&self) -> &C { + match self { + TracingCircuit::Borrowed(circuit, ..) => circuit, + TracingCircuit::Owned(circuit, ..) => circuit, + } + } +} + +impl<'c, F: Field, C: Circuit> Circuit for TracingCircuit<'c, F, C> { + type Config = C::Config; + type FloorPlanner = C::FloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self::owned(self.inner_ref().without_witnesses()) + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let _span = debug_span!("configure").entered(); + C::configure(meta) + } + + fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error> { + let _span = debug_span!("synthesize").entered(); + self.inner_ref() + .synthesize(config, TracingLayouter::new(layouter)) + } +} + +/// A helper type that augments a [`Layouter`] with [`tracing`] spans and events. 
+struct TracingLayouter> { + layouter: L, + namespace_spans: Vec, + _phantom: PhantomData, +} + +impl> TracingLayouter { + fn new(layouter: L) -> Self { + Self { + layouter, + namespace_spans: vec![], + _phantom: PhantomData, + } + } +} + +impl> Layouter for TracingLayouter { + type Root = Self; + + fn assign_region(&mut self, name: N, mut assignment: A) -> Result + where + A: FnMut(Region<'_, F>) -> Result, + N: Fn() -> NR, + NR: Into, + { + let _span = debug_span!("region", name = name().into()).entered(); + self.layouter.assign_region(name, |region| { + let mut region = TracingRegion(region); + let region: &mut dyn RegionLayouter = &mut region; + assignment(region.into()) + }) + } + + fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> + where + A: FnMut(Table<'_, F>) -> Result<(), Error>, + N: Fn() -> NR, + NR: Into, + { + let _span = debug_span!("table", name = name().into()).entered(); + self.layouter.assign_table(name, assignment) + } + + fn constrain_instance( + &mut self, + cell: Cell, + column: Column, + row: usize, + ) -> Result<(), Error> { + self.layouter.constrain_instance(cell, column, row) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn get_root(&mut self) -> &mut Self::Root { + self + } + + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + let name = name_fn().into(); + self.namespace_spans.push(debug_span!("ns", name).entered()); + self.layouter.push_namespace(|| name); + } + + fn pop_namespace(&mut self, gadget_name: Option) { + self.layouter.pop_namespace(gadget_name); + self.namespace_spans.pop(); + } +} + +fn debug_value_and_return_cell(value: AssignedCell) -> Cell { + if let Some(v) = value.value().into_option() { + debug!(target: "assigned", value = ?v); + } + value.cell() +} + +/// A helper type that augments a [`Region`] with [`tracing`] spans and events. 
+#[derive(Debug)] +struct TracingRegion<'r, F: Field>(Region<'r, F>); + +impl<'r, F: Field> RegionLayouter for TracingRegion<'r, F> { + fn enable_selector<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + selector: &Selector, + offset: usize, + ) -> Result<(), Error> { + let _guard = debug_span!("enable_selector", name = annotation(), offset = offset).entered(); + debug!(target: "layouter", "Entered"); + self.0.enable_selector(annotation, selector, offset) + } + + fn name_column<'v>( + &'v mut self, + _: &'v (dyn std::ops::Fn() -> std::string::String + 'v), + _: Column, + ) { + } + + fn assign_advice<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + let _guard = + debug_span!("assign_advice", name = annotation(), column = ?column, offset = offset) + .entered(); + debug!(target: "layouter", "Entered"); + self.0 + .assign_advice(annotation, column, offset, to) + .map(debug_value_and_return_cell) + } + + fn assign_advice_from_constant<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + constant: Assigned, + ) -> Result { + let _guard = debug_span!("assign_advice_from_constant", + name = annotation(), + column = ?column, + offset = offset, + constant = ?constant, + ) + .entered(); + debug!(target: "layouter", "Entered"); + self.0 + .assign_advice_from_constant(annotation, column, offset, constant) + .map(debug_value_and_return_cell) + } + + fn assign_advice_from_instance<'v>( + &mut self, + annotation: &'v (dyn Fn() -> String + 'v), + instance: Column, + row: usize, + advice: Column, + offset: usize, + ) -> Result<(Cell, Value), Error> { + let _guard = debug_span!("assign_advice_from_instance", + name = annotation(), + instance = ?instance, + row = row, + advice = ?advice, + offset = offset, + ) + .entered(); + debug!(target: "layouter", "Entered"); + self.0 + 
.assign_advice_from_instance(annotation, instance, row, advice, offset) + .map(|value| { + if let Some(v) = value.value().into_option() { + debug!(target: "assigned", value = ?v); + } + (value.cell(), value.value().cloned()) + }) + } + + fn instance_value( + &mut self, + instance: Column, + row: usize, + ) -> Result, Error> { + self.0.instance_value(instance, row) + } + + fn assign_fixed<'v>( + &'v mut self, + annotation: &'v (dyn Fn() -> String + 'v), + column: Column, + offset: usize, + to: &'v mut (dyn FnMut() -> Value> + 'v), + ) -> Result { + let _guard = + debug_span!("assign_fixed", name = annotation(), column = ?column, offset = offset) + .entered(); + debug!(target: "layouter", "Entered"); + self.0 + .assign_fixed(annotation, column, offset, to) + .map(debug_value_and_return_cell) + } + + fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { + debug!(target: "constrain_constant", cell = ?cell, constant = ?constant); + self.0.constrain_constant(cell, constant) + } + + fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { + debug!(target: "constrain_equal", left = ?left, right = ?right); + self.0.constrain_equal(left, right) + } +} + +/// A helper type that augments an [`Assignment`] with [`tracing`] spans and events. 
+struct TracingAssignment<'cs, F: Field, CS: Assignment> { + cs: &'cs mut CS, + in_region: bool, + _phantom: PhantomData, +} + +impl<'cs, F: Field, CS: Assignment> TracingAssignment<'cs, F, CS> { + fn new(cs: &'cs mut CS) -> Self { + Self { + cs, + in_region: false, + _phantom: PhantomData, + } + } +} + +impl<'cs, F: Field, CS: Assignment> Assignment for TracingAssignment<'cs, F, CS> { + fn enter_region(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + self.in_region = true; + self.cs.enter_region(name_fn); + } + + fn annotate_column(&mut self, _: A, _: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + } + + fn exit_region(&mut self) { + self.cs.exit_region(); + self.in_region = false; + } + + fn enable_selector( + &mut self, + annotation: A, + selector: &Selector, + row: usize, + ) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + let annotation = annotation().into(); + if self.in_region { + debug!(target: "position", row = row); + } else { + debug!(target: "enable_selector", name = annotation, row = row); + } + self.cs.enable_selector(|| annotation, selector, row) + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + let _guard = debug_span!("positioned").entered(); + debug!(target: "query_instance", column = ?column, row = row); + self.cs.query_instance(column, row) + } + + fn assign_advice( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + let annotation = annotation().into(); + if self.in_region { + debug!(target: "position", row = row); + } else { + debug!(target: "assign_advice", name = annotation, column = ?column, row = row); + } + self.cs.assign_advice(|| annotation, column, row, to) + } + + fn assign_fixed( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: 
FnOnce() -> AR, + AR: Into, + { + let annotation = annotation().into(); + if self.in_region { + debug!(target: "position", row = row); + } else { + debug!(target: "assign_fixed", name = annotation, column = ?column, row = row); + } + self.cs.assign_fixed(|| annotation, column, row, to) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + let _guard = debug_span!("positioned").entered(); + debug!( + target: "copy", + left_column = ?left_column, + left_row = left_row, + right_column = ?right_column, + right_row = right_row, + ); + self.cs.copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + column: Column, + row: usize, + to: Value>, + ) -> Result<(), Error> { + let _guard = debug_span!("positioned").entered(); + debug!(target: "fill_from_row", column = ?column, row = row); + self.cs.fill_from_row(column, row, to) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // We enter namespace spans in TracingLayouter. + self.cs.push_namespace(name_fn) + } + + fn pop_namespace(&mut self, gadget_name: Option) { + self.cs.pop_namespace(gadget_name); + // We exit namespace spans in TracingLayouter. 
+ } +} diff --git a/common/src/dev/util.rs b/common/src/dev/util.rs new file mode 100644 index 0000000000..0e1bef7e5b --- /dev/null +++ b/common/src/dev/util.rs @@ -0,0 +1,159 @@ +use group::ff::Field; +use std::collections::BTreeMap; + +use super::{metadata, CellValue, InstanceValue, Value}; +use crate::plonk::{ + Advice, AdviceQuery, Any, Column, ColumnType, Expression, FixedQuery, Gate, InstanceQuery, + VirtualCell, +}; +use halo2_middleware::poly::Rotation; + +pub(crate) struct AnyQuery { + /// Query index + pub index: Option, + /// Column type + pub column_type: Any, + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +impl From for AnyQuery { + fn from(query: FixedQuery) -> Self { + Self { + index: query.index, + column_type: Any::Fixed, + column_index: query.column_index, + rotation: query.rotation, + } + } +} + +impl From for AnyQuery { + fn from(query: AdviceQuery) -> Self { + Self { + index: query.index, + column_type: Any::Advice(Advice { phase: query.phase }), + column_index: query.column_index, + rotation: query.rotation, + } + } +} + +impl From for AnyQuery { + fn from(query: InstanceQuery) -> Self { + Self { + index: query.index, + column_type: Any::Instance, + column_index: query.column_index, + rotation: query.rotation, + } + } +} + +pub(super) fn format_value(v: F) -> String { + if v.is_zero_vartime() { + "0".into() + } else if v == F::ONE { + "1".into() + } else if v == -F::ONE { + "-1".into() + } else { + // Format value as hex. + let s = format!("{v:?}"); + // Remove leading zeroes. 
+ let s = s.strip_prefix("0x").unwrap(); + let s = s.trim_start_matches('0'); + format!("0x{s}") + } +} + +pub(super) fn load<'a, F: Field, T: ColumnType, Q: Into + Copy>( + n: i32, + row: i32, + queries: &'a [(Column, Rotation)], + cells: &'a [Vec>], +) -> impl Fn(Q) -> Value + 'a { + move |query| { + let (column, at) = &queries[query.into().index.unwrap()]; + let resolved_row = (row + at.0) % n; + cells[column.index()][resolved_row as usize].into() + } +} + +pub(super) fn load_instance<'a, F: Field, T: ColumnType, Q: Into + Copy>( + n: i32, + row: i32, + queries: &'a [(Column, Rotation)], + cells: &'a [Vec>], +) -> impl Fn(Q) -> Value + 'a { + move |query| { + let (column, at) = &queries[query.into().index.unwrap()]; + let resolved_row = (row + at.0) % n; + let cell = &cells[column.index()][resolved_row as usize]; + Value::Real(cell.value()) + } +} + +fn cell_value<'a, F: Field, Q: Into + Copy>( + virtual_cells: &'a [VirtualCell], + load: impl Fn(Q) -> Value + 'a, +) -> impl Fn(Q) -> BTreeMap + 'a { + move |query| { + let AnyQuery { + column_type, + column_index, + rotation, + .. + } = query.into(); + virtual_cells + .iter() + .find(|c| { + c.column.column_type() == &column_type + && c.column.index() == column_index + && c.rotation == rotation + }) + // None indicates a selector, which we don't bother showing. 
+ .map(|cell| { + ( + cell.clone().into(), + match load(query) { + Value::Real(v) => format_value(v), + Value::Poison => unreachable!(), + }, + ) + }) + .into_iter() + .collect() + } +} + +pub(super) fn cell_values<'a, F: Field>( + gate: &Gate, + poly: &Expression, + load_fixed: impl Fn(FixedQuery) -> Value + 'a, + load_advice: impl Fn(AdviceQuery) -> Value + 'a, + load_instance: impl Fn(InstanceQuery) -> Value + 'a, +) -> Vec<(metadata::VirtualCell, String)> { + let virtual_cells = gate.queried_cells(); + let cell_values = poly.evaluate( + &|_| BTreeMap::default(), + &|_| panic!("virtual selectors are removed during optimization"), + &cell_value(virtual_cells, load_fixed), + &cell_value(virtual_cells, load_advice), + &cell_value(virtual_cells, load_instance), + &|_| BTreeMap::default(), + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ); + cell_values.into_iter().collect() +} diff --git a/common/src/helpers.rs b/common/src/helpers.rs index e69de29bb2..faf7351a3e 100644 --- a/common/src/helpers.rs +++ b/common/src/helpers.rs @@ -0,0 +1,154 @@ +use crate::poly::Polynomial; +use ff::PrimeField; +use halo2curves::{serde::SerdeObject, CurveAffine}; +use std::io; + +/// This enum specifies how various types are serialized and deserialized. +#[derive(Clone, Copy, Debug)] +pub enum SerdeFormat { + /// Curve elements are serialized in compressed form. + /// Field elements are serialized in standard form, with endianness specified by the + /// `PrimeField` implementation. + Processed, + /// Curve elements are serialized in uncompressed form. Field elements are serialized + /// in their internal Montgomery representation. + /// When deserializing, checks are performed to ensure curve elements indeed lie on the curve and field elements + /// are less than modulus. + RawBytes, + /// Serialization is the same as `RawBytes`, but no checks are performed. 
+ RawBytesUnchecked, +} + +// Keep this trait for compatibility with IPA serialization +pub(crate) trait CurveRead: CurveAffine { + /// Reads a compressed element from the buffer and attempts to parse it + /// using `from_bytes`. + fn read(reader: &mut R) -> io::Result { + let mut compressed = Self::Repr::default(); + reader.read_exact(compressed.as_mut())?; + Option::from(Self::from_bytes(&compressed)) + .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Invalid point encoding in proof")) + } +} +impl CurveRead for C {} + +pub trait SerdeCurveAffine: CurveAffine + SerdeObject { + /// Reads an element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompress it + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + match format { + SerdeFormat::Processed => ::read(reader), + SerdeFormat::RawBytes => ::read_raw(reader), + SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), + } + } + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + match format { + SerdeFormat::Processed => writer.write_all(self.to_bytes().as_ref()), + _ => self.write_raw(writer), + } + } + + /// Byte length of an affine curve element according to `format`. 
+ fn byte_length(format: SerdeFormat) -> usize { + match format { + SerdeFormat::Processed => Self::default().to_bytes().as_ref().len(), + _ => Self::Repr::default().as_ref().len() * 2, + } + } +} +impl SerdeCurveAffine for C {} + +pub trait SerdePrimeField: PrimeField + SerdeObject { + /// Reads a field element as bytes from the buffer according to the `format`: + /// - `Processed`: Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations, + /// and checks that the element is less than the modulus. + /// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks. + fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + match format { + SerdeFormat::Processed => { + let mut compressed = Self::Repr::default(); + reader.read_exact(compressed.as_mut())?; + Option::from(Self::from_repr(compressed)).ok_or_else(|| { + io::Error::new(io::ErrorKind::Other, "Invalid prime field point encoding") + }) + } + SerdeFormat::RawBytes => ::read_raw(reader), + SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), + } + } + + /// Writes a field element as bytes to the buffer according to the `format`: + /// - `Processed`: Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + match format { + SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()), + _ => self.write_raw(writer), + } + } +} +impl SerdePrimeField for F {} + +/// Convert a slice of `bool` into a `u8`. +/// +/// Panics if the slice has length greater than 8. 
+pub fn pack(bits: &[bool]) -> u8 { + let mut value = 0u8; + assert!(bits.len() <= 8); + for (bit_index, bit) in bits.iter().enumerate() { + value |= (*bit as u8) << bit_index; + } + value +} + +/// Writes the first `bits.len()` bits of a `u8` into `bits`. +pub fn unpack(byte: u8, bits: &mut [bool]) { + for (bit_index, bit) in bits.iter_mut().enumerate() { + *bit = (byte >> bit_index) & 1 == 1; + } +} + +/// Reads a vector of polynomials from buffer +pub(crate) fn read_polynomial_vec( + reader: &mut R, + format: SerdeFormat, +) -> io::Result>> { + let mut len = [0u8; 4]; + reader.read_exact(&mut len)?; + let len = u32::from_be_bytes(len); + + (0..len) + .map(|_| Polynomial::::read(reader, format)) + .collect::>>() +} + +/// Writes a slice of polynomials to buffer +pub(crate) fn write_polynomial_slice( + slice: &[Polynomial], + writer: &mut W, + format: SerdeFormat, +) -> io::Result<()> { + writer.write_all(&(slice.len() as u32).to_be_bytes())?; + for poly in slice.iter() { + poly.write(writer, format)?; + } + Ok(()) +} + +/// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length +pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { + let field_len = F::default().to_repr().as_ref().len(); + 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) +} diff --git a/common/src/lib.rs b/common/src/lib.rs index a809a4c9ad..acc26aff15 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,5 +1,21 @@ +//! # halo2_proofs + +#![cfg_attr(docsrs, feature(doc_cfg))] +// The actual lints we want to disable. 
+#![allow(clippy::op_ref, clippy::many_single_char_names)] +#![deny(rustdoc::broken_intra_doc_links)] +#![deny(missing_debug_implementations)] +#![deny(missing_docs)] +#![deny(unsafe_code)] + pub mod arithmetic; -pub mod helpers; -pub mod multicore; +pub mod circuit; +pub use halo2curves; +mod multicore; pub mod plonk; pub mod poly; +pub mod transcript; + +pub mod dev; +mod helpers; +pub use helpers::SerdeFormat; diff --git a/common/src/plonk.rs b/common/src/plonk.rs index 695a48ee9a..ab361d27ec 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -1,817 +1,48 @@ -mod lookup; -pub mod permutation; -mod shuffle; - -// use super::{lookup, permutation, shuffle}; -use core::cmp::max; -use core::ops::{Add, Mul}; -use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, - Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, +//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] +//! that is designed specifically for the polynomial commitment scheme described +//! in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 +//! 
[plonk]: https://eprint.iacr.org/2019/953 + +use blake2b_simd::Params as Blake2bParams; +use group::ff::{Field, FromUniformBytes, PrimeField}; + +use crate::arithmetic::CurveAffine; +use crate::helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, + SerdePrimeField, }; -use halo2_middleware::ff::Field; -use halo2_middleware::metadata; -use halo2_middleware::poly::Rotation; -use sealed::SealedPhase; -use std::collections::HashMap; -use std::iter::{Product, Sum}; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, +use crate::poly::{ + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, + Polynomial, }; +use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use crate::SerdeFormat; +use halo2_middleware::poly::Rotation; -// TODO: No sealed Phase on the backend, only in the frontend! -pub(crate) mod sealed { - /// Phase of advice column - #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(crate) u8); - - impl Phase { - pub fn prev(&self) -> Option { - self.0.checked_sub(1).map(Phase) - } - } - - impl SealedPhase for Phase { - fn to_sealed(self) -> Phase { - self - } - } - - /// Sealed trait to help keep `Phase` private. 
- pub trait SealedPhase { - fn to_sealed(self) -> Phase; - } -} - -/// Phase of advice column -pub trait Phase: SealedPhase {} - -impl Phase for P {} - -/// First phase -#[derive(Debug)] -pub struct FirstPhase; - -impl SealedPhase for super::FirstPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(0) - } -} - -/// Second phase -#[derive(Debug)] -pub struct SecondPhase; - -impl SealedPhase for super::SecondPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(1) - } -} - -/// Third phase -#[derive(Debug)] -pub struct ThirdPhase; - -impl SealedPhase for super::ThirdPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(2) - } -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl FixedQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, - /// Phase of this advice column - pub(crate) phase: sealed::Phase, -} - -impl AdviceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } - - /// Phase of this advice column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQuery { - /// Query index - pub(crate) index: Option, - /// 
Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl InstanceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, PartialEq, Eq)] -pub enum Expression { - /// This is a constant polynomial - Constant(F), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQuery), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQuery), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQuery), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl Into> for Expression { - fn into(self) -> ExpressionMid { - match self { - Expression::Constant(c) => ExpressionMid::Constant(c), - Expression::Fixed(FixedQuery { - column_index, - rotation, - .. - }) => ExpressionMid::Fixed(FixedQueryMid { - column_index, - rotation, - }), - Expression::Advice(AdviceQuery { - column_index, - rotation, - phase, - .. - }) => ExpressionMid::Advice(AdviceQueryMid { - column_index, - rotation, - phase: phase.0, - }), - Expression::Instance(InstanceQuery { - column_index, - rotation, - .. 
- }) => ExpressionMid::Instance(InstanceQueryMid { - column_index, - rotation, - }), - Expression::Challenge(c) => ExpressionMid::Challenge(c), - Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), - Expression::Sum(lhs, rhs) => { - ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Product(lhs, rhs) => { - ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), - } - } -} - -impl Expression { - /// Evaluate the polynomial using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - constant: &impl Fn(F) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - 
sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - product(a, b) - } - Expression::Scaled(a, f) => { - let a = a.evaluate( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - scaled(a, *f) - } - } - } - - /// Evaluate the polynomial lazily using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate_lazy( - &self, - constant: &impl Fn(F) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - zero: &T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - let b = b.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let (a, b) = if a.complexity() <= b.complexity() { - (a, b) - } else { - (b, a) - }; - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - 
sum, - product, - scaled, - zero, - ); - - if a == *zero { - a - } else { - let b = b.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - product(a, b) - } - } - Expression::Scaled(a, f) => { - let a = a.evaluate_lazy( - constant, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - scaled(a, *f) - } - } - } - - fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { - match self { - Expression::Constant(scalar) => write!(writer, "{scalar:?}"), - Expression::Fixed(query) => { - write!( - writer, - "fixed[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Advice(query) => { - write!( - writer, - "advice[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Instance(query) => { - write!( - writer, - "instance[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Challenge(challenge) => { - write!(writer, "challenge[{}]", challenge.index()) - } - Expression::Negated(a) => { - writer.write_all(b"(-")?; - a.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Sum(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"+")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Product(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"*")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Scaled(a, f) => { - a.write_identifier(writer)?; - write!(writer, "*{f:?}") - } - } - } - - /// Identifier for this expression. Expressions with identical identifiers - /// do the same calculation (but the expressions don't need to be exactly equal - /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
- pub fn identifier(&self) -> String { - let mut cursor = std::io::Cursor::new(Vec::new()); - self.write_identifier(&mut cursor).unwrap(); - String::from_utf8(cursor.into_inner()).unwrap() - } - - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.degree(), - Expression::Sum(a, b) => max(a.degree(), b.degree()), - Expression::Product(a, b) => a.degree() + b.degree(), - Expression::Scaled(poly, _) => poly.degree(), - } - } - - /// Approximate the computational complexity of this expression. - pub fn complexity(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.complexity() + 5, - Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, - Expression::Product(a, b) => a.complexity() + b.complexity() + 30, - Expression::Scaled(poly, _) => poly.complexity() + 30, - } - } - - /// Square this expression. - pub fn square(self) -> Self { - self.clone() * self - } -} - -impl std::fmt::Debug for Expression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), - // Skip enum variant and print query struct directly to maintain backwards compatibility. 
- Expression::Fixed(query) => { - let mut debug_struct = f.debug_struct("Fixed"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Advice(query) => { - let mut debug_struct = f.debug_struct("Advice"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation); - // Only show advice's phase if it's not in first phase. - if query.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &query.phase); - } - debug_struct.finish() - } - Expression::Instance(query) => { - let mut debug_struct = f.debug_struct("Instance"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Challenge(challenge) => { - f.debug_tuple("Challenge").field(challenge).finish() - } - Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), - Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), - Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), - Expression::Scaled(poly, scalar) => { - f.debug_tuple("Scaled").field(poly).field(scalar).finish() - } - } - } -} - -impl Neg for Expression { - type Output = Expression; - fn neg(self) -> Self::Output { - Expression::Negated(Box::new(self)) - } -} - -impl Add for Expression { - type Output = Expression; - fn add(self, rhs: Expression) -> Expression { - Expression::Sum(Box::new(self), Box::new(rhs)) - } -} - -impl Sub for Expression { - type Output = Expression; - 
fn sub(self, rhs: Expression) -> Expression { - Expression::Sum(Box::new(self), Box::new(-rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: Expression) -> Expression { - Expression::Product(Box::new(self), Box::new(rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: F) -> Expression { - Expression::Scaled(Box::new(self), rhs) - } -} - -impl Sum for Expression { - fn sum>(iter: I) -> Self { - iter.reduce(|acc, x| acc + x) - .unwrap_or(Expression::Constant(F::ZERO)) - } -} - -impl Product for Expression { - fn product>(iter: I) -> Self { - iter.reduce(|acc, x| acc * x) - .unwrap_or(Expression::Constant(F::ONE)) - } -} - -/// Gate -#[derive(Clone, Debug)] -pub struct Gate { - name: String, - constraint_names: Vec, - polys: Vec>, -} - -impl Gate { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the name of the constraint at index `constraint_index`. - pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { - &self.polys - } -} - -/// Represents the minimal parameters that determine a `ConstraintSystem`. 
-#[allow(dead_code)] -pub struct PinnedConstraintSystem<'a, F: Field> { - num_fixed_columns: &'a usize, - num_advice_columns: &'a usize, - num_instance_columns: &'a usize, - num_selectors: &'a usize, - num_challenges: &'a usize, - advice_column_phase: &'a Vec, - challenge_phase: &'a Vec, - gates: PinnedGates<'a, F>, - advice_queries: &'a Vec<(Column, Rotation)>, - instance_queries: &'a Vec<(Column, Rotation)>, - fixed_queries: &'a Vec<(Column, Rotation)>, - permutation: &'a permutation::Argument, - lookups: &'a Vec>, - shuffles: &'a Vec>, - constants: &'a Vec>, - minimum_degree: &'a Option, -} - -impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); - debug_struct - .field("num_fixed_columns", self.num_fixed_columns) - .field("num_advice_columns", self.num_advice_columns) - .field("num_instance_columns", self.num_instance_columns) - .field("num_selectors", self.num_selectors); - // Only show multi-phase related fields if it's used. 
- if *self.num_challenges > 0 { - debug_struct - .field("num_challenges", self.num_challenges) - .field("advice_column_phase", self.advice_column_phase) - .field("challenge_phase", self.challenge_phase); - } - debug_struct - .field("gates", &self.gates) - .field("advice_queries", self.advice_queries) - .field("instance_queries", self.instance_queries) - .field("fixed_queries", self.fixed_queries) - .field("permutation", self.permutation) - .field("lookups", self.lookups); - if !self.shuffles.is_empty() { - debug_struct.field("shuffles", self.shuffles); - } - debug_struct - .field("constants", self.constants) - .field("minimum_degree", self.minimum_degree); - debug_struct.finish() - } -} - -struct PinnedGates<'a, F: Field>(&'a Vec>); +mod assigned; +mod circuit; +mod error; +mod evaluation; +mod keygen; +mod lookup; +pub mod permutation; +mod shuffle; +mod vanishing; -impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_list() - .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) - .finish() - } -} +mod prover; +mod verifier; -struct QueriesMap { - advice_map: HashMap<(Column, Rotation), usize>, - instance_map: HashMap<(Column, Rotation), usize>, - fixed_map: HashMap<(Column, Rotation), usize>, - advice: Vec<(Column, Rotation)>, - instance: Vec<(Column, Rotation)>, - fixed: Vec<(Column, Rotation)>, -} +pub use assigned::*; +pub use circuit::*; +pub use error::*; +pub use keygen::*; +pub use prover::*; +pub use verifier::*; -impl QueriesMap { - fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { - *self.advice_map.entry((col, rot)).or_insert_with(|| { - self.advice.push((col, rot)); - self.advice.len() - 1 - }) - } - fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { - *self.instance_map.entry((col, rot)).or_insert_with(|| { - self.instance.push((col, rot)); - self.instance.len() - 1 - }) - } - fn add_fixed(&mut self, col: 
Column, rot: Rotation) -> usize { - *self.fixed_map.entry((col, rot)).or_insert_with(|| { - self.fixed.push((col, rot)); - self.fixed.len() - 1 - }) - } -} - -impl QueriesMap { - fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { - match expr { - ExpressionMid::Constant(c) => Expression::Constant(*c), - ExpressionMid::Fixed(query) => { - let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); - let index = self.add_fixed(col, rot); - Expression::Fixed(FixedQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Advice(query) => { - let (col, rot) = ( - Column::new(query.column_index, Advice { phase: query.phase }), - query.rotation, - ); - let index = self.add_advice(col, rot); - Expression::Advice(AdviceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - phase: sealed::Phase(query.phase), - }) - } - ExpressionMid::Instance(query) => { - let (col, rot) = (Column::new(query.column_index, Instance), query.rotation); - let index = self.add_instance(col, rot); - Expression::Instance(InstanceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Challenge(c) => Expression::Challenge(*c), - ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), - ExpressionMid::Sum(lhs, rhs) => Expression::Sum( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Product(lhs, rhs) => Expression::Product( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), - } - } -} +use evaluation::Evaluator; +use std::io; /// List of queries (columns and rotations) used by a circuit #[derive(Debug, Clone)] @@ -871,470 +102,449 @@ impl Queries { } } -/// Collect queries used in gates while mapping those gates to 
equivalent ones with indexed -/// query references in the expressions. -fn collect_queries_gates( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.gates - .iter() - .map(|gate| Gate { - name: gate.name.clone(), - constraint_names: Vec::new(), - polys: vec![queries.as_expression(gate.polynomial())], - }) - .collect() -} - -/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed -/// query references in the expressions. -fn collect_queries_lookups( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.lookups - .iter() - .map(|lookup| lookup::Argument { - name: lookup.name.clone(), - input_expressions: lookup - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - table_expressions: lookup - .table_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() -} - -/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed -/// query references in the expressions. -fn collect_queries_shuffles( - cs2: &ConstraintSystemV2Backend, - queries: &mut QueriesMap, -) -> Vec> { - cs2.shuffles - .iter() - .map(|shuffle| shuffle::Argument { - name: shuffle.name.clone(), - input_expressions: shuffle - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - shuffle_expressions: shuffle - .shuffle_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() -} - -/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the -/// expressions of gates, lookups and shuffles into equivalent ones with indexed query -/// references. 
-pub(crate) fn collect_queries( - cs2: &ConstraintSystemV2Backend, -) -> ( - Queries, - Vec>, - Vec>, - Vec>, -) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; - - let gates = collect_queries_gates(cs2, &mut queries); - let lookups = collect_queries_lookups(cs2, &mut queries); - let shuffles = collect_queries_shuffles(cs2, &mut queries); - - // Each column used in a copy constraint involves a query at rotation current. - for column in cs2.permutation.get_columns() { - match column.column_type { - Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) - } - Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), - Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) - } - }; - } - - let mut num_advice_queries = vec![0; cs2.num_advice_columns]; - for (column, _) in queries.advice.iter() { - num_advice_queries[column.index()] += 1; - } - - let queries = Queries { - advice: queries.advice, - instance: queries.instance, - fixed: queries.fixed, - num_advice_queries, - }; - (queries, gates, lookups, shuffles) -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystem { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. 
- pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. - pub(crate) num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, - - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. 
- pub(crate) constants: Vec>, - - pub(crate) minimum_degree: Option, -} - -impl From> for ConstraintSystem { - fn from(cs2: ConstraintSystemV2Backend) -> Self { - let (queries, gates, lookups, shuffles) = collect_queries(&cs2); - ConstraintSystem { - num_fixed_columns: cs2.num_fixed_columns, - num_advice_columns: cs2.num_advice_columns, - num_instance_columns: cs2.num_instance_columns, - num_selectors: 0, - num_challenges: cs2.num_challenges, - unblinded_advice_columns: cs2.unblinded_advice_columns, - advice_column_phase: cs2 - .advice_column_phase - .into_iter() - .map(sealed::Phase) - .collect(), - challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), - gates, - advice_queries: queries.advice, - num_advice_queries: queries.num_advice_queries, - instance_queries: queries.instance, - fixed_queries: queries.fixed, - permutation: cs2.permutation.into(), - lookups, - shuffles, - general_column_annotations: cs2.general_column_annotations, - constants: Vec::new(), - minimum_degree: None, - } - } -} - -impl Default for ConstraintSystem { - fn default() -> ConstraintSystem { - ConstraintSystem { - num_fixed_columns: 0, - num_advice_columns: 0, - num_instance_columns: 0, - num_selectors: 0, - num_challenges: 0, - unblinded_advice_columns: Vec::new(), - advice_column_phase: Vec::new(), - challenge_phase: Vec::new(), - gates: vec![], - fixed_queries: Vec::new(), - advice_queries: Vec::new(), - num_advice_queries: Vec::new(), - instance_queries: Vec::new(), - permutation: permutation::Argument::new(), - lookups: Vec::new(), - shuffles: Vec::new(), - general_column_annotations: HashMap::new(), - constants: vec![], - minimum_degree: None, +/// This is a verifying key which allows for the verification of proofs for a +/// particular circuit. 
+#[derive(Clone, Debug)] +pub struct VerifyingKey { + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + /// Cached maximum degree of `cs` (which doesn't change after construction). + cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, + selectors: Vec>, + /// Whether selector compression is turned on or not. + compress_selectors: bool, +} + +// Current version of the VK +const VERSION: u8 = 0x03; + +impl VerifyingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a verifying key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + // Version byte that will be checked on read. + writer.write_all(&[VERSION])?; + let k = &self.domain.k(); + assert!(*k <= C::Scalar::S); + // k value fits in 1 byte + writer.write_all(&[*k as u8])?; + writer.write_all(&[self.compress_selectors as u8])?; + writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; + for commitment in &self.fixed_commitments { + commitment.write(writer, format)?; } - } -} + self.permutation.write(writer, format)?; -impl ConstraintSystem { - /// Obtain a pinned version of this constraint system; a structure with the - /// minimal parameters needed to determine the rest of the constraint - /// system. 
- pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { - PinnedConstraintSystem { - num_fixed_columns: &self.num_fixed_columns, - num_advice_columns: &self.num_advice_columns, - num_instance_columns: &self.num_instance_columns, - num_selectors: &self.num_selectors, - num_challenges: &self.num_challenges, - advice_column_phase: &self.advice_column_phase, - challenge_phase: &self.challenge_phase, - gates: PinnedGates(&self.gates), - fixed_queries: &self.fixed_queries, - advice_queries: &self.advice_queries, - instance_queries: &self.instance_queries, - permutation: &self.permutation, - lookups: &self.lookups, - shuffles: &self.shuffles, - constants: &self.constants, - minimum_degree: &self.minimum_degree, + if !self.compress_selectors { + assert!(self.selectors.is_empty()); } - } - - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; + // write self.selectors + for selector in &self.selectors { + // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write + for bits in selector.chunks(8) { + writer.write_all(&[crate::helpers::pack(bits)])?; } } - - panic!("get_advice_query_index called for non-existent query"); - } - - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } + Ok(()) + } + + /// Reads a verification key from a buffer. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. 
+ /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let mut version_byte = [0u8; 1]; + reader.read_exact(&mut version_byte)?; + if VERSION != version_byte[0] { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected version byte", + )); } - panic!("get_fixed_query_index called for non-existent query"); - } - - pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } + let mut k = [0u8; 1]; + reader.read_exact(&mut k)?; + let k = u8::from_le_bytes(k); + if k as u32 > C::Scalar::S { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!( + "circuit size value (k): {} exceeds maxium: {}", + k, + C::Scalar::S + ), + )); } - - panic!("get_instance_query_index called for non-existent query"); - } - - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.get_advice_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => { - self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Instance => { - self.get_instance_query_index(Column::::try_from(column).unwrap(), at) - } + let mut compress_selectors = [0u8; 1]; + reader.read_exact(&mut compress_selectors)?; + if compress_selectors[0] != 0 && compress_selectors[0] != 1 { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected compress_selectors not boolean", + )); } - } 
- - /// Returns the list of phases - pub fn phases(&self) -> impl Iterator { - let max_phase = self - .advice_column_phase - .iter() - .max() - .map(|phase| phase.0) - .unwrap_or_default(); - (0..=max_phase).map(sealed::Phase) - } - - /// Compute the degree of the constraint system (the maximum degree of all - /// constraints). - pub fn degree(&self) -> usize { - // The permutation argument will serve alongside the gates, so must be - // accounted for. - let mut degree = self.permutation.required_degree(); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.lookups - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), + let compress_selectors = compress_selectors[0] == 1; + let (domain, cs, _) = keygen::create_domain::( + k as u32, + #[cfg(feature = "circuit-params")] + params, ); + let mut num_fixed_columns = [0u8; 4]; + reader.read_exact(&mut num_fixed_columns)?; + let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.shuffles - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); + let fixed_commitments: Vec<_> = (0..num_fixed_columns) + .map(|_| C::read(reader, format)) + .collect::>()?; - // Account for each gate to ensure our quotient polynomial is the - // correct degree and that our extended domain is the right size. 
- degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); + let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) - } + let (cs, selectors) = if compress_selectors { + // read selectors + let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] + .into_iter() + .map(|mut selector| { + let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; + reader.read_exact(&mut selector_bytes)?; + for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { + crate::helpers::unpack(byte, bits); + } + Ok(selector) + }) + .collect::>()?; + let (cs, _) = cs.compress_selectors(selectors.clone()); + (cs, selectors) + } else { + // we still need to replace selectors with fixed Expressions in `cs` + let fake_selectors = vec![vec![]; cs.num_selectors]; + let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); + (cs, vec![]) + }; - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. + Ok(Self::from_parts( + domain, + fixed_commitments, + permutation, + cs, + selectors, + compress_selectors, + )) + } + + /// Writes a verifying key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a verification key from a slice of bytes using [`Self::read`]. 
+ pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } +} + +impl VerifyingKey { + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + 10 + (self.fixed_commitments.len() * C::byte_length(format)) + + self.permutation.bytes_length(format) + + self.selectors.len() + * (self + .selectors + .get(0) + .map(|selector| (selector.len() + 7) / 8) + .unwrap_or(0)) + } + + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + selectors: Vec>, + compress_selectors: bool, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. + let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. + transcript_repr: C::Scalar::ZERO, + selectors, + compress_selectors, + }; - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; + let s = format!("{:?}", vk.pinned()); - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. 
+ hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 + vk } - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) - + 1 // for at least one row - } + /// Hashes a verification key into a transcript. + pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; - /// Returns number of fixed columns - pub fn num_fixed_columns(&self) -> usize { - self.num_fixed_columns + Ok(()) } - /// Returns number of advice columns - pub fn num_advice_columns(&self) -> usize { - self.num_advice_columns + /// Obtains a pinned representation of this verification key that contains + /// the minimal information necessary to reconstruct the verification key. 
+ pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { + PinnedVerificationKey { + base_modulus: C::Base::MODULUS, + scalar_modulus: C::Scalar::MODULUS, + domain: self.domain.pinned(), + fixed_commitments: &self.fixed_commitments, + permutation: &self.permutation, + cs: self.cs.pinned(), + } } - /// Returns number of instance columns - pub fn num_instance_columns(&self) -> usize { - self.num_instance_columns + /// Returns commitments of fixed polynomials + pub fn fixed_commitments(&self) -> &Vec { + &self.fixed_commitments } - /// Returns number of selectors - pub fn num_selectors(&self) -> usize { - self.num_selectors + /// Returns `VerifyingKey` of permutation + pub fn permutation(&self) -> &permutation::VerifyingKey { + &self.permutation } - /// Returns number of challenges - pub fn num_challenges(&self) -> usize { - self.num_challenges + /// Returns `ConstraintSystem` + pub fn cs(&self) -> &ConstraintSystem { + &self.cs } - /// Returns phase of advice columns - pub fn advice_column_phase(&self) -> Vec { - self.advice_column_phase - .iter() - .map(|phase| phase.0) - .collect() + /// Returns representative of this `VerifyingKey` in transcripts + pub fn transcript_repr(&self) -> C::Scalar { + self.transcript_repr } +} - /// Returns phase of challenges - pub fn challenge_phase(&self) -> Vec { - self.challenge_phase.iter().map(|phase| phase.0).collect() - } +/// Minimal representation of a verification key that can be used to identify +/// its active contents. +#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedVerificationKey<'a, C: CurveAffine> { + base_modulus: &'static str, + scalar_modulus: &'static str, + domain: PinnedEvaluationDomain<'a, C::Scalar>, + cs: PinnedConstraintSystem<'a, C::Scalar>, + fixed_commitments: &'a Vec, + permutation: &'a permutation::VerifyingKey, +} - /// Returns gates - pub fn gates(&self) -> &Vec> { - &self.gates +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. 
+#[derive(Clone, Debug)] +pub struct ProvingKey { + vk: VerifyingKey, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: permutation::ProvingKey, + ev: Evaluator, +} + +impl ProvingKey +where + C::Scalar: FromUniformBytes<64>, +{ + /// Get the underlying [`VerifyingKey`]. + pub fn get_vk(&self) -> &VerifyingKey { + &self.vk + } + + /// Gets the total number of bytes in the serialization of `self` + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + let scalar_len = C::Scalar::default().to_repr().as_ref().len(); + self.vk.bytes_length(format) + + 12 + + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) + + polynomial_slice_byte_length(&self.fixed_values) + + polynomial_slice_byte_length(&self.fixed_polys) + + polynomial_slice_byte_length(&self.fixed_cosets) + + self.permutation.bytes_length() + } +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a proving key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. 
+ /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + self.vk.write(writer, format)?; + self.l0.write(writer, format)?; + self.l_last.write(writer, format)?; + self.l_active_row.write(writer, format)?; + write_polynomial_slice(&self.fixed_values, writer, format)?; + write_polynomial_slice(&self.fixed_polys, writer, format)?; + write_polynomial_slice(&self.fixed_cosets, writer, format)?; + self.permutation.write(writer, format)?; + Ok(()) + } + + /// Reads a proving key from a buffer. + /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let vk = VerifyingKey::::read::( + reader, + format, + #[cfg(feature = "circuit-params")] + params, + )?; + let l0 = Polynomial::read(reader, format)?; + let l_last = Polynomial::read(reader, format)?; + let l_active_row = Polynomial::read(reader, format)?; + let fixed_values = read_polynomial_vec(reader, format)?; + let fixed_polys = read_polynomial_vec(reader, format)?; + let fixed_cosets = read_polynomial_vec(reader, format)?; + let permutation = permutation::ProvingKey::read(reader, format)?; + let ev = Evaluator::new(vk.cs()); + Ok(Self { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation, + ev, + }) } - /// Returns general column annotations - pub fn general_column_annotations(&self) -> &HashMap { - &self.general_column_annotations + /// Writes a proving key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes } - /// Returns advice queries - pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { - &self.advice_queries + /// Reads a proving key from a slice of bytes using [`Self::read`]. 
+ pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) } +} - /// Returns instance queries - pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { - &self.instance_queries +impl VerifyingKey { + /// Get the underlying [`EvaluationDomain`]. + pub fn get_domain(&self) -> &EvaluationDomain { + &self.domain } +} - /// Returns fixed queries - pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { - &self.fixed_queries - } +#[derive(Clone, Copy, Debug)] +struct Theta; +type ChallengeTheta = ChallengeScalar; - /// Returns permutation argument - pub fn permutation(&self) -> &permutation::Argument { - &self.permutation - } +#[derive(Clone, Copy, Debug)] +struct Beta; +type ChallengeBeta = ChallengeScalar; - /// Returns lookup arguments - pub fn lookups(&self) -> &Vec> { - &self.lookups - } +#[derive(Clone, Copy, Debug)] +struct Gamma; +type ChallengeGamma = ChallengeScalar; - /// Returns shuffle arguments - pub fn shuffles(&self) -> &Vec> { - &self.shuffles - } +#[derive(Clone, Copy, Debug)] +struct Y; +type ChallengeY = ChallengeScalar; - /// Returns constants - pub fn constants(&self) -> &Vec> { - &self.constants - } -} +#[derive(Clone, Copy, Debug)] +struct X; +type ChallengeX = ChallengeScalar; diff --git a/common/src/plonk/assigned.rs b/common/src/plonk/assigned.rs new file mode 100644 index 0000000000..07de325678 --- /dev/null +++ b/common/src/plonk/assigned.rs @@ -0,0 +1,665 @@ +use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; + +use group::ff::Field; + +/// A value assigned to a cell within a circuit. +/// +/// Stored as a fraction, so the backend can use batch inversion. +/// +/// A denominator of zero maps to an assigned value of zero. +#[derive(Clone, Copy, Debug)] +pub enum Assigned { + /// The field element zero. 
+ Zero, + /// A value that does not require inversion to evaluate. + Trivial(F), + /// A value stored as a fraction to enable batch inversion. + Rational(F, F), +} + +impl From<&Assigned> for Assigned { + fn from(val: &Assigned) -> Self { + *val + } +} + +impl From<&F> for Assigned { + fn from(numerator: &F) -> Self { + Assigned::Trivial(*numerator) + } +} + +impl From for Assigned { + fn from(numerator: F) -> Self { + Assigned::Trivial(numerator) + } +} + +impl From<(F, F)> for Assigned { + fn from((numerator, denominator): (F, F)) -> Self { + Assigned::Rational(numerator, denominator) + } +} + +impl PartialEq for Assigned { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + // At least one side is directly zero. + (Self::Zero, Self::Zero) => true, + (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + x.is_zero_vartime() + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, + (Self::Trivial(x), Self::Rational(numerator, denominator)) + | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { + &(*x * denominator) == numerator + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, + } + } +} + +impl Eq for Assigned {} + +impl Neg for Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + match self { + Self::Zero => Self::Zero, + Self::Trivial(numerator) => Self::Trivial(-numerator), + Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), + } + } +} + +impl Neg for &Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + -*self + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + // One side is directly zero. + (Self::Zero, _) => rhs, + (_, Self::Zero) => self, + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + other + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator + denominator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + self + Self::Trivial(rhs) + } +} + +impl Add for &Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for Assigned { + type Output = Assigned; + fn add(self, rhs: &Self) -> Assigned { + self + *rhs + } +} + +impl Add> for &Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for &Assigned { + type Output = Assigned; + fn add(self, rhs: &Assigned) -> Assigned { + *self + *rhs + } +} + +impl AddAssign for Assigned { + fn add_assign(&mut self, rhs: Self) { + *self = *self + rhs; + } +} + +impl AddAssign<&Assigned> for Assigned { + fn add_assign(&mut self, rhs: &Self) { + *self = *self + rhs; + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + self + (-rhs) + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + self + (-rhs) + } +} + +impl Sub for &Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for Assigned { + type Output = Assigned; + fn sub(self, rhs: &Self) -> Assigned { + self - *rhs + } +} + +impl Sub> for &Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for &Assigned { + type 
Output = Assigned; + fn sub(self, rhs: &Assigned) -> Assigned { + *self - *rhs + } +} + +impl SubAssign for Assigned { + fn sub_assign(&mut self, rhs: Self) { + *self = *self - rhs; + } +} + +impl SubAssign<&Assigned> for Assigned { + fn sub_assign(&mut self, rhs: &Self) { + *self = *self - rhs; + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + (Self::Zero, _) | (_, Self::Zero) => Self::Zero, + (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + self * Self::Trivial(rhs) + } +} + +impl Mul for &Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + *self * rhs + } +} + +impl Mul<&Assigned> for Assigned { + type Output = Assigned; + fn mul(self, rhs: &Assigned) -> Assigned { + self * *rhs + } +} + +impl MulAssign for Assigned { + fn mul_assign(&mut self, rhs: Self) { + *self = *self * rhs; + } +} + +impl MulAssign<&Assigned> for Assigned { + fn mul_assign(&mut self, rhs: &Self) { + *self = *self * rhs; + } +} + +impl Assigned { + /// Returns the numerator. + pub fn numerator(&self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => *x, + Self::Rational(numerator, _) => *numerator, + } + } + + /// Returns the denominator, if non-trivial. + pub fn denominator(&self) -> Option { + match self { + Self::Zero => None, + Self::Trivial(_) => None, + Self::Rational(_, denominator) => Some(*denominator), + } + } + + /// Returns true iff this element is zero. 
+ pub fn is_zero_vartime(&self) -> bool { + match self { + Self::Zero => true, + Self::Trivial(x) => x.is_zero_vartime(), + // Assigned maps x/0 -> 0. + Self::Rational(numerator, denominator) => { + numerator.is_zero_vartime() || denominator.is_zero_vartime() + } + } + } + + /// Doubles this element. + #[must_use] + pub fn double(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.double()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.double(), *denominator) + } + } + } + + /// Squares this element. + #[must_use] + pub fn square(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.square()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.square(), denominator.square()) + } + } + } + + /// Cubes this element. + #[must_use] + pub fn cube(&self) -> Self { + self.square() * self + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Rational(F::ONE, *x), + Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), + } + } + + /// Evaluates this assigned value directly, performing an unbatched inversion if + /// necessary. + /// + /// If the denominator is zero, this returns zero. + pub fn evaluate(self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => x, + Self::Rational(numerator, denominator) => { + if denominator == F::ONE { + numerator + } else { + numerator * denominator.invert().unwrap_or(F::ZERO) + } + } + } + } +} + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::Assigned; + // We use (numerator, denominator) in the comments below to denote a rational. 
+ #[test] + fn add_trivial_to_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // 2 + (1,0) = 2 + 0 = 2 + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn add_rational_to_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) + (1,0) = (1,2) + 0 = (1,2) + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn sub_trivial_from_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - 2 = 0 - 2 = -2 + // This fails if subtraction is implemented using normal rules for rationals. + assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // 2 - (1,0) = 2 - 0 = 2 + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn sub_rational_from_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - (1,2) = 0 - (1,2) = -(1,2) + // This fails if subtraction is implemented using normal rules for rationals. 
+ assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // (1,2) - (1,0) = (1,2) - 0 = (1,2) + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn mul_rational_by_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) * (1,0) = (1,2) * 0 = 0 + assert_eq!((a * b).evaluate(), Fp::zero()); + + // (1,0) * (1,2) = 0 * (1,2) = 0 + assert_eq!((b * a).evaluate(), Fp::zero()); + } +} + +#[cfg(test)] +mod proptests { + use std::{ + cmp, + ops::{Add, Mul, Neg, Sub}, + }; + + use group::ff::Field; + use halo2curves::pasta::Fp; + use proptest::{collection::vec, prelude::*, sample::select}; + + use super::Assigned; + + trait UnaryOperand: Neg { + fn double(&self) -> Self; + fn square(&self) -> Self; + fn cube(&self) -> Self; + fn inv0(&self) -> Self; + } + + impl UnaryOperand for F { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert().unwrap_or(F::ZERO) + } + } + + impl UnaryOperand for Assigned { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert() + } + } + + #[derive(Clone, Debug)] + enum UnaryOperator { + Neg, + Double, + Square, + Cube, + Inv0, + } + + const UNARY_OPERATORS: &[UnaryOperator] = &[ + UnaryOperator::Neg, + UnaryOperator::Double, + UnaryOperator::Square, + UnaryOperator::Cube, + UnaryOperator::Inv0, + ]; + + impl UnaryOperator { + fn apply(&self, a: F) -> F { + match self { + Self::Neg => -a, + Self::Double => a.double(), + Self::Square => a.square(), + Self::Cube => a.cube(), + Self::Inv0 => a.inv0(), + } + } + } + + trait BinaryOperand: Sized + Add + Sub + Mul {} + impl BinaryOperand for F {} + impl BinaryOperand for Assigned {} + + #[derive(Clone, Debug)] + 
enum BinaryOperator { + Add, + Sub, + Mul, + } + + const BINARY_OPERATORS: &[BinaryOperator] = &[ + BinaryOperator::Add, + BinaryOperator::Sub, + BinaryOperator::Mul, + ]; + + impl BinaryOperator { + fn apply(&self, a: F, b: F) -> F { + match self { + Self::Add => a + b, + Self::Sub => a - b, + Self::Mul => a * b, + } + } + } + + #[derive(Clone, Debug)] + enum Operator { + Unary(UnaryOperator), + Binary(BinaryOperator), + } + + prop_compose! { + /// Use narrow that can be easily reduced. + fn arb_element()(val in any::()) -> Fp { + Fp::from(val) + } + } + + prop_compose! { + fn arb_trivial()(element in arb_element()) -> Assigned { + Assigned::Trivial(element) + } + } + + prop_compose! { + /// Generates half of the denominators as zero to represent a deferred inversion. + fn arb_rational()( + numerator in arb_element(), + denominator in prop_oneof![ + 1 => Just(Fp::zero()), + 2 => arb_element(), + ], + ) -> Assigned { + Assigned::Rational(numerator, denominator) + } + } + + prop_compose! { + fn arb_operators(num_unary: usize, num_binary: usize)( + unary in vec(select(UNARY_OPERATORS), num_unary), + binary in vec(select(BINARY_OPERATORS), num_binary), + ) -> Vec { + unary.into_iter() + .map(Operator::Unary) + .chain(binary.into_iter().map(Operator::Binary)) + .collect() + } + } + + prop_compose! { + fn arb_testcase()( + num_unary in 0usize..5, + num_binary in 0usize..5, + )( + values in vec( + prop_oneof![ + 1 => Just(Assigned::Zero), + 2 => arb_trivial(), + 2 => arb_rational(), + ], + // Ensure that: + // - we have at least one value to apply unary operators to. + // - we can apply every binary operator pairwise sequentially. + cmp::max(usize::from(num_unary > 0), num_binary + 1)), + operations in arb_operators(num_unary, num_binary).prop_shuffle(), + ) -> (Vec>, Vec) { + (values, operations) + } + } + + proptest! { + #[test] + fn operation_commutativity((values, operations) in arb_testcase()) { + // Evaluate the values at the start. 
+ let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); + + // Apply the operations to both the deferred and evaluated values. + fn evaluate( + items: Vec, + operators: &[Operator], + ) -> F { + let mut ops = operators.iter(); + + // Process all binary operators. We are guaranteed to have exactly as many + // binary operators as we need calls to the reduction closure. + let mut res = items.into_iter().reduce(|mut a, b| loop { + match ops.next() { + Some(Operator::Unary(op)) => a = op.apply(a), + Some(Operator::Binary(op)) => break op.apply(a, b), + None => unreachable!(), + } + }).unwrap(); + + // Process any unary operators that weren't handled in the reduce() call + // above (either if we only had one item, or there were unary operators + // after the last binary operator). We are guaranteed to have no binary + // operators remaining at this point. + loop { + match ops.next() { + Some(Operator::Unary(op)) => res = op.apply(res), + Some(Operator::Binary(_)) => unreachable!(), + None => break res, + } + } + } + let deferred_result = evaluate(values, &operations); + let evaluated_result = evaluate(elements, &operations); + + // The two should be equal, i.e. deferred inversion should commute with the + // list of operations. 
+ assert_eq!(deferred_result.evaluate(), evaluated_result); + } + } +} diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs new file mode 100644 index 0000000000..0b4fee82e8 --- /dev/null +++ b/common/src/plonk/circuit.rs @@ -0,0 +1,3162 @@ +use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; +use crate::circuit::layouter::SyncDeps; +use crate::dev::metadata; +use crate::plonk::WitnessCollection; +use crate::{ + circuit::{Layouter, Region, Value}, + poly::{batch_invert_assigned, Polynomial}, +}; +use core::cmp::max; +use core::ops::{Add, Mul}; +use ff::Field; +use halo2_middleware::circuit::{ + AdviceQueryMid, Challenge, ExpressionMid, FixedQueryMid, InstanceQueryMid, +}; +use halo2_middleware::lookup::ArgumentV2; +use halo2_middleware::poly::Rotation; +use sealed::SealedPhase; +use std::collections::BTreeSet; +use std::collections::HashMap; +use std::fmt::Debug; +use std::iter::{Product, Sum}; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +mod compress_selectors; + +/// A column type +pub trait ColumnType: + 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into +{ + /// Return expression from cell + fn query_cell(&self, index: usize, at: Rotation) -> Expression; +} + +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Column { + index: usize, + column_type: C, +} + +impl Column { + pub(crate) fn new(index: usize, column_type: C) -> Self { + Column { index, column_type } + } + + /// Index of this column. + pub fn index(&self) -> usize { + self.index + } + + /// Type of this column. 
+ pub fn column_type(&self) -> &C { + &self.column_type + } + + /// Return expression from column at a relative position + pub fn query_cell(&self, at: Rotation) -> Expression { + self.column_type.query_cell(self.index, at) + } + + /// Return expression from column at the current row + pub fn cur(&self) -> Expression { + self.query_cell(Rotation::cur()) + } + + /// Return expression from column at the next row + pub fn next(&self) -> Expression { + self.query_cell(Rotation::next()) + } + + /// Return expression from column at the previous row + pub fn prev(&self) -> Expression { + self.query_cell(Rotation::prev()) + } + + /// Return expression from column at the specified rotation + pub fn rot(&self, rotation: i32) -> Expression { + self.query_cell(Rotation(rotation)) + } +} + +impl Ord for Column { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match self.column_type.into().cmp(&other.column_type.into()) { + // Indices are assigned within column types. + std::cmp::Ordering::Equal => self.index.cmp(&other.index), + order => order, + } + } +} + +impl PartialOrd for Column { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +pub(crate) mod sealed { + /// Phase of advice column + #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] + pub struct Phase(pub(crate) u8); + + impl Phase { + pub fn prev(&self) -> Option { + self.0.checked_sub(1).map(Phase) + } + } + + impl SealedPhase for Phase { + fn to_sealed(self) -> Phase { + self + } + } + + /// Sealed trait to help keep `Phase` private. 
+ pub trait SealedPhase { + fn to_sealed(self) -> Phase; + } +} + +/// Phase of advice column +pub trait Phase: SealedPhase {} + +impl Phase for P {} + +/// First phase +#[derive(Debug)] +pub struct FirstPhase; + +impl SealedPhase for super::FirstPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(0) + } +} + +/// Second phase +#[derive(Debug)] +pub struct SecondPhase; + +impl SealedPhase for super::SecondPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(1) + } +} + +/// Third phase +#[derive(Debug)] +pub struct ThirdPhase; + +impl SealedPhase for super::ThirdPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(2) + } +} + +/// An advice column +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub struct Advice { + pub(crate) phase: sealed::Phase, +} + +impl Default for Advice { + fn default() -> Advice { + Advice { + phase: FirstPhase.to_sealed(), + } + } +} + +impl Advice { + /// Returns `Advice` in given `Phase` + pub fn new(phase: P) -> Advice { + Advice { + phase: phase.to_sealed(), + } + } + + /// Phase of this column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +impl std::fmt::Debug for Advice { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. 
+ if self.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &self.phase); + } + debug_struct.finish() + } +} + +/// A fixed column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Fixed; + +/// An instance column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Instance; + +/// An enum over the Advice, Fixed, Instance structs +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub enum Any { + /// An Advice variant + Advice(Advice), + /// A Fixed variant + Fixed, + /// An Instance variant + Instance, +} + +impl Any { + /// Returns Advice variant in `FirstPhase` + pub fn advice() -> Any { + Any::Advice(Advice::default()) + } + + /// Returns Advice variant in given `Phase` + pub fn advice_in(phase: P) -> Any { + Any::Advice(Advice::new(phase)) + } +} + +impl std::fmt::Debug for Any { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Any::Advice(advice) => { + let mut debug_struct = f.debug_struct("Advice"); + // Only show advice's phase if it's not in first phase. + if advice.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &advice.phase); + } + debug_struct.finish() + } + Any::Fixed => f.debug_struct("Fixed").finish(), + Any::Instance => f.debug_struct("Instance").finish(), + } + } +} + +impl Ord for Any { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match (self, other) { + (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, + (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), + // Across column types, sort Instance < Advice < Fixed. 
+ (Any::Instance, Any::Advice(_)) + | (Any::Advice(_), Any::Fixed) + | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, + (Any::Fixed, Any::Instance) + | (Any::Fixed, Any::Advice(_)) + | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, + } + } +} + +impl PartialOrd for Any { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl ColumnType for Advice { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + phase: self.phase, + }) + } +} +impl ColumnType for Fixed { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Instance { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Any { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + match self { + Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + phase: *phase, + }), + Any::Fixed => Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }), + Any::Instance => Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }), + } + } +} + +impl From for Any { + fn from(advice: Advice) -> Any { + Any::Advice(advice) + } +} + +impl From for Any { + fn from(_: Fixed) -> Any { + Any::Fixed + } +} + +impl From for Any { + fn from(_: Instance) -> Any { + Any::Instance + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Advice(advice.column_type), + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + 
column_type: Any::Fixed, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Instance, + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Advice(advice) => Ok(Column { + index: any.index(), + column_type: *advice, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Fixed => Ok(Column { + index: any.index(), + column_type: Fixed, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Instance => Ok(Column { + index: any.index(), + column_type: Instance, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +/// A selector, representing a fixed boolean value per row of the circuit. +/// +/// Selectors can be used to conditionally enable (portions of) gates: +/// ``` +/// use halo2_proofs::poly::Rotation; +/// # use halo2curves::pasta::Fp; +/// # use halo2_proofs::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let a = meta.query_advice(a, Rotation::prev()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let s = meta.query_selector(s); +/// +/// // On rows where the selector is enabled, a is constrained to equal b. +/// // On rows where the selector is disabled, a and b can take any value. 
+/// vec![s * (a - b)] +/// }); +/// ``` +/// +/// Selectors are disabled on all rows by default, and must be explicitly enabled on each +/// row when required: +/// ``` +/// use halo2_proofs::{ +/// circuit::{Chip, Layouter, Value}, +/// plonk::{Advice, Column, Error, Selector}, +/// }; +/// use ff::Field; +/// # use halo2_proofs::plonk::Fixed; +/// +/// struct Config { +/// a: Column, +/// b: Column, +/// s: Selector, +/// } +/// +/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { +/// let config = chip.config(); +/// # let config: Config = todo!(); +/// layouter.assign_region(|| "bar", |mut region| { +/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; +/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; +/// config.s.enable(&mut region, 1) +/// })?; +/// Ok(()) +/// } +/// ``` +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Selector(pub(crate) usize, bool); + +impl Selector { + /// Enable this selector at the given offset within the given region. + pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { + region.enable_selector(|| "", self, offset) + } + + /// Is this selector "simple"? Simple selectors can only be multiplied + /// by expressions that contain no other simple selectors. 
+ pub fn is_simple(&self) -> bool { + self.1 + } + + /// Returns index of this selector + pub fn index(&self) -> usize { + self.0 + } + + /// Return expression from selector + pub fn expr(&self) -> Expression { + Expression::Selector(*self) + } +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl FixedQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, + /// Phase of this advice column + pub(crate) phase: sealed::Phase, +} + +impl AdviceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } + + /// Phase of this advice column + pub fn phase(&self) -> u8 { + self.phase.0 + } +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQuery { + /// Query index + pub(crate) index: Option, + /// Column index + pub(crate) column_index: usize, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl InstanceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// A fixed column of a lookup table. 
+/// +/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns +/// can currently only contain a single table, but they may be used in multiple lookup +/// arguments via [`ConstraintSystem::lookup`]. +/// +/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; +/// they cannot simultaneously be used as general fixed columns. +/// +/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct TableColumn { + /// The fixed column that this table column is stored in. + /// + /// # Security + /// + /// This inner column MUST NOT be exposed in the public API, or else chip developers + /// can load lookup tables into their circuits without default-value-filling the + /// columns, which can cause soundness bugs. + inner: Column, +} + +impl TableColumn { + /// Returns inner column + pub fn inner(&self) -> Column { + self.inner + } +} + +/// This trait allows a [`Circuit`] to direct some backend to assign a witness +/// for a constraint system. +pub trait Assignment { + /// Creates a new region and enters into it. + /// + /// Panics if we are currently in a region (if `exit_region` was not called). + /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. + /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn enter_region(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Allows the developer to include an annotation for an specific column within a `Region`. + /// + /// This is usually useful for debugging circuit failures. + fn annotate_column(&mut self, annotation: A, column: Column) + where + A: FnOnce() -> AR, + AR: Into; + + /// Exits the current region. + /// + /// Panics if we are not currently in a region (if `enter_region` was not called). 
+ /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. + /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn exit_region(&mut self); + + /// Enables a selector at the given row. + fn enable_selector( + &mut self, + annotation: A, + selector: &Selector, + row: usize, + ) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into; + + /// Queries the cell of an instance column at a particular absolute row. + /// + /// Returns the cell's value, if known. + fn query_instance(&self, column: Column, row: usize) -> Result, Error>; + + /// Assign an advice column value (witness) + fn assign_advice( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign a fixed value + fn assign_fixed( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign two cells to have the same value + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error>; + + /// Fills a fixed `column` starting from the given `row` with value `to`. + fn fill_from_row( + &mut self, + column: Column, + row: usize, + to: Value>, + ) -> Result<(), Error>; + + /// Queries the value of the given challenge. + /// + /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. + fn get_challenge(&self, challenge: Challenge) -> Value; + + /// Creates a new (sub)namespace and enters into it. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. 
+ /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Exits out of the existing namespace. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn pop_namespace(&mut self, gadget_name: Option); +} + +/// A floor planning strategy for a circuit. +/// +/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used +/// within. +pub trait FloorPlanner { + /// Given the provided `cs`, synthesize the given circuit. + /// + /// `constants` is the list of fixed columns that the layouter may use to assign + /// global constant values. These columns will all have been equality-enabled. + /// + /// Internally, a floor planner will perform the following operations: + /// - Instantiate a [`Layouter`] for this floor planner. + /// - Perform any necessary setup or measurement tasks, which may involve one or more + /// calls to `Circuit::default().synthesize(config, &mut layouter)`. + /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. + fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error>; +} + +/// This is a trait that circuits provide implementations for so that the +/// backend prover can ask the circuit to synthesize using some given +/// [`ConstraintSystem`] implementation. +pub trait Circuit { + /// This is a configuration object that stores things like columns. + type Config: Clone; + /// The floor planner used for this circuit. This is an associated type of the + /// `Circuit` trait because its behaviour is circuit-critical. + type FloorPlanner: FloorPlanner; + /// Optional circuit configuration parameters. Requires the `circuit-params` feature. 
+ #[cfg(feature = "circuit-params")] + type Params: Default; + + /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to + /// `None`). For most circuits, this will be equal to `Self::default()`. + fn without_witnesses(&self) -> Self; + + /// Returns a reference to the parameters that should be used to configure the circuit. + /// Requires the `circuit-params` feature. + #[cfg(feature = "circuit-params")] + fn params(&self) -> Self::Params { + Self::Params::default() + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. Takes a runtime parameter. The default + /// implementation calls `configure` ignoring the `_params` argument in order to easily support + /// circuits that don't use configuration parameters. + #[cfg(feature = "circuit-params")] + fn configure_with_params( + meta: &mut ConstraintSystem, + _params: Self::Params, + ) -> Self::Config { + Self::configure(meta) + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. + fn configure(meta: &mut ConstraintSystem) -> Self::Config; + + /// Given the provided `cs`, synthesize the circuit. The concrete type of + /// the caller will be different depending on the context, and they may or + /// may not expect to have a witness present. + fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; +} + +/// Low-degree expression representing an identity that must hold over the committed columns. 
+#[derive(Clone, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a virtual selector + Selector(Selector), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQuery), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQuery), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQuery), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl Into> for Expression { + fn into(self) -> ExpressionMid { + match self { + Expression::Constant(c) => ExpressionMid::Constant(c), + Expression::Selector(_) => unreachable!(), + Expression::Fixed(FixedQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Fixed(FixedQueryMid { + column_index, + rotation, + }), + Expression::Advice(AdviceQuery { + column_index, + rotation, + phase, + .. + }) => ExpressionMid::Advice(AdviceQueryMid { + column_index, + rotation, + phase: phase.0, + }), + Expression::Instance(InstanceQuery { + column_index, + rotation, + .. 
+ }) => ExpressionMid::Instance(InstanceQueryMid { + column_index, + rotation, + }), + Expression::Challenge(c) => ExpressionMid::Challenge(c), + Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), + Expression::Sum(lhs, rhs) => { + ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Product(lhs, rhs) => { + ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), + } + } +} + +impl Expression { + /// Make side effects + pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { + match self { + Expression::Constant(_) => (), + Expression::Selector(selector) => { + if !cells.queried_selectors.contains(selector) { + cells.queried_selectors.push(*selector); + } + } + Expression::Fixed(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Fixed, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); + } + } + Expression::Advice(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Advice { phase: query.phase }, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_advice_index(col, query.rotation)); + } + } + Expression::Instance(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Instance, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_instance_index(col, query.rotation)); + } + } + Expression::Challenge(_) => (), + Expression::Negated(a) => a.query_cells(cells), + Expression::Sum(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Product(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Scaled(a, _) => 
a.query_cells(cells), + }; + } + + /// Evaluate the polynomial using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + product(a, b) + } + Expression::Scaled(a, f) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + 
instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + scaled(a, *f) + } + } + } + + /// Evaluate the polynomial lazily using the provided closures to perform the + /// operations. + #[allow(clippy::too_many_arguments)] + pub fn evaluate_lazy( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + zero: &T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + let b = b.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let (a, b) = if a.complexity() <= b.complexity() { + (a, b) + } else { + (b, a) + }; + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + + if a == *zero { + a + } else { + let b = b.evaluate_lazy( + 
constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + product(a, b) + } + } + Expression::Scaled(a, f) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + scaled(a, *f) + } + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), + Expression::Fixed(query) => { + write!( + writer, + "fixed[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Advice(query) => { + write!( + writer, + "advice[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Instance(query) => { + write!( + writer, + "instance[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Challenge(challenge) => { + write!(writer, "challenge[{}]", challenge.index()) + } + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Scaled(a, f) => { + a.write_identifier(writer)?; + write!(writer, "*{f:?}") + } + } + } + + /// Identifier for this expression. Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
+ pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.degree(), + Expression::Sum(a, b) => max(a.degree(), b.degree()), + Expression::Product(a, b) => a.degree() + b.degree(), + Expression::Scaled(poly, _) => poly.degree(), + } + } + + /// Approximate the computational complexity of this expression. + pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + Expression::Scaled(poly, _) => poly.complexity() + 30, + } + } + + /// Square this expression. + pub fn square(self) -> Self { + self.clone() * self + } + + /// Returns whether or not this expression contains a simple `Selector`. 
+ fn contains_simple_selector(&self) -> bool { + self.evaluate( + &|_| false, + &|selector| selector.is_simple(), + &|_| false, + &|_| false, + &|_| false, + &|_| false, + &|a| a, + &|a, b| a || b, + &|a, b| a || b, + &|a, _| a, + ) + } + + /// Extracts a simple selector from this gate, if present + fn extract_simple_selector(&self) -> Option { + let op = |a, b| match (a, b) { + (Some(a), None) | (None, Some(a)) => Some(a), + (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), + _ => None, + }; + + self.evaluate( + &|_| None, + &|selector| { + if selector.is_simple() { + Some(selector) + } else { + None + } + }, + &|_| None, + &|_| None, + &|_| None, + &|_| None, + &|a| a, + &op, + &op, + &|a, _| a, + ) + } +} + +impl std::fmt::Debug for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), + Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), + // Skip enum variant and print query struct directly to maintain backwards compatibility. + Expression::Fixed(query) => { + let mut debug_struct = f.debug_struct("Fixed"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Advice(query) => { + let mut debug_struct = f.debug_struct("Advice"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation); + // Only show advice's phase if it's not in first phase. 
+ if query.phase != FirstPhase.to_sealed() { + debug_struct.field("phase", &query.phase); + } + debug_struct.finish() + } + Expression::Instance(query) => { + let mut debug_struct = f.debug_struct("Instance"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Challenge(challenge) => { + f.debug_tuple("Challenge").field(challenge).finish() + } + Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), + Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), + Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), + Expression::Scaled(poly, scalar) => { + f.debug_tuple("Scaled").field(poly).field(scalar).finish() + } + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in an addition"); + } + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + fn sub(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in a subtraction"); + } + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() && rhs.contains_simple_selector() { + panic!("attempted to multiply two expressions containing simple selectors"); + } + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for 
Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Scaled(Box::new(self), rhs) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} + +/// Represents an index into a vector where each entry corresponds to a distinct +/// point that polynomials are queried at. +#[derive(Copy, Clone, Debug)] +pub(crate) struct PointIndex(pub usize); + +/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset +/// within a custom gate. +#[derive(Clone, Debug)] +pub struct VirtualCell { + pub(crate) column: Column, + pub(crate) rotation: Rotation, +} + +impl>> From<(Col, Rotation)> for VirtualCell { + fn from((column, rotation): (Col, Rotation)) -> Self { + VirtualCell { + column: column.into(), + rotation, + } + } +} + +/// An individual polynomial constraint. +/// +/// These are returned by the closures passed to `ConstraintSystem::create_gate`. +#[derive(Debug)] +pub struct Constraint { + name: String, + poly: Expression, +} + +impl From> for Constraint { + fn from(poly: Expression) -> Self { + Constraint { + name: "".to_string(), + poly, + } + } +} + +impl> From<(S, Expression)> for Constraint { + fn from((name, poly): (S, Expression)) -> Self { + Constraint { + name: name.as_ref().to_string(), + poly, + } + } +} + +impl From> for Vec> { + fn from(poly: Expression) -> Self { + vec![Constraint { + name: "".to_string(), + poly, + }] + } +} + +/// A set of polynomial constraints with a common selector. 
+/// +/// ``` +/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; +/// use halo2curves::pasta::Fp; +/// # use halo2_proofs::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let next = meta.query_advice(a, Rotation::next()); +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s_ternary = meta.query_selector(s); +/// +/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); +/// +/// Constraints::with_selector( +/// s_ternary, +/// std::array::IntoIter::new([ +/// ("a is boolean", a.clone() * one_minus_a.clone()), +/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), +/// ]), +/// ) +/// }); +/// ``` +/// +/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to +/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, +/// you can pass an array directly. +#[derive(Debug)] +pub struct Constraints>, Iter: IntoIterator> { + selector: Expression, + constraints: Iter, +} + +impl>, Iter: IntoIterator> Constraints { + /// Constructs a set of constraints that are controlled by the given selector. + /// + /// Each constraint `c` in `iterator` will be converted into the constraint + /// `selector * c`. 
+ pub fn with_selector(selector: Expression, constraints: Iter) -> Self { + Constraints { + selector, + constraints, + } + } +} + +fn apply_selector_to_constraint>>( + (selector, c): (Expression, C), +) -> Constraint { + let constraint: Constraint = c.into(); + Constraint { + name: constraint.name, + poly: selector * constraint.poly, + } +} + +type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; +type ConstraintsIterator = std::iter::Map< + std::iter::Zip>, I>, + ApplySelectorToConstraint, +>; + +impl>, Iter: IntoIterator> IntoIterator + for Constraints +{ + type Item = Constraint; + type IntoIter = ConstraintsIterator; + + fn into_iter(self) -> Self::IntoIter { + std::iter::repeat(self.selector) + .zip(self.constraints) + .map(apply_selector_to_constraint) + } +} + +/// A Gate contains a single polynomial identity with a name as metadata. +#[derive(Clone, Debug)] +pub struct GateV2Backend { + name: String, + poly: ExpressionMid, +} + +impl GateV2Backend { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &ExpressionMid { + &self.poly + } +} + +/// Gate +#[derive(Clone, Debug)] +pub struct Gate { + name: String, + constraint_names: Vec, + polys: Vec>, + /// We track queried selectors separately from other cells, so that we can use them to + /// trigger debug checks on gates. + queried_selectors: Vec, + queried_cells: Vec, +} + +impl Gate { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. 
+ pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } + + pub(crate) fn queried_selectors(&self) -> &[Selector] { + &self.queried_selectors + } + + pub(crate) fn queried_cells(&self) -> &[VirtualCell] { + &self.queried_cells + } +} + +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct PreprocessingV2 { + // TODO(Edu): Can we replace this by a simpler structure? + pub(crate) permutation: permutation::keygen::AssemblyMid, + pub(crate) fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. +#[derive(Debug, Clone)] +pub struct CompiledCircuitV2 { + pub(crate) preprocessing: PreprocessingV2, + pub(crate) cs: ConstraintSystemV2Backend, +} + +struct QueriesMap { + advice_map: HashMap<(Column, Rotation), usize>, + instance_map: HashMap<(Column, Rotation), usize>, + fixed_map: HashMap<(Column, Rotation), usize>, + advice: Vec<(Column, Rotation)>, + instance: Vec<(Column, Rotation)>, + fixed: Vec<(Column, Rotation)>, +} + +impl QueriesMap { + fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { + *self.advice_map.entry((col, rot)).or_insert_with(|| { + self.advice.push((col, rot)); + self.advice.len() - 1 + }) + } + fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { + *self.instance_map.entry((col, rot)).or_insert_with(|| { + self.instance.push((col, rot)); + self.instance.len() - 1 + }) + } + fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { + *self.fixed_map.entry((col, rot)).or_insert_with(|| { + self.fixed.push((col, rot)); + self.fixed.len() - 1 + }) + } +} + +impl QueriesMap { + fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { + match expr { + ExpressionMid::Constant(c) => 
Expression::Constant(*c), + ExpressionMid::Fixed(query) => { + let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); + let index = self.add_fixed(col, rot); + Expression::Fixed(FixedQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Advice(query) => { + let (col, rot) = ( + Column::new( + query.column_index, + Advice { + phase: sealed::Phase(query.phase), + }, + ), + query.rotation, + ); + let index = self.add_advice(col, rot); + Expression::Advice(AdviceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + phase: sealed::Phase(query.phase), + }) + } + ExpressionMid::Instance(query) => { + let (col, rot) = (Column::new(query.column_index, Instance), query.rotation); + let index = self.add_instance(col, rot); + Expression::Instance(InstanceQuery { + index: Some(index), + column_index: query.column_index, + rotation: query.rotation, + }) + } + ExpressionMid::Challenge(c) => Expression::Challenge(*c), + ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), + ExpressionMid::Sum(lhs, rhs) => Expression::Sum( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Product(lhs, rhs) => Expression::Product( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), + } + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemV2Backend { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. 
+ pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, +} + +impl Into> for ConstraintSystem { + fn into(self) -> ConstraintSystemV2Backend { + ConstraintSystemV2Backend { + num_fixed_columns: self.num_fixed_columns, + num_advice_columns: self.num_advice_columns, + num_instance_columns: self.num_instance_columns, + num_challenges: self.num_challenges, + unblinded_advice_columns: self.unblinded_advice_columns.clone(), + advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), + gates: self + .gates + .iter() + .map(|g| { + g.polys.clone().into_iter().enumerate().map(|(i, e)| { + let name = match g.constraint_name(i) { + "" => g.name.clone(), + constraint_name => format!("{}:{}", g.name, constraint_name), + }; + GateV2Backend { + name, + poly: e.into(), + } + }) + }) + .flatten() + .collect(), + permutation: self.permutation.clone(), + lookups: self + .lookups + .iter() + .map(|l| ArgumentV2 { + name: l.name.clone(), + input_expressions: l + .input_expressions + 
.clone() + .into_iter() + .map(|e| e.into()) + .collect(), + table_expressions: l + .table_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + shuffles: self + .shuffles + .iter() + .map(|s| shuffle::ArgumentV2 { + name: s.name.clone(), + input_expressions: s + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + shuffle_expressions: s + .shuffle_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + general_column_annotations: self.general_column_annotations.clone(), + } + } +} + +/// Witness calculator. Frontend function +#[derive(Debug)] +pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { + k: u32, + n: usize, + unusable_rows_start: usize, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + next_phase: u8, +} + +impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { + /// Create a new WitnessCalculator + pub fn new( + k: u32, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + ) -> Self { + let n = 2usize.pow(k); + let unusable_rows_start = n - (cs.blinding_factors() + 1); + Self { + k, + n, + unusable_rows_start, + circuit, + config, + cs, + instances, + next_phase: 0, + } + } + + /// Calculate witness at phase + pub fn calc( + &mut self, + phase: u8, + challenges: &HashMap, + ) -> Result>>>, Error> { + if phase != self.next_phase { + return Err(Error::Other(format!( + "Expected phase {}, got {}", + self.next_phase, phase + ))); + } + let current_phase = match phase { + 0 => FirstPhase.to_sealed(), + 1 => SecondPhase.to_sealed(), + 2 => ThirdPhase.to_sealed(), + _ => unreachable!("only phase [0,2] supported"), + }; + + let mut witness = WitnessCollection { + k: self.k, + current_phase, + advice: vec![vec![Assigned::Zero; self.n]; 
self.cs.num_advice_columns], + instances: self.instances, + challenges, + // The prover will not be allowed to assign values to advice + // cells that exist within inactive rows, which include some + // number of blinding factors and an extra row for use in the + // permutation argument. + usable_rows: ..self.unusable_rows_start, + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain the witness and other information. + ConcreteCircuit::FloorPlanner::synthesize( + &mut witness, + self.circuit, + self.config.clone(), + self.cs.constants.clone(), + ) + .expect("todo"); + + let column_indices = self + .cs + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == *phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + self.next_phase += 1; + Ok(witness + .advice + .into_iter() + .enumerate() + .map(|(column_index, advice)| { + if column_indices.contains(&column_index) { + Some(advice) + } else { + None + } + }) + .collect()) + } +} + +/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the +/// circuit into its columns and the column configuration; as well as doing the fixed column and +/// copy constraints assignments. The output of this function can then be used for the key +/// generation, and proof generation. +/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
+pub fn compile_circuit>( + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result< + ( + CompiledCircuitV2, + ConcreteCircuit::Config, + ConstraintSystem, + ), + Error, +> { + let n = 2usize.pow(k); + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + if n < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(k)); + } + + let mut assembly = crate::plonk::keygen::Assembly { + k, + fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], + permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), + selectors: vec![vec![false; n]; cs.num_selectors], + usable_rows: 0..n - (cs.blinding_factors() + 1), + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain URS + ConcreteCircuit::FloorPlanner::synthesize( + &mut assembly, + circuit, + config.clone(), + cs.constants.clone(), + )?; + + let fixed = batch_invert_assigned(assembly.fixed); + let (cs, selector_polys) = if compress_selectors { + cs.compress_selectors(assembly.selectors.clone()) + } else { + // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
+ let selectors = std::mem::take(&mut assembly.selectors); + cs.directly_convert_selectors_to_fixed(selectors) + }; + let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); + fixed.extend(selector_polys.into_iter()); + + let preprocessing = PreprocessingV2 { + permutation: permutation::keygen::AssemblyMid { + copies: assembly.permutation.copies, + }, + fixed, + }; + + Ok(( + CompiledCircuitV2 { + cs: cs.clone().into(), + preprocessing, + }, + config, + cs, + )) +} + +impl ConstraintSystemV2Backend { + /// Collect queries used in gates while mapping those gates to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { + self.gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], + queried_selectors: Vec::new(), // Unused? + queried_cells: Vec::new(), // Unused? + }) + .collect() + } + + /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. + fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { + self.lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() + } + + /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed + /// query references in the expressions. 
+ fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { + self.shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() + } + + /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the + /// expressions of gates, lookups and shuffles into equivalent ones with indexed query + /// references. + pub(crate) fn collect_queries( + &self, + ) -> ( + Queries, + Vec>, + Vec>, + Vec>, + ) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = self.collect_queries_gates(&mut queries); + let lookups = self.collect_queries_lookups(&mut queries); + let shuffles = self.collect_queries_shuffles(&mut queries); + + // Each column used in a copy constraint involves a query at rotation current. 
+ for column in self.permutation.get_columns() { + match column.column_type { + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => { + queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) + } + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } + }; + } + + let mut num_advice_queries = vec![0; self.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; + } + + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystem { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + /// This is a cached vector that maps virtual selectors to the concrete + /// fixed column that they were compressed into. This is just used by dev + /// tooling right now. + pub(crate) selector_map: Vec>, + + pub(crate) gates: Vec>, + pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. 
+ pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(Column, Rotation)>, + pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, + + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. + pub(crate) constants: Vec>, + + pub(crate) minimum_degree: Option, +} + +impl From> for ConstraintSystem { + fn from(cs2: ConstraintSystemV2Backend) -> Self { + let (queries, gates, lookups, shuffles) = cs2.collect_queries(); + ConstraintSystem { + num_fixed_columns: cs2.num_fixed_columns, + num_advice_columns: cs2.num_advice_columns, + num_instance_columns: cs2.num_instance_columns, + num_selectors: 0, + num_challenges: cs2.num_challenges, + unblinded_advice_columns: cs2.unblinded_advice_columns, + advice_column_phase: cs2 + .advice_column_phase + .into_iter() + .map(sealed::Phase) + .collect(), + challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), + selector_map: Vec::new(), + gates, + advice_queries: queries.advice, + num_advice_queries: queries.num_advice_queries, + instance_queries: queries.instance, + fixed_queries: queries.fixed, + permutation: cs2.permutation, + lookups, + shuffles, + general_column_annotations: cs2.general_column_annotations, + constants: Vec::new(), + minimum_degree: None, + } + } +} + +/// Represents the 
minimal parameters that determine a `ConstraintSystem`. +#[allow(dead_code)] +pub struct PinnedConstraintSystem<'a, F: Field> { + num_fixed_columns: &'a usize, + num_advice_columns: &'a usize, + num_instance_columns: &'a usize, + num_selectors: &'a usize, + num_challenges: &'a usize, + advice_column_phase: &'a Vec, + challenge_phase: &'a Vec, + gates: PinnedGates<'a, F>, + advice_queries: &'a Vec<(Column, Rotation)>, + instance_queries: &'a Vec<(Column, Rotation)>, + fixed_queries: &'a Vec<(Column, Rotation)>, + permutation: &'a permutation::Argument, + lookups: &'a Vec>, + shuffles: &'a Vec>, + constants: &'a Vec>, + minimum_degree: &'a Option, +} + +impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); + debug_struct + .field("num_fixed_columns", self.num_fixed_columns) + .field("num_advice_columns", self.num_advice_columns) + .field("num_instance_columns", self.num_instance_columns) + .field("num_selectors", self.num_selectors); + // Only show multi-phase related fields if it's used. 
+ if *self.num_challenges > 0 { + debug_struct + .field("num_challenges", self.num_challenges) + .field("advice_column_phase", self.advice_column_phase) + .field("challenge_phase", self.challenge_phase); + } + debug_struct + .field("gates", &self.gates) + .field("advice_queries", self.advice_queries) + .field("instance_queries", self.instance_queries) + .field("fixed_queries", self.fixed_queries) + .field("permutation", self.permutation) + .field("lookups", self.lookups); + if !self.shuffles.is_empty() { + debug_struct.field("shuffles", self.shuffles); + } + debug_struct + .field("constants", self.constants) + .field("minimum_degree", self.minimum_degree); + debug_struct.finish() + } +} + +struct PinnedGates<'a, F: Field>(&'a Vec>); + +impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + f.debug_list() + .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) + .finish() + } +} + +impl Default for ConstraintSystem { + fn default() -> ConstraintSystem { + ConstraintSystem { + num_fixed_columns: 0, + num_advice_columns: 0, + num_instance_columns: 0, + num_selectors: 0, + num_challenges: 0, + unblinded_advice_columns: Vec::new(), + advice_column_phase: Vec::new(), + challenge_phase: Vec::new(), + selector_map: vec![], + gates: vec![], + fixed_queries: Vec::new(), + advice_queries: Vec::new(), + num_advice_queries: Vec::new(), + instance_queries: Vec::new(), + permutation: permutation::Argument::new(), + lookups: Vec::new(), + shuffles: Vec::new(), + general_column_annotations: HashMap::new(), + constants: vec![], + minimum_degree: None, + } + } +} + +impl ConstraintSystem { + /// Obtain a pinned version of this constraint system; a structure with the + /// minimal parameters needed to determine the rest of the constraint + /// system. 
+ pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { + PinnedConstraintSystem { + num_fixed_columns: &self.num_fixed_columns, + num_advice_columns: &self.num_advice_columns, + num_instance_columns: &self.num_instance_columns, + num_selectors: &self.num_selectors, + num_challenges: &self.num_challenges, + advice_column_phase: &self.advice_column_phase, + challenge_phase: &self.challenge_phase, + gates: PinnedGates(&self.gates), + fixed_queries: &self.fixed_queries, + advice_queries: &self.advice_queries, + instance_queries: &self.instance_queries, + permutation: &self.permutation, + lookups: &self.lookups, + shuffles: &self.shuffles, + constants: &self.constants, + minimum_degree: &self.minimum_degree, + } + } + + /// Enables this fixed column to be used for global constant assignments. + /// + /// # Side-effects + /// + /// The column will be equality-enabled. + pub fn enable_constant(&mut self, column: Column) { + if !self.constants.contains(&column) { + self.constants.push(column); + self.enable_equality(column); + } + } + + /// Enable the ability to enforce equality over cells in this column + pub fn enable_equality>>(&mut self, column: C) { + let column = column.into(); + self.query_any_index(column, Rotation::cur()); + self.permutation.add_column(column); + } + + /// Add a lookup argument for some input expressions and table columns. + /// + /// `table_map` returns a map between input expressions and the table columns + /// they need to match. 
+ pub fn lookup>( + &mut self, + name: S, + table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, TableColumn)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + let table_map = table_map(&mut cells) + .into_iter() + .map(|(mut input, table)| { + if input.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + let mut table = cells.query_fixed(table.inner(), Rotation::cur()); + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + let index = self.lookups.len(); + + self.lookups + .push(lookup::Argument::new(name.as_ref(), table_map)); + + index + } + + /// Add a lookup argument for some input expressions and table expressions. + /// + /// `table_map` returns a map between input expressions and the table expressions + /// they need to match. + pub fn lookup_any>( + &mut self, + name: S, + table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + let table_map = table_map(&mut cells) + .into_iter() + .map(|(mut input, mut table)| { + if input.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + if table.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + let index = self.lookups.len(); + + self.lookups + .push(lookup::Argument::new(name.as_ref(), table_map)); + + index + } + + /// Add a shuffle argument for some input expressions and table expressions. 
+ pub fn shuffle>( + &mut self, + name: S, + shuffle_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + let shuffle_map = shuffle_map(&mut cells) + .into_iter() + .map(|(mut input, mut table)| { + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + let index = self.shuffles.len(); + + self.shuffles + .push(shuffle::Argument::new(name.as_ref(), shuffle_map)); + + index + } + + fn query_fixed_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.fixed_queries.len(); + self.fixed_queries.push((column, at)); + + index + } + + pub(crate) fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.advice_queries.len(); + self.advice_queries.push((column, at)); + self.num_advice_queries[column.index] += 1; + + index + } + + fn query_instance_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.instance_queries.len(); + self.instance_queries.push((column, at)); + + index + } + + fn query_any_index(&mut self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice(_) => { + self.query_advice_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => self.query_fixed_index(Column::::try_from(column).unwrap(), at), + Any::Instance => { + 
self.query_instance_index(Column::::try_from(column).unwrap(), at) + } + } + } + + pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + panic!("get_advice_query_index called for non-existent query"); + } + + pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + panic!("get_fixed_query_index called for non-existent query"); + } + + pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + panic!("get_instance_query_index called for non-existent query"); + } + + pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice(_) => { + self.get_advice_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => { + self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Instance => { + self.get_instance_query_index(Column::::try_from(column).unwrap(), at) + } + } + } + + /// Sets the minimum degree required by the circuit, which can be set to a + /// larger amount than actually needed. This can be used, for example, to + /// force the permutation argument to involve more columns in the same set. + pub fn set_minimum_degree(&mut self, degree: usize) { + self.minimum_degree = Some(degree); + } + + /// Creates a new gate. + /// + /// # Panics + /// + /// A gate is required to contain polynomial constraints. This method will panic if + /// `constraints` returns an empty iterator. 
+ pub fn create_gate>, Iter: IntoIterator, S: AsRef>( + &mut self, + name: S, + constraints: impl FnOnce(&mut VirtualCells<'_, F>) -> Iter, + ) { + let mut cells = VirtualCells::new(self); + let constraints = constraints(&mut cells); + let (constraint_names, polys): (_, Vec<_>) = constraints + .into_iter() + .map(|c| c.into()) + .map(|mut c: Constraint| { + c.poly.query_cells(&mut cells); + (c.name, c.poly) + }) + .unzip(); + + let queried_selectors = cells.queried_selectors; + let queried_cells = cells.queried_cells; + + assert!( + !polys.is_empty(), + "Gates must contain at least one constraint." + ); + + self.gates.push(Gate { + name: name.as_ref().to_string(), + constraint_names, + polys, + queried_selectors, + queried_cells, + }); + } + + /// This will compress selectors together depending on their provided + /// assignments. This `ConstraintSystem` will then be modified to add new + /// fixed columns (representing the actual selectors) and will return the + /// polynomials for those columns. Finally, an internal map is updated to + /// find which fixed column corresponds with a given `Selector`. + /// + /// Do not call this twice. Yes, this should be a builder pattern instead. + pub fn compress_selectors(mut self, selectors: Vec>) -> (Self, Vec>) { + // The number of provided selector assignments must be the number we + // counted for this constraint system. + assert_eq!(selectors.len(), self.num_selectors); + + // Compute the maximal degree of every selector. We only consider the + // expressions in gates, as lookup arguments cannot support simple + // selectors. Selectors that are complex or do not appear in any gates + // will have degree zero. 
+ let mut degrees = vec![0; selectors.len()]; + for expr in self.gates.iter().flat_map(|gate| gate.polys.iter()) { + if let Some(selector) = expr.extract_simple_selector() { + degrees[selector.0] = max(degrees[selector.0], expr.degree()); + } + } + + // We will not increase the degree of the constraint system, so we limit + // ourselves to the largest existing degree constraint. + let max_degree = self.degree(); + + let mut new_columns = vec![]; + let (polys, selector_assignment) = compress_selectors::process( + selectors + .into_iter() + .zip(degrees) + .enumerate() + .map( + |(i, (activations, max_degree))| compress_selectors::SelectorDescription { + selector: i, + activations, + max_degree, + }, + ) + .collect(), + max_degree, + || { + let column = self.fixed_column(); + new_columns.push(column); + Expression::Fixed(FixedQuery { + index: Some(self.query_fixed_index(column, Rotation::cur())), + column_index: column.index, + rotation: Rotation::cur(), + }) + }, + ); + + let mut selector_map = vec![None; selector_assignment.len()]; + let mut selector_replacements = vec![None; selector_assignment.len()]; + for assignment in selector_assignment { + selector_replacements[assignment.selector] = Some(assignment.expression); + selector_map[assignment.selector] = Some(new_columns[assignment.combination_index]); + } + + self.selector_map = selector_map + .into_iter() + .map(|a| a.unwrap()) + .collect::>(); + let selector_replacements = selector_replacements + .into_iter() + .map(|a| a.unwrap()) + .collect::>(); + self.replace_selectors_with_fixed(&selector_replacements); + + (self, polys) + } + + /// Does not combine selectors and directly replaces them everywhere with fixed columns. + pub fn directly_convert_selectors_to_fixed( + mut self, + selectors: Vec>, + ) -> (Self, Vec>) { + // The number of provided selector assignments must be the number we + // counted for this constraint system. 
+ assert_eq!(selectors.len(), self.num_selectors); + + let (polys, selector_replacements): (Vec<_>, Vec<_>) = selectors + .into_iter() + .map(|selector| { + let poly = selector + .iter() + .map(|b| if *b { F::ONE } else { F::ZERO }) + .collect::>(); + let column = self.fixed_column(); + let rotation = Rotation::cur(); + let expr = Expression::Fixed(FixedQuery { + index: Some(self.query_fixed_index(column, rotation)), + column_index: column.index, + rotation, + }); + (poly, expr) + }) + .unzip(); + + self.replace_selectors_with_fixed(&selector_replacements); + self.num_selectors = 0; + + (self, polys) + } + + fn replace_selectors_with_fixed(&mut self, selector_replacements: &[Expression]) { + fn replace_selectors( + expr: &mut Expression, + selector_replacements: &[Expression], + must_be_nonsimple: bool, + ) { + *expr = expr.evaluate( + &|constant| Expression::Constant(constant), + &|selector| { + if must_be_nonsimple { + // Simple selectors are prohibited from appearing in + // expressions in the lookup argument by + // `ConstraintSystem`. 
+ assert!(!selector.is_simple()); + } + + selector_replacements[selector.0].clone() + }, + &|query| Expression::Fixed(query), + &|query| Expression::Advice(query), + &|query| Expression::Instance(query), + &|challenge| Expression::Challenge(challenge), + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + &|a, f| a * f, + ); + } + + // Substitute selectors for the real fixed columns in all gates + for expr in self.gates.iter_mut().flat_map(|gate| gate.polys.iter_mut()) { + replace_selectors(expr, selector_replacements, false); + } + + // Substitute non-simple selectors for the real fixed columns in all + // lookup expressions + for expr in self.lookups.iter_mut().flat_map(|lookup| { + lookup + .input_expressions + .iter_mut() + .chain(lookup.table_expressions.iter_mut()) + }) { + replace_selectors(expr, selector_replacements, true); + } + + for expr in self.shuffles.iter_mut().flat_map(|shuffle| { + shuffle + .input_expressions + .iter_mut() + .chain(shuffle.shuffle_expressions.iter_mut()) + }) { + replace_selectors(expr, selector_replacements, true); + } + } + + /// Allocate a new (simple) selector. Simple selectors cannot be added to + /// expressions nor multiplied by other expressions containing simple + /// selectors. Also, simple selectors may not appear in lookup argument + /// inputs. + pub fn selector(&mut self) -> Selector { + let index = self.num_selectors; + self.num_selectors += 1; + Selector(index, true) + } + + /// Allocate a new complex selector that can appear anywhere + /// within expressions. + pub fn complex_selector(&mut self) -> Selector { + let index = self.num_selectors; + self.num_selectors += 1; + Selector(index, false) + } + + /// Allocates a new fixed column that can be used in a lookup table. + pub fn lookup_table_column(&mut self) -> TableColumn { + TableColumn { + inner: self.fixed_column(), + } + } + + /// Annotate a Lookup column. 
+ pub fn annotate_lookup_column(&mut self, column: TableColumn, annotation: A) + where + A: Fn() -> AR, + AR: Into, + { + // We don't care if the table has already an annotation. If it's the case we keep the new one. + self.general_column_annotations.insert( + metadata::Column::from((Any::Fixed, column.inner().index)), + annotation().into(), + ); + } + + /// Annotate an Instance column. + pub fn annotate_lookup_any_column(&mut self, column: T, annotation: A) + where + A: Fn() -> AR, + AR: Into, + T: Into>, + { + let col_any = column.into(); + // We don't care if the table has already an annotation. If it's the case we keep the new one. + self.general_column_annotations.insert( + metadata::Column::from((col_any.column_type, col_any.index)), + annotation().into(), + ); + } + + /// Allocate a new fixed column + pub fn fixed_column(&mut self) -> Column { + let tmp = Column { + index: self.num_fixed_columns, + column_type: Fixed, + }; + self.num_fixed_columns += 1; + tmp + } + + /// Allocate a new unblinded advice column at `FirstPhase` + pub fn unblinded_advice_column(&mut self) -> Column { + self.unblinded_advice_column_in(FirstPhase) + } + + /// Allocate a new advice column at `FirstPhase` + pub fn advice_column(&mut self) -> Column { + self.advice_column_in(FirstPhase) + } + + /// Allocate a new unblinded advice column in given phase. This allows for the generation of deterministic commitments to advice columns + /// which can be used to split large circuits into smaller ones, whose proofs can then be "joined" together by their common witness commitments. 
+ pub fn unblinded_advice_column_in(&mut self, phase: P) -> Column { + let phase = phase.to_sealed(); + if let Some(previous_phase) = phase.prev() { + self.assert_phase_exists( + previous_phase, + format!("Column in later phase {phase:?}").as_str(), + ); + } + + let tmp = Column { + index: self.num_advice_columns, + column_type: Advice { phase }, + }; + self.unblinded_advice_columns.push(tmp.index); + self.num_advice_columns += 1; + self.num_advice_queries.push(0); + self.advice_column_phase.push(phase); + tmp + } + + /// Allocate a new advice column in given phase + /// + /// # Panics + /// + /// It panics if previous phase before the given one doesn't have advice column allocated. + pub fn advice_column_in(&mut self, phase: P) -> Column { + let phase = phase.to_sealed(); + if let Some(previous_phase) = phase.prev() { + self.assert_phase_exists( + previous_phase, + format!("Column in later phase {phase:?}").as_str(), + ); + } + + let tmp = Column { + index: self.num_advice_columns, + column_type: Advice { phase }, + }; + self.num_advice_columns += 1; + self.num_advice_queries.push(0); + self.advice_column_phase.push(phase); + tmp + } + + /// Allocate a new instance column + pub fn instance_column(&mut self) -> Column { + let tmp = Column { + index: self.num_instance_columns, + column_type: Instance, + }; + self.num_instance_columns += 1; + tmp + } + + /// Requests a challenge that is usable after the given phase. + /// + /// # Panics + /// + /// It panics if the given phase doesn't have advice column allocated. 
+ pub fn challenge_usable_after(&mut self, phase: P) -> Challenge { + let phase = phase.to_sealed(); + self.assert_phase_exists( + phase, + format!("Challenge usable after phase {phase:?}").as_str(), + ); + + let tmp = Challenge { + index: self.num_challenges, + phase: phase.0, + }; + self.num_challenges += 1; + self.challenge_phase.push(phase); + tmp + } + + /// Helper funciotn to assert phase exists, to make sure phase-aware resources + /// are allocated in order, and to avoid any phase to be skipped accidentally + /// to cause unexpected issue in the future. + fn assert_phase_exists(&self, phase: sealed::Phase, resource: &str) { + self.advice_column_phase + .iter() + .find(|advice_column_phase| **advice_column_phase == phase) + .unwrap_or_else(|| { + panic!( + "No Column is used in phase {phase:?} while allocating a new {resource:?}" + ) + }); + } + + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { + let max_phase = self + .advice_column_phase + .iter() + .max() + .map(|phase| phase.0) + .unwrap_or_default(); + (0..=max_phase).map(sealed::Phase) + } + + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = self.permutation.required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. 
+ degree = std::cmp::max( + degree, + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0), + ); + + std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) 
+ + 1 // for at least one row + } + + /// Returns number of fixed columns + pub fn num_fixed_columns(&self) -> usize { + self.num_fixed_columns + } + + /// Returns number of advice columns + pub fn num_advice_columns(&self) -> usize { + self.num_advice_columns + } + + /// Returns number of instance columns + pub fn num_instance_columns(&self) -> usize { + self.num_instance_columns + } + + /// Returns number of selectors + pub fn num_selectors(&self) -> usize { + self.num_selectors + } + + /// Returns number of challenges + pub fn num_challenges(&self) -> usize { + self.num_challenges + } + + /// Returns phase of advice columns + pub fn advice_column_phase(&self) -> Vec { + self.advice_column_phase + .iter() + .map(|phase| phase.0) + .collect() + } + + /// Returns phase of challenges + pub fn challenge_phase(&self) -> Vec { + self.challenge_phase.iter().map(|phase| phase.0).collect() + } + + /// Returns gates + pub fn gates(&self) -> &Vec> { + &self.gates + } + + /// Returns general column annotations + pub fn general_column_annotations(&self) -> &HashMap { + &self.general_column_annotations + } + + /// Returns advice queries + pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { + &self.advice_queries + } + + /// Returns instance queries + pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { + &self.instance_queries + } + + /// Returns fixed queries + pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { + &self.fixed_queries + } + + /// Returns permutation argument + pub fn permutation(&self) -> &permutation::Argument { + &self.permutation + } + + /// Returns lookup arguments + pub fn lookups(&self) -> &Vec> { + &self.lookups + } + + /// Returns shuffle arguments + pub fn shuffles(&self) -> &Vec> { + &self.shuffles + } + + /// Returns constants + pub fn constants(&self) -> &Vec> { + &self.constants + } +} + +/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup +/// table. 
+#[derive(Debug)] +pub struct VirtualCells<'a, F: Field> { + meta: &'a mut ConstraintSystem, + queried_selectors: Vec, + queried_cells: Vec, +} + +impl<'a, F: Field> VirtualCells<'a, F> { + fn new(meta: &'a mut ConstraintSystem) -> Self { + VirtualCells { + meta, + queried_selectors: vec![], + queried_cells: vec![], + } + } + + /// Query a selector at the current position. + pub fn query_selector(&mut self, selector: Selector) -> Expression { + self.queried_selectors.push(selector); + Expression::Selector(selector) + } + + /// Query a fixed column at a relative position + pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Fixed(FixedQuery { + index: Some(self.meta.query_fixed_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an advice column at a relative position + pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Advice(AdviceQuery { + index: Some(self.meta.query_advice_index(column, at)), + column_index: column.index, + rotation: at, + phase: column.column_type().phase, + }) + } + + /// Query an instance column at a relative position + pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Instance(InstanceQuery { + index: Some(self.meta.query_instance_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an Any column at a relative position + pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { + let column = column.into(); + match column.column_type() { + Any::Advice(_) => self.query_advice(Column::::try_from(column).unwrap(), at), + Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), + Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), + } + } + + /// Query a 
challenge + pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { + Expression::Challenge(challenge) + } +} + +#[cfg(test)] +mod tests { + use super::Expression; + use halo2curves::bn256::Fr; + + #[test] + fn iter_sum() { + let exprs: Vec> = vec![ + Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().sum(); + let expected: Expression = Expression::Sum( + Box::new(Expression::Sum( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } + + #[test] + fn iter_product() { + let exprs: Vec> = vec![ + Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().product(); + let expected: Expression = Expression::Product( + Box::new(Expression::Product( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } +} diff --git a/frontend/src/plonk/circuit/compress_selectors.rs b/common/src/plonk/circuit/compress_selectors.rs similarity index 99% rename from frontend/src/plonk/circuit/compress_selectors.rs rename to common/src/plonk/circuit/compress_selectors.rs index 053ebe3178..15016f31b9 100644 --- a/frontend/src/plonk/circuit/compress_selectors.rs +++ b/common/src/plonk/circuit/compress_selectors.rs @@ -229,7 +229,8 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{plonk::FixedQuery, poly::Rotation}; + use crate::plonk::FixedQuery; + use halo2_middleware::poly::Rotation; use halo2curves::pasta::Fp; use proptest::collection::{vec, SizeRange}; use proptest::prelude::*; diff --git a/frontend/src/error.rs b/common/src/plonk/error.rs similarity index 98% rename from frontend/src/error.rs rename to 
common/src/plonk/error.rs index 362634f6c9..14d7339503 100644 --- a/frontend/src/error.rs +++ b/common/src/plonk/error.rs @@ -2,8 +2,8 @@ use std::error; use std::fmt; use std::io; -use crate::plonk::TableColumn; -use halo2_middleware::circuit::{Any, Column}; +use super::TableColumn; +use super::{Any, Column}; /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up diff --git a/backend/src/plonk/evaluation.rs b/common/src/plonk/evaluation.rs similarity index 99% rename from backend/src/plonk/evaluation.rs rename to common/src/plonk/evaluation.rs index 2cb9c5a9ae..66730b907e 100644 --- a/backend/src/plonk/evaluation.rs +++ b/common/src/plonk/evaluation.rs @@ -1,12 +1,11 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, ProvingKey}; +use crate::plonk::{lookup, permutation, Any, ProvingKey}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; -use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; use super::{shuffle, ConstraintSystem, Expression}; @@ -675,6 +674,7 @@ impl GraphEvaluator { fn add_expression(&mut self, expr: &Expression) -> ValueSource { match expr { Expression::Constant(scalar) => self.add_constant(scalar), + Expression::Selector(_selector) => unreachable!(), Expression::Fixed(query) => { let rot_idx = self.add_rotation(&query.rotation); self.add_calculation(Calculation::Store(ValueSource::Fixed( @@ -847,6 +847,7 @@ pub fn evaluate( let idx = start + i; *value = expression.evaluate( &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), &|query| { fixed[query.column_index] [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs new file mode 100644 index 0000000000..3a4ba1ac14 --- /dev/null +++ 
b/common/src/plonk/keygen.rs @@ -0,0 +1,395 @@ +#![allow(clippy::int_plus_one)] + +use std::ops::Range; + +use ff::{Field, FromUniformBytes}; +use group::Curve; + +use super::{ + circuit::{ + compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, + ConstraintSystem, Fixed, Instance, Selector, + }, + evaluation::Evaluator, + permutation, Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, +}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + circuit::Value, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; +use halo2_middleware::circuit::Challenge; + +pub(crate) fn create_domain( + k: u32, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, +) -> ( + EvaluationDomain, + ConstraintSystem, + ConcreteCircuit::Config, +) +where + C: CurveAffine, + ConcreteCircuit: Circuit, +{ + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, params); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + + let degree = cs.degree(); + + let domain = EvaluationDomain::new(degree as u32, k); + + (domain, cs, config) +} + +/// Assembly to be used in circuit synthesis. +#[derive(Debug)] +pub(crate) struct Assembly { + pub(crate) k: u32, + pub(crate) fixed: Vec, LagrangeCoeff>>, + pub(crate) permutation: permutation::keygen::AssemblyFront, + pub(crate) selectors: Vec>, + // A range of available rows for assignment and copies. + pub(crate) usable_rows: Range, + pub(crate) _marker: std::marker::PhantomData, +} + +impl Assignment for Assembly { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. 
+ } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.selectors[selector.0][row] = true; + + Ok(()) + } + + fn query_instance(&self, _: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + // There is no instance in this context. + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about fixed columns here + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .fixed + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.permutation + .copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + column: Column, + from_row: usize, + to: Value>, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&from_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + let col = self + .fixed + .get_mut(column.index()) + .ok_or(Error::BoundsFailure)?; + + let filler = to.assign()?; + for row in self.usable_rows.clone().skip(from_row) { + col[row] = filler; + } + + Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
+pub fn keygen_vk_v2<'params, C, P>( + params: &P, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + C::Scalar: FromUniformBytes<64>, +{ + let cs2 = &circuit.cs; + let cs: ConstraintSystem = cs2.clone().into(); + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + params + .commit_lagrange( + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + .to_affine() + }) + .collect(); + + Ok(VerifyingKey::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs, + Vec::new(), + false, + )) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// By default, selector compression is turned **off**. +pub fn keygen_vk<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + keygen_vk_custom(params, circuit, true) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// +/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
+pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + let mut vk = keygen_vk_v2(params, &compiled_circuit)?; + vk.compress_selectors = compress_selectors; + Ok(vk) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. +pub fn keygen_pk_v2<'params, C, P>( + params: &P, + vk: VerifyingKey, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < vk.cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) + .collect(); + + let fixed_cosets = fixed_polys + .iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_pk(params, &vk.domain, &cs.permutation); + + // Compute l_0(X) + // TODO: this can be done more efficiently + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new(&vk.cs); + + Ok(ProvingKey { + vk, + l0, + l_last, + l_active_row, + fixed_values: circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(), + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
+pub fn keygen_pk<'params, C, P, ConcreteCircuit>( + params: &P, + vk: VerifyingKey, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; + keygen_pk_v2(params, vk, &compiled_circuit) +} diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index 5be61e7111..fb6b3492a3 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -1,5 +1,6 @@ use super::circuit::Expression; -use halo2_middleware::ff::Field; +use ff::Field; +use halo2_middleware::circuit::ExpressionMid; use std::fmt::{self, Debug}; pub(crate) mod prover; diff --git a/common/src/plonk/lookup/prover.rs b/common/src/plonk/lookup/prover.rs index ff1e169983..71066da9e6 100644 --- a/common/src/plonk/lookup/prover.rs +++ b/common/src/plonk/lookup/prover.rs @@ -12,7 +12,7 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use halo2_middleware::ff::WithSmallOrderMulGroup; +use ff::WithSmallOrderMulGroup; use group::{ ff::{BatchInvert, Field}, Curve, diff --git a/common/src/plonk/lookup/verifier.rs b/common/src/plonk/lookup/verifier.rs index 773b6f8393..84cd02efb5 100644 --- a/common/src/plonk/lookup/verifier.rs +++ b/common/src/plonk/lookup/verifier.rs @@ -10,7 +10,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use halo2_middleware::ff::Field; +use ff::Field; use halo2_middleware::poly::Rotation; pub struct PermutationCommitments { @@ -121,6 +121,7 @@ impl Evaluated { .map(|expression| { expression.evaluate( &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), &|query| fixed_evals[query.index.unwrap()], &|query| advice_evals[query.index.unwrap()], &|query| instance_evals[query.index.unwrap()], diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 
31c49e8a06..22c1fad6c3 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -1,5 +1,6 @@ //! Implementation of permutation argument. +use super::circuit::{Any, Column}; use crate::{ arithmetic::CurveAffine, helpers::{ @@ -9,8 +10,6 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; -use halo2_middleware::circuit::{Any, Column}; -// use halo2_middleware::permutation::Argument; pub(crate) mod keygen; pub(crate) mod prover; @@ -20,9 +19,6 @@ pub use keygen::Assembly; use std::io; -// TODO: Use https://docs.rs/ref-cast/latest/ref_cast/index.html here? This way we can have -// Argument(halo2_middleware::permutation::Argument) and easily translate from one type to the -// other while using references. /// A permutation argument. #[derive(Debug, Clone)] pub struct Argument { @@ -30,14 +26,6 @@ pub struct Argument { pub(super) columns: Vec>, } -impl From for Argument { - fn from(arg: halo2_middleware::permutation::Argument) -> Self { - Self { - columns: arg.columns, - } - } -} - impl Argument { pub(crate) fn new() -> Self { Argument { columns: vec![] } @@ -81,6 +69,12 @@ impl Argument { 3 } + pub(crate) fn add_column(&mut self, column: Column) { + if !self.columns.contains(&column) { + self.columns.push(column); + } + } + /// Returns columns that participate on the permutation argument. 
pub fn get_columns(&self) -> Vec> { self.columns.clone() diff --git a/common/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs index 5fd5529807..32ee0aa25e 100644 --- a/common/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -1,17 +1,15 @@ -use halo2_middleware::ff::{Field, PrimeField}; +use ff::{Field, PrimeField}; use group::Curve; use super::{Argument, ProvingKey, VerifyingKey}; use crate::{ arithmetic::{parallelize, CurveAffine}, - plonk::Error, + plonk::{Any, Column, Error}, poly::{ commitment::{Blind, Params}, EvaluationDomain, }, }; -use halo2_middleware::circuit::{Any, Column}; -use halo2_middleware::permutation; #[cfg(feature = "thread-safe-region")] use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; @@ -36,12 +34,71 @@ pub struct Assembly { sizes: Vec>, } +// TODO: Dedup with other Cell definition +#[derive(Clone, Debug)] +pub struct Cell { + pub column: Column, + pub row: usize, +} + +#[derive(Clone, Debug)] +pub struct AssemblyMid { + pub copies: Vec<(Cell, Cell)>, +} + +#[derive(Clone, Debug)] +pub struct AssemblyFront { + n: usize, + columns: Vec>, + pub(crate) copies: Vec<(Cell, Cell)>, +} + +impl AssemblyFront { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + Self { + n, + columns: p.columns.clone(), + copies: Vec::new(), + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.columns.contains(&left_column) { + return Err(Error::ColumnNotInPermutation(left_column)); + } + if !self.columns.contains(&right_column) { + return Err(Error::ColumnNotInPermutation(right_column)); + } + // Check bounds + if left_row >= self.n || right_row >= self.n { + return Err(Error::BoundsFailure); + } + self.copies.push(( + Cell { + column: left_column, + row: left_row, + }, + Cell { + column: right_column, + row: right_row, + }, + )); + Ok(()) + } +} + 
#[cfg(not(feature = "thread-safe-region"))] impl Assembly { pub(crate) fn new_from_assembly_mid( n: usize, - p: &permutation::Argument, - a: &permutation::AssemblyMid, + p: &Argument, + a: &AssemblyMid, ) -> Result { let mut assembly = Self::new(n, p); for copy in &a.copies { @@ -50,7 +107,7 @@ impl Assembly { Ok(assembly) } - pub(crate) fn new(n: usize, p: &permutation::Argument) -> Self { + pub(crate) fn new(n: usize, p: &Argument) -> Self { // Initialize the copy vector to keep track of copy constraints in all // the permutation arguments. let mut columns = vec![]; @@ -178,6 +235,18 @@ pub struct Assembly { #[cfg(feature = "thread-safe-region")] impl Assembly { + pub(crate) fn new_from_assembly_mid( + n: usize, + p: &Argument, + a: &AssemblyMid, + ) -> Result { + let mut assembly = Self::new(n, p); + for copy in &a.copies { + assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; + } + Ok(assembly) + } + pub(crate) fn new(n: usize, p: &Argument) -> Self { Assembly { columns: p.columns.clone(), diff --git a/common/src/plonk/permutation/prover.rs b/common/src/plonk/permutation/prover.rs index eb155663be..3199b7d13c 100644 --- a/common/src/plonk/permutation/prover.rs +++ b/common/src/plonk/permutation/prover.rs @@ -1,4 +1,4 @@ -use halo2_middleware::ff::PrimeField; +use ff::PrimeField; use group::{ ff::{BatchInvert, Field}, Curve, @@ -6,7 +6,7 @@ use group::{ use rand_core::RngCore; use std::iter::{self, ExactSizeIterator}; -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, ProvingKey}; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, @@ -17,7 +17,6 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; pub(crate) struct CommittedSet { diff --git a/common/src/plonk/permutation/verifier.rs 
b/common/src/plonk/permutation/verifier.rs index 195a771999..ef3cdba141 100644 --- a/common/src/plonk/permutation/verifier.rs +++ b/common/src/plonk/permutation/verifier.rs @@ -1,7 +1,7 @@ -use halo2_middleware::ff::{Field, PrimeField}; +use ff::{Field, PrimeField}; use std::iter; -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use crate::{ arithmetic::CurveAffine, @@ -9,7 +9,6 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; pub struct Committed { diff --git a/backend/src/plonk/prover.rs b/common/src/plonk/prover.rs similarity index 78% rename from backend/src/plonk/prover.rs rename to common/src/plonk/prover.rs index 4ac4b953d6..8105c230b0 100644 --- a/backend/src/plonk/prover.rs +++ b/common/src/plonk/prover.rs @@ -1,17 +1,25 @@ +use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; -use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; +use std::ops::RangeTo; use std::{collections::HashMap, iter}; use super::{ - circuit::sealed::{self}, + circuit::{ + compile_circuit, + sealed::{self}, + Advice, Any, Assignment, Circuit, Column, Fixed, Instance, Selector, WitnessCalculator, + }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, }; +use halo2_middleware::circuit::Challenge; use crate::{ arithmetic::{eval_polynomial, CurveAffine}, + circuit::Value, + plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, @@ -22,7 +30,6 @@ use crate::{ transcript::{EncodedChallenge, TranscriptWrite}, }; use group::prime::PrimeCurveAffine; -use 
halo2_middleware::plonk::Assigned; /// Collection of instance data used during proving for a single circuit proof. #[derive(Debug)] @@ -734,3 +741,254 @@ impl< Ok(()) } } + +pub(crate) struct WitnessCollection<'a, F: Field> { + pub(crate) k: u32, + pub(crate) current_phase: sealed::Phase, + pub(crate) advice: Vec>>, + // pub(crate) unblinded_advice: HashSet, + pub(crate) challenges: &'a HashMap, + pub(crate) instances: &'a [&'a [F]], + pub(crate) usable_rows: RangeTo, + pub(crate) _marker: std::marker::PhantomData, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. 
+ if self.current_phase != column.column_type().phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? = to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// This creates a proof for the provided `circuit` when given the public +/// parameters `params` and the proving key [`ProvingKey`] that was +/// generated previously for the same circuit. The provided `instances` +/// are zero-padded internally. 
+pub fn create_proof< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + ConcreteCircuit: Circuit, +>( + params: &'params Scheme::ParamsProver, + pk: &ProvingKey, + circuits: &[ConcreteCircuit], + instances: &[&[&[Scheme::Scalar]]], + rng: R, + transcript: &mut T, +) -> Result<(), Error> +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + if circuits.len() != instances.len() { + return Err(Error::InvalidInstances); + } + let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; + let mut witness_calcs: Vec<_> = circuits + .iter() + .enumerate() + .map(|(i, circuit)| WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i])) + .collect(); + let mut prover = ProverV2::::new(params, pk, instances, rng, transcript)?; + let mut challenges = HashMap::new(); + let phases = prover.phases.clone(); + for phase in &phases { + println!("DBG phase {}", phase.0); + let mut witnesses = Vec::with_capacity(circuits.len()); + for witness_calc in witness_calcs.iter_mut() { + witnesses.push(witness_calc.calc(phase.0, &challenges)?); + } + challenges = prover.commit_phase(phase.0, witnesses).unwrap(); + } + prover.create_proof() +} + +#[test] +fn test_create_proof() { + use crate::{ + circuit::SimpleFloorPlanner, + plonk::{keygen_pk, keygen_vk, ConstraintSystem}, + poly::kzg::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + multiopen::ProverSHPLONK, + }, + transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, + }; + use halo2curves::bn256::Bn256; + use rand_core::OsRng; + + #[derive(Clone, Copy)] + struct MyCircuit; + + impl Circuit for MyCircuit { + type Config = (); + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + *self + } + + fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} + + fn synthesize( + &self, + 
_config: Self::Config, + _layouter: impl crate::circuit::Layouter, + ) -> Result<(), Error> { + Ok(()) + } + } + + let params: ParamsKZG = ParamsKZG::setup(3, OsRng); + let vk = keygen_vk(¶ms, &MyCircuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk, &MyCircuit).expect("keygen_pk should not fail"); + let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); + + // Create proof with wrong number of instances + let proof = create_proof::, ProverSHPLONK<_>, _, _, _, _>( + ¶ms, + &pk, + &[MyCircuit, MyCircuit], + &[], + OsRng, + &mut transcript, + ); + assert!(matches!(proof.unwrap_err(), Error::InvalidInstances)); + + // Create proof with correct number of instances + create_proof::, ProverSHPLONK<_>, _, _, _, _>( + ¶ms, + &pk, + &[MyCircuit, MyCircuit], + &[&[], &[]], + OsRng, + &mut transcript, + ) + .expect("proof generation should not fail"); +} diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index 26b1596ff0..8656a3b854 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -1,10 +1,19 @@ use super::circuit::Expression; -use halo2_middleware::ff::Field; +use ff::Field; +use halo2_middleware::circuit::ExpressionMid; use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct ArgumentV2 { + pub(crate) name: String, + pub(crate) input_expressions: Vec>, + pub(crate) shuffle_expressions: Vec>, +} + /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { diff --git a/common/src/plonk/shuffle/prover.rs b/common/src/plonk/shuffle/prover.rs index 5740792b15..f730a8ecf7 100644 --- a/common/src/plonk/shuffle/prover.rs +++ b/common/src/plonk/shuffle/prover.rs @@ -11,7 +11,7 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use halo2_middleware::ff::WithSmallOrderMulGroup; +use ff::WithSmallOrderMulGroup; use group::{ff::BatchInvert, Curve}; use halo2_middleware::poly::Rotation; use rand_core::RngCore; diff --git a/common/src/plonk/shuffle/verifier.rs b/common/src/plonk/shuffle/verifier.rs index c9806455cc..4f3233ad60 100644 --- a/common/src/plonk/shuffle/verifier.rs +++ b/common/src/plonk/shuffle/verifier.rs @@ -8,7 +8,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use halo2_middleware::ff::Field; +use ff::Field; use halo2_middleware::poly::Rotation; pub struct Committed { @@ -77,6 +77,7 @@ impl Evaluated { .map(|expression| { expression.evaluate( &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), &|query| fixed_evals[query.index.unwrap()], &|query| advice_evals[query.index.unwrap()], &|query| instance_evals[query.index.unwrap()], diff --git a/backend/src/plonk/vanishing.rs b/common/src/plonk/vanishing.rs similarity index 100% rename from backend/src/plonk/vanishing.rs rename to common/src/plonk/vanishing.rs diff --git a/backend/src/plonk/vanishing/prover.rs b/common/src/plonk/vanishing/prover.rs similarity index 99% rename from backend/src/plonk/vanishing/prover.rs rename to common/src/plonk/vanishing/prover.rs index 0679ee1988..7943086826 100644 --- a/backend/src/plonk/vanishing/prover.rs +++ b/common/src/plonk/vanishing/prover.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, iter}; -use halo2_middleware::ff::Field; +use ff::Field; use group::Curve; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; diff --git 
a/backend/src/plonk/vanishing/verifier.rs b/common/src/plonk/vanishing/verifier.rs similarity index 99% rename from backend/src/plonk/vanishing/verifier.rs rename to common/src/plonk/vanishing/verifier.rs index 05ccb02a5b..0881dfb2c0 100644 --- a/backend/src/plonk/vanishing/verifier.rs +++ b/common/src/plonk/vanishing/verifier.rs @@ -1,6 +1,6 @@ use std::iter; -use halo2_middleware::ff::Field; +use ff::Field; use crate::{ arithmetic::CurveAffine, diff --git a/backend/src/plonk/verifier.rs b/common/src/plonk/verifier.rs similarity index 98% rename from backend/src/plonk/verifier.rs rename to common/src/plonk/verifier.rs index d92c2ff985..62c18c609a 100644 --- a/backend/src/plonk/verifier.rs +++ b/common/src/plonk/verifier.rs @@ -1,4 +1,4 @@ -use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; use std::iter; @@ -316,6 +316,9 @@ where gate.polynomials().iter().map(move |poly| { poly.evaluate( &|scalar| scalar, + &|_| { + panic!("virtual selectors are removed during optimization") + }, &|query| fixed_evals[query.index.unwrap()], &|query| advice_evals[query.index.unwrap()], &|query| instance_evals[query.index.unwrap()], diff --git a/backend/src/plonk/verifier/batch.rs b/common/src/plonk/verifier/batch.rs similarity index 98% rename from backend/src/plonk/verifier/batch.rs rename to common/src/plonk/verifier/batch.rs index d52f96ab0c..ba3e2419e6 100644 --- a/backend/src/plonk/verifier/batch.rs +++ b/common/src/plonk/verifier/batch.rs @@ -1,4 +1,4 @@ -use halo2_middleware::ff::FromUniformBytes; +use ff::FromUniformBytes; use group::ff::Field; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/common/src/poly.rs b/common/src/poly.rs index edd4445d5f..100ee10eb8 100644 --- a/common/src/poly.rs +++ b/common/src/poly.rs @@ -4,10 +4,10 @@ use crate::arithmetic::parallelize; use crate::helpers::SerdePrimeField; +use crate::plonk::Assigned; use crate::SerdeFormat; 
-use halo2_middleware::plonk::Assigned; -use halo2_middleware::ff::{BatchInvert, Field}; +use group::ff::{BatchInvert, Field}; use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; @@ -15,23 +15,23 @@ use std::marker::PhantomData; use std::ops::{Add, Deref, DerefMut, Index, IndexMut, Mul, RangeFrom, RangeFull, Sub}; /// Generic commitment scheme structures -// pub mod commitment; -// mod domain; -// mod query; -// mod strategy; +pub mod commitment; +mod domain; +mod query; +mod strategy; -// /// Inner product argument commitment scheme -// pub mod ipa; +/// Inner product argument commitment scheme +pub mod ipa; -// /// KZG commitment scheme -// pub mod kzg; +/// KZG commitment scheme +pub mod kzg; -// #[cfg(test)] -// mod multiopen_test; +#[cfg(test)] +mod multiopen_test; -// pub use domain::*; -// pub use query::{ProverQuery, VerifierQuery}; -// pub use strategy::{Guard, VerificationStrategy}; +pub use domain::*; +pub use query::{ProverQuery, VerifierQuery}; +pub use strategy::{Guard, VerificationStrategy}; /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up diff --git a/backend/src/poly/commitment.rs b/common/src/poly/commitment.rs similarity index 99% rename from backend/src/poly/commitment.rs rename to common/src/poly/commitment.rs index 78b17fc808..feae085655 100644 --- a/backend/src/poly/commitment.rs +++ b/common/src/poly/commitment.rs @@ -5,7 +5,7 @@ use super::{ }; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; -use halo2_middleware::ff::Field; +use ff::Field; use halo2curves::CurveAffine; use rand_core::RngCore; use std::{ diff --git a/backend/src/poly/domain.rs b/common/src/poly/domain.rs similarity index 99% rename from backend/src/poly/domain.rs rename to common/src/poly/domain.rs index f25f183d70..45e6db0570 100644 --- a/backend/src/poly/domain.rs +++ b/common/src/poly/domain.rs @@ -1,12 +1,15 @@ //! 
Contains utilities for performing polynomial arithmetic over an evaluation //! domain that is of a suitable size for the application. -use crate::arithmetic::{best_fft, parallelize}; -use halo2_middleware::plonk::Assigned; +use crate::{ + arithmetic::{best_fft, parallelize}, + plonk::Assigned, +}; -use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, Rotation}; +use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; +use ff::WithSmallOrderMulGroup; use group::ff::{BatchInvert, Field}; -use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::poly::Rotation; use std::marker::PhantomData; diff --git a/backend/src/poly/ipa/commitment.rs b/common/src/poly/ipa/commitment.rs similarity index 99% rename from backend/src/poly/ipa/commitment.rs rename to common/src/poly/ipa/commitment.rs index c479ea1ce1..7be053c49c 100644 --- a/backend/src/poly/ipa/commitment.rs +++ b/common/src/poly/ipa/commitment.rs @@ -234,7 +234,7 @@ mod test { use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; use crate::poly::ipa::msm::MSMIPA; - use halo2_middleware::ff::Field; + use ff::Field; use group::Curve; #[test] @@ -291,7 +291,7 @@ mod test { fn test_opening_proof() { const K: u32 = 6; - use halo2_middleware::ff::Field; + use ff::Field; use rand_core::OsRng; use super::super::commitment::{Blind, Params}; diff --git a/backend/src/poly/ipa/commitment/prover.rs b/common/src/poly/ipa/commitment/prover.rs similarity index 99% rename from backend/src/poly/ipa/commitment/prover.rs rename to common/src/poly/ipa/commitment/prover.rs index ee92c7677f..344dbc0e65 100644 --- a/backend/src/poly/ipa/commitment/prover.rs +++ b/common/src/poly/ipa/commitment/prover.rs @@ -1,4 +1,4 @@ -use halo2_middleware::ff::Field; +use ff::Field; use rand_core::RngCore; use super::ParamsIPA; diff --git a/backend/src/poly/ipa/commitment/verifier.rs b/common/src/poly/ipa/commitment/verifier.rs similarity index 100% rename from 
backend/src/poly/ipa/commitment/verifier.rs rename to common/src/poly/ipa/commitment/verifier.rs diff --git a/backend/src/poly/ipa/mod.rs b/common/src/poly/ipa/mod.rs similarity index 100% rename from backend/src/poly/ipa/mod.rs rename to common/src/poly/ipa/mod.rs diff --git a/backend/src/poly/ipa/msm.rs b/common/src/poly/ipa/msm.rs similarity index 99% rename from backend/src/poly/ipa/msm.rs rename to common/src/poly/ipa/msm.rs index 921b95587b..a615ddce49 100644 --- a/backend/src/poly/ipa/msm.rs +++ b/common/src/poly/ipa/msm.rs @@ -1,6 +1,6 @@ use crate::arithmetic::{best_multiexp, CurveAffine}; use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; -use halo2_middleware::ff::Field; +use ff::Field; use group::Group; use std::collections::BTreeMap; diff --git a/backend/src/poly/ipa/multiopen.rs b/common/src/poly/ipa/multiopen.rs similarity index 99% rename from backend/src/poly/ipa/multiopen.rs rename to common/src/poly/ipa/multiopen.rs index 1df7f41daa..b78acb5934 100644 --- a/backend/src/poly/ipa/multiopen.rs +++ b/common/src/poly/ipa/multiopen.rs @@ -5,7 +5,7 @@ use super::*; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use halo2_middleware::ff::Field; +use ff::Field; use std::collections::{BTreeMap, BTreeSet}; mod prover; diff --git a/backend/src/poly/ipa/multiopen/prover.rs b/common/src/poly/ipa/multiopen/prover.rs similarity index 99% rename from backend/src/poly/ipa/multiopen/prover.rs rename to common/src/poly/ipa/multiopen/prover.rs index 3510756812..2ae745d457 100644 --- a/backend/src/poly/ipa/multiopen/prover.rs +++ b/common/src/poly/ipa/multiopen/prover.rs @@ -7,7 +7,7 @@ use crate::poly::query::ProverQuery; use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; -use halo2_middleware::ff::Field; +use ff::Field; use group::Curve; use rand_core::RngCore; use std::io; diff --git a/backend/src/poly/ipa/multiopen/verifier.rs b/common/src/poly/ipa/multiopen/verifier.rs similarity 
index 99% rename from backend/src/poly/ipa/multiopen/verifier.rs rename to common/src/poly/ipa/multiopen/verifier.rs index 7910a0662e..d559e33384 100644 --- a/backend/src/poly/ipa/multiopen/verifier.rs +++ b/common/src/poly/ipa/multiopen/verifier.rs @@ -1,6 +1,6 @@ use std::fmt::Debug; -use halo2_middleware::ff::Field; +use ff::Field; use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; use crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; diff --git a/backend/src/poly/ipa/strategy.rs b/common/src/poly/ipa/strategy.rs similarity index 99% rename from backend/src/poly/ipa/strategy.rs rename to common/src/poly/ipa/strategy.rs index 2064c89478..d2d1b3d364 100644 --- a/backend/src/poly/ipa/strategy.rs +++ b/common/src/poly/ipa/strategy.rs @@ -9,7 +9,7 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use halo2_middleware::ff::Field; +use ff::Field; use group::Curve; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/backend/src/poly/kzg/commitment.rs b/common/src/poly/kzg/commitment.rs similarity index 99% rename from backend/src/poly/kzg/commitment.rs rename to common/src/poly/kzg/commitment.rs index a89bd12ffa..114b9ac013 100644 --- a/backend/src/poly/kzg/commitment.rs +++ b/common/src/poly/kzg/commitment.rs @@ -4,7 +4,7 @@ use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, Par use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; use crate::SerdeFormat; -use halo2_middleware::ff::{Field, PrimeField}; +use ff::{Field, PrimeField}; use group::{prime::PrimeCurveAffine, Curve, Group}; use halo2curves::pairing::Engine; use halo2curves::CurveExt; @@ -365,7 +365,7 @@ mod test { use crate::poly::commitment::ParamsProver; use crate::poly::commitment::{Blind, Params}; use crate::poly::kzg::commitment::ParamsKZG; - use halo2_middleware::ff::Field; + use ff::Field; #[test] fn test_commit_lagrange() { diff --git a/backend/src/poly/kzg/mod.rs 
b/common/src/poly/kzg/mod.rs similarity index 100% rename from backend/src/poly/kzg/mod.rs rename to common/src/poly/kzg/mod.rs diff --git a/backend/src/poly/kzg/msm.rs b/common/src/poly/kzg/msm.rs similarity index 99% rename from backend/src/poly/kzg/msm.rs rename to common/src/poly/kzg/msm.rs index 6244209965..f9b8c284bd 100644 --- a/backend/src/poly/kzg/msm.rs +++ b/common/src/poly/kzg/msm.rs @@ -37,7 +37,7 @@ where /// Prepares all scalars in the MSM to linear combination pub fn combine_with_base(&mut self, base: E::Fr) { - use halo2_middleware::ff::Field; + use ff::Field; let mut acc = E::Fr::ONE; if !self.scalars.is_empty() { for scalar in self.scalars.iter_mut().rev() { diff --git a/backend/src/poly/kzg/multiopen.rs b/common/src/poly/kzg/multiopen.rs similarity index 100% rename from backend/src/poly/kzg/multiopen.rs rename to common/src/poly/kzg/multiopen.rs diff --git a/backend/src/poly/kzg/multiopen/gwc.rs b/common/src/poly/kzg/multiopen/gwc.rs similarity index 97% rename from backend/src/poly/kzg/multiopen/gwc.rs rename to common/src/poly/kzg/multiopen/gwc.rs index 8c8e056e83..3fd28dd00a 100644 --- a/backend/src/poly/kzg/multiopen/gwc.rs +++ b/common/src/poly/kzg/multiopen/gwc.rs @@ -5,7 +5,7 @@ pub use prover::ProverGWC; pub use verifier::VerifierGWC; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use halo2_middleware::ff::Field; +use ff::Field; use std::marker::PhantomData; #[derive(Clone, Copy, Debug)] diff --git a/backend/src/poly/kzg/multiopen/gwc/prover.rs b/common/src/poly/kzg/multiopen/gwc/prover.rs similarity index 100% rename from backend/src/poly/kzg/multiopen/gwc/prover.rs rename to common/src/poly/kzg/multiopen/gwc/prover.rs diff --git a/backend/src/poly/kzg/multiopen/gwc/verifier.rs b/common/src/poly/kzg/multiopen/gwc/verifier.rs similarity index 99% rename from backend/src/poly/kzg/multiopen/gwc/verifier.rs rename to common/src/poly/kzg/multiopen/gwc/verifier.rs index 261f5e2234..fcfda6941f 100644 --- 
a/backend/src/poly/kzg/multiopen/gwc/verifier.rs +++ b/common/src/poly/kzg/multiopen/gwc/verifier.rs @@ -13,7 +13,7 @@ use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use halo2_middleware::ff::Field; +use ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; diff --git a/backend/src/poly/kzg/multiopen/shplonk.rs b/common/src/poly/kzg/multiopen/shplonk.rs similarity index 98% rename from backend/src/poly/kzg/multiopen/shplonk.rs rename to common/src/poly/kzg/multiopen/shplonk.rs index 5f963f4049..d0814e83e3 100644 --- a/backend/src/poly/kzg/multiopen/shplonk.rs +++ b/common/src/poly/kzg/multiopen/shplonk.rs @@ -3,7 +3,7 @@ mod verifier; use crate::multicore::{IntoParallelIterator, ParallelIterator}; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use halo2_middleware::ff::Field; +use ff::Field; pub use prover::ProverSHPLONK; use std::collections::BTreeSet; pub use verifier::VerifierSHPLONK; @@ -142,7 +142,7 @@ where #[cfg(test)] mod proptests { use super::{construct_intermediate_sets, Commitment, IntermediateSets}; - use halo2_middleware::ff::FromUniformBytes; + use ff::FromUniformBytes; use halo2curves::pasta::Fp; use proptest::{collection::vec, prelude::*, sample::select}; use std::convert::TryFrom; diff --git a/backend/src/poly/kzg/multiopen/shplonk/prover.rs b/common/src/poly/kzg/multiopen/shplonk/prover.rs similarity index 99% rename from backend/src/poly/kzg/multiopen/shplonk/prover.rs rename to common/src/poly/kzg/multiopen/shplonk/prover.rs index e857dc59bd..5001d69094 100644 --- a/backend/src/poly/kzg/multiopen/shplonk/prover.rs +++ b/common/src/poly/kzg/multiopen/shplonk/prover.rs @@ -13,7 +13,7 @@ use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use halo2_middleware::ff::Field; +use ff::Field; 
use group::Curve; use halo2curves::pairing::Engine; use halo2curves::CurveExt; diff --git a/backend/src/poly/kzg/multiopen/shplonk/verifier.rs b/common/src/poly/kzg/multiopen/shplonk/verifier.rs similarity index 99% rename from backend/src/poly/kzg/multiopen/shplonk/verifier.rs rename to common/src/poly/kzg/multiopen/shplonk/verifier.rs index f5a4d824f6..5d03940177 100644 --- a/backend/src/poly/kzg/multiopen/shplonk/verifier.rs +++ b/common/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -15,7 +15,7 @@ use crate::poly::kzg::strategy::GuardKZG; use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use halo2_middleware::ff::Field; +use ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; use std::ops::MulAssign; diff --git a/backend/src/poly/kzg/strategy.rs b/common/src/poly/kzg/strategy.rs similarity index 99% rename from backend/src/poly/kzg/strategy.rs rename to common/src/poly/kzg/strategy.rs index 78d182fbf6..ee80d800ac 100644 --- a/backend/src/poly/kzg/strategy.rs +++ b/common/src/poly/kzg/strategy.rs @@ -10,7 +10,7 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use halo2_middleware::ff::Field; +use ff::Field; use halo2curves::{ pairing::{Engine, MultiMillerLoop}, CurveAffine, CurveExt, diff --git a/backend/src/poly/multiopen_test.rs b/common/src/poly/multiopen_test.rs similarity index 99% rename from backend/src/poly/multiopen_test.rs rename to common/src/poly/multiopen_test.rs index b961e7d05d..47c6731167 100644 --- a/backend/src/poly/multiopen_test.rs +++ b/common/src/poly/multiopen_test.rs @@ -14,7 +14,7 @@ mod test { Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, TranscriptReadBuffer, TranscriptWriterBuffer, }; - use halo2_middleware::ff::WithSmallOrderMulGroup; + use ff::WithSmallOrderMulGroup; use group::Curve; use rand_core::OsRng; diff --git 
a/backend/src/poly/query.rs b/common/src/poly/query.rs similarity index 100% rename from backend/src/poly/query.rs rename to common/src/poly/query.rs diff --git a/backend/src/poly/strategy.rs b/common/src/poly/strategy.rs similarity index 100% rename from backend/src/poly/strategy.rs rename to common/src/poly/strategy.rs diff --git a/backend/src/transcript.rs b/common/src/transcript.rs similarity index 100% rename from backend/src/transcript.rs rename to common/src/transcript.rs diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index c6d33bfd6f..e69de29bb2 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -1,6 +0,0 @@ -#![allow(unused)] // TODO: Remove - -pub mod circuit; -pub mod error; -pub mod plonk; -pub mod poly; diff --git a/frontend/src/plonk.rs b/frontend/src/plonk.rs deleted file mode 100644 index 86da807379..0000000000 --- a/frontend/src/plonk.rs +++ /dev/null @@ -1,86 +0,0 @@ -use crate::error::Error; -use crate::poly::batch_invert_assigned; -use crate::poly::Polynomial; -use halo2_middleware::circuit::{CompiledCircuitV2, PreprocessingV2}; -use halo2_middleware::ff::Field; - -mod circuit; -mod lookup; -pub mod permutation; -mod shuffle; - -pub use circuit::*; - -/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the -/// circuit into its columns and the column configuration; as well as doing the fixed column and -/// copy constraints assignments. The output of this function can then be used for the key -/// generation, and proof generation. -/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
-pub fn compile_circuit>( - k: u32, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result< - ( - CompiledCircuitV2, - ConcreteCircuit::Config, - ConstraintSystem, - ), - Error, -> { - let n = 2usize.pow(k); - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let cs = cs; - - if n < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(k)); - } - - let mut assembly = crate::plonk::keygen::Assembly { - k, - fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), - selectors: vec![vec![false; n]; cs.num_selectors], - usable_rows: 0..n - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config.clone(), - cs.constants.clone(), - )?; - - let fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if compress_selectors { - cs.compress_selectors(assembly.selectors.clone()) - } else { - // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
- let selectors = std::mem::take(&mut assembly.selectors); - cs.directly_convert_selectors_to_fixed(selectors) - }; - let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); - fixed.extend(selector_polys.into_iter()); - - let preprocessing = PreprocessingV2 { - permutation: permutation::keygen::AssemblyMid { - copies: assembly.permutation.copies, - }, - fixed, - }; - - Ok(( - CompiledCircuitV2 { - cs: cs.clone().into(), - preprocessing, - }, - config, - cs, - )) -} diff --git a/frontend/src/plonk/circuit.rs b/frontend/src/plonk/circuit.rs deleted file mode 100644 index c90512a2ba..0000000000 --- a/frontend/src/plonk/circuit.rs +++ /dev/null @@ -1,1264 +0,0 @@ -use super::{lookup, permutation, shuffle}; -use crate::circuit::layouter::SyncDeps; -use crate::circuit::{Layouter, Region, Value}; -use crate::error::Error; -use core::cmp::max; -use core::ops::{Add, Mul}; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; -use halo2_middleware::ff::Field; -use halo2_middleware::metadata; -use halo2_middleware::plonk::Assigned; -use halo2_middleware::poly::Rotation; -use sealed::SealedPhase; -use std::collections::HashMap; -use std::iter::{Product, Sum}; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, -}; - -mod compress_selectors; - -pub(crate) mod sealed { - /// Phase of advice column - #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(crate) u8); - - impl Phase { - pub fn prev(&self) -> Option { - self.0.checked_sub(1).map(Phase) - } - } - - impl SealedPhase for Phase { - fn to_sealed(self) -> Phase { - self - } - } - - /// Sealed trait to help keep `Phase` private. 
- pub trait SealedPhase { - fn to_sealed(self) -> Phase; - } -} - -/// Phase of advice column -pub trait Phase: SealedPhase {} - -impl Phase for P {} - -/// First phase -#[derive(Debug)] -pub struct FirstPhase; - -impl SealedPhase for super::FirstPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(0) - } -} - -/// Second phase -#[derive(Debug)] -pub struct SecondPhase; - -impl SealedPhase for super::SecondPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(1) - } -} - -/// Third phase -#[derive(Debug)] -pub struct ThirdPhase; - -impl SealedPhase for super::ThirdPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(2) - } -} - -/// A selector, representing a fixed boolean value per row of the circuit. -/// -/// Selectors can be used to conditionally enable (portions of) gates: -/// ``` -/// use halo2_proofs::poly::Rotation; -/// # use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let a = meta.query_advice(a, Rotation::prev()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let s = meta.query_selector(s); -/// -/// // On rows where the selector is enabled, a is constrained to equal b. -/// // On rows where the selector is disabled, a and b can take any value. 
-/// vec![s * (a - b)] -/// }); -/// ``` -/// -/// Selectors are disabled on all rows by default, and must be explicitly enabled on each -/// row when required: -/// ``` -/// use halo2_proofs::{ -/// circuit::{Chip, Layouter, Value}, -/// plonk::{Advice, Column, Error, Selector}, -/// }; -/// use ff::Field; -/// # use halo2_proofs::plonk::Fixed; -/// -/// struct Config { -/// a: Column, -/// b: Column, -/// s: Selector, -/// } -/// -/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { -/// let config = chip.config(); -/// # let config: Config = todo!(); -/// layouter.assign_region(|| "bar", |mut region| { -/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; -/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; -/// config.s.enable(&mut region, 1) -/// })?; -/// Ok(()) -/// } -/// ``` -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Selector(pub(crate) usize, bool); - -impl Selector { - /// Enable this selector at the given offset within the given region. - pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { - region.enable_selector(|| "", self, offset) - } - - /// Is this selector "simple"? Simple selectors can only be multiplied - /// by expressions that contain no other simple selectors. - pub fn is_simple(&self) -> bool { - self.1 - } - - /// Returns index of this selector - pub fn index(&self) -> usize { - self.0 - } - - /// Return expression from selector - pub fn expr(&self) -> Expression { - Expression::Selector(*self) - } -} - -/// A fixed column of a lookup table. -/// -/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns -/// can currently only contain a single table, but they may be used in multiple lookup -/// arguments via [`ConstraintSystem::lookup`]. 
-/// -/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; -/// they cannot simultaneously be used as general fixed columns. -/// -/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct TableColumn { - /// The fixed column that this table column is stored in. - /// - /// # Security - /// - /// This inner column MUST NOT be exposed in the public API, or else chip developers - /// can load lookup tables into their circuits without default-value-filling the - /// columns, which can cause soundness bugs. - inner: Column, -} - -impl TableColumn { - /// Returns inner column - pub fn inner(&self) -> Column { - self.inner - } -} - -/// This trait allows a [`Circuit`] to direct some backend to assign a witness -/// for a constraint system. -pub trait Assignment { - /// Creates a new region and enters into it. - /// - /// Panics if we are currently in a region (if `exit_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn enter_region(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Allows the developer to include an annotation for an specific column within a `Region`. - /// - /// This is usually useful for debugging circuit failures. - fn annotate_column(&mut self, annotation: A, column: Column) - where - A: FnOnce() -> AR, - AR: Into; - - /// Exits the current region. - /// - /// Panics if we are not currently in a region (if `enter_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn exit_region(&mut self); - - /// Enables a selector at the given row. 
- fn enable_selector( - &mut self, - annotation: A, - selector: &Selector, - row: usize, - ) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into; - - /// Queries the cell of an instance column at a particular absolute row. - /// - /// Returns the cell's value, if known. - fn query_instance(&self, column: Column, row: usize) -> Result, Error>; - - /// Assign an advice column value (witness) - fn assign_advice( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign a fixed value - fn assign_fixed( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign two cells to have the same value - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error>; - - /// Fills a fixed `column` starting from the given `row` with value `to`. - fn fill_from_row( - &mut self, - column: Column, - row: usize, - to: Value>, - ) -> Result<(), Error>; - - /// Queries the value of the given challenge. - /// - /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. - fn get_challenge(&self, challenge: Challenge) -> Value; - - /// Creates a new (sub)namespace and enters into it. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Exits out of the existing namespace. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. 
- /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn pop_namespace(&mut self, gadget_name: Option); -} - -/// A floor planning strategy for a circuit. -/// -/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used -/// within. -pub trait FloorPlanner { - /// Given the provided `cs`, synthesize the given circuit. - /// - /// `constants` is the list of fixed columns that the layouter may use to assign - /// global constant values. These columns will all have been equality-enabled. - /// - /// Internally, a floor planner will perform the following operations: - /// - Instantiate a [`Layouter`] for this floor planner. - /// - Perform any necessary setup or measurement tasks, which may involve one or more - /// calls to `Circuit::default().synthesize(config, &mut layouter)`. - /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error>; -} - -/// This is a trait that circuits provide implementations for so that the -/// backend prover can ask the circuit to synthesize using some given -/// [`ConstraintSystem`] implementation. -pub trait Circuit { - /// This is a configuration object that stores things like columns. - type Config: Clone; - /// The floor planner used for this circuit. This is an associated type of the - /// `Circuit` trait because its behaviour is circuit-critical. - type FloorPlanner: FloorPlanner; - /// Optional circuit configuration parameters. Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - type Params: Default; - - /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to - /// `None`). For most circuits, this will be equal to `Self::default()`. - fn without_witnesses(&self) -> Self; - - /// Returns a reference to the parameters that should be used to configure the circuit. 
- /// Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - fn params(&self) -> Self::Params { - Self::Params::default() - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. Takes a runtime parameter. The default - /// implementation calls `configure` ignoring the `_params` argument in order to easily support - /// circuits that don't use configuration parameters. - #[cfg(feature = "circuit-params")] - fn configure_with_params( - meta: &mut ConstraintSystem, - _params: Self::Params, - ) -> Self::Config { - Self::configure(meta) - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. - fn configure(meta: &mut ConstraintSystem) -> Self::Config; - - /// Given the provided `cs`, synthesize the circuit. The concrete type of - /// the caller will be different depending on the context, and they may or - /// may not expect to have a witness present. 
- fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl FixedQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, - /// Phase of this advice column - pub(crate) phase: sealed::Phase, -} - -impl AdviceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } - - /// Phase of this advice column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl InstanceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. 
-#[derive(Clone, PartialEq, Eq)] -pub enum Expression { - /// This is a constant polynomial - Constant(F), - /// This is a virtual selector - Selector(Selector), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQuery), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQuery), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQuery), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl Expression { - /// Make side effects - pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { - match self { - Expression::Constant(_) => (), - Expression::Selector(selector) => { - if !cells.queried_selectors.contains(selector) { - cells.queried_selectors.push(*selector); - } - } - Expression::Fixed(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Fixed, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); - } - } - Expression::Advice(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Advice { phase: query.phase }, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_advice_index(col, query.rotation)); - } - } - Expression::Instance(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Instance, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_instance_index(col, query.rotation)); - } - } - Expression::Challenge(_) => (), - Expression::Negated(a) => a.query_cells(cells), 
- Expression::Sum(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Product(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Scaled(a, _) => a.query_cells(cells), - }; - } - - /// Evaluate the polynomial using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - 
challenge, - negated, - sum, - product, - scaled, - ); - product(a, b) - } - Expression::Scaled(a, f) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - scaled(a, *f) - } - } - } - - /// Evaluate the polynomial lazily using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate_lazy( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - zero: &T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - let b = b.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let (a, b) = if a.complexity() <= b.complexity() { - (a, b) - } else { - (b, a) - }; - let a = a.evaluate_lazy( - constant, - 
selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - - if a == *zero { - a - } else { - let b = b.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - product(a, b) - } - } - Expression::Scaled(a, f) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - scaled(a, *f) - } - } - } - - fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { - match self { - Expression::Constant(scalar) => write!(writer, "{scalar:?}"), - Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), - Expression::Fixed(query) => { - write!( - writer, - "fixed[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Advice(query) => { - write!( - writer, - "advice[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Instance(query) => { - write!( - writer, - "instance[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Challenge(challenge) => { - write!(writer, "challenge[{}]", challenge.index()) - } - Expression::Negated(a) => { - writer.write_all(b"(-")?; - a.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Sum(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"+")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Product(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"*")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Scaled(a, f) => { - a.write_identifier(writer)?; - write!(writer, "*{f:?}") - } - } - } - - /// Identifier for this expression. 
Expressions with identical identifiers - /// do the same calculation (but the expressions don't need to be exactly equal - /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). - pub fn identifier(&self) -> String { - let mut cursor = std::io::Cursor::new(Vec::new()); - self.write_identifier(&mut cursor).unwrap(); - String::from_utf8(cursor.into_inner()).unwrap() - } - - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.degree(), - Expression::Sum(a, b) => max(a.degree(), b.degree()), - Expression::Product(a, b) => a.degree() + b.degree(), - Expression::Scaled(poly, _) => poly.degree(), - } - } - - /// Approximate the computational complexity of this expression. - pub fn complexity(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.complexity() + 5, - Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, - Expression::Product(a, b) => a.complexity() + b.complexity() + 30, - Expression::Scaled(poly, _) => poly.complexity() + 30, - } - } - - /// Square this expression. - pub fn square(self) -> Self { - self.clone() * self - } - - /// Returns whether or not this expression contains a simple `Selector`. 
- fn contains_simple_selector(&self) -> bool { - self.evaluate( - &|_| false, - &|selector| selector.is_simple(), - &|_| false, - &|_| false, - &|_| false, - &|_| false, - &|a| a, - &|a, b| a || b, - &|a, b| a || b, - &|a, _| a, - ) - } - - /// Extracts a simple selector from this gate, if present - fn extract_simple_selector(&self) -> Option { - let op = |a, b| match (a, b) { - (Some(a), None) | (None, Some(a)) => Some(a), - (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), - _ => None, - }; - - self.evaluate( - &|_| None, - &|selector| { - if selector.is_simple() { - Some(selector) - } else { - None - } - }, - &|_| None, - &|_| None, - &|_| None, - &|_| None, - &|a| a, - &op, - &op, - &|a, _| a, - ) - } -} - -impl std::fmt::Debug for Expression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), - Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), - // Skip enum variant and print query struct directly to maintain backwards compatibility. - Expression::Fixed(query) => { - let mut debug_struct = f.debug_struct("Fixed"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Advice(query) => { - let mut debug_struct = f.debug_struct("Advice"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation); - // Only show advice's phase if it's not in first phase. 
- if query.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &query.phase); - } - debug_struct.finish() - } - Expression::Instance(query) => { - let mut debug_struct = f.debug_struct("Instance"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Challenge(challenge) => { - f.debug_tuple("Challenge").field(challenge).finish() - } - Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), - Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), - Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), - Expression::Scaled(poly, scalar) => { - f.debug_tuple("Scaled").field(poly).field(scalar).finish() - } - } - } -} - -impl Neg for Expression { - type Output = Expression; - fn neg(self) -> Self::Output { - Expression::Negated(Box::new(self)) - } -} - -impl Add for Expression { - type Output = Expression; - fn add(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in an addition"); - } - Expression::Sum(Box::new(self), Box::new(rhs)) - } -} - -impl Sub for Expression { - type Output = Expression; - fn sub(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in a subtraction"); - } - Expression::Sum(Box::new(self), Box::new(-rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() && rhs.contains_simple_selector() { - panic!("attempted to multiply two expressions containing simple selectors"); - } - Expression::Product(Box::new(self), Box::new(rhs)) - } -} - -impl Mul for 
Expression { - type Output = Expression; - fn mul(self, rhs: F) -> Expression { - Expression::Scaled(Box::new(self), rhs) - } -} - -impl Sum for Expression { - fn sum>(iter: I) -> Self { - iter.reduce(|acc, x| acc + x) - .unwrap_or(Expression::Constant(F::ZERO)) - } -} - -impl Product for Expression { - fn product>(iter: I) -> Self { - iter.reduce(|acc, x| acc * x) - .unwrap_or(Expression::Constant(F::ONE)) - } -} - -/// An individual polynomial constraint. -/// -/// These are returned by the closures passed to `ConstraintSystem::create_gate`. -#[derive(Debug)] -pub struct Constraint { - name: String, - poly: Expression, -} - -impl From> for Constraint { - fn from(poly: Expression) -> Self { - Constraint { - name: "".to_string(), - poly, - } - } -} - -impl> From<(S, Expression)> for Constraint { - fn from((name, poly): (S, Expression)) -> Self { - Constraint { - name: name.as_ref().to_string(), - poly, - } - } -} - -impl From> for Vec> { - fn from(poly: Expression) -> Self { - vec![Constraint { - name: "".to_string(), - poly, - }] - } -} - -/// A set of polynomial constraints with a common selector. 
-/// -/// ``` -/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; -/// use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let next = meta.query_advice(a, Rotation::next()); -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s_ternary = meta.query_selector(s); -/// -/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); -/// -/// Constraints::with_selector( -/// s_ternary, -/// std::array::IntoIter::new([ -/// ("a is boolean", a.clone() * one_minus_a.clone()), -/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), -/// ]), -/// ) -/// }); -/// ``` -/// -/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to -/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, -/// you can pass an array directly. -#[derive(Debug)] -pub struct Constraints>, Iter: IntoIterator> { - selector: Expression, - constraints: Iter, -} - -impl>, Iter: IntoIterator> Constraints { - /// Constructs a set of constraints that are controlled by the given selector. - /// - /// Each constraint `c` in `iterator` will be converted into the constraint - /// `selector * c`. 
- pub fn with_selector(selector: Expression, constraints: Iter) -> Self { - Constraints { - selector, - constraints, - } - } -} - -fn apply_selector_to_constraint>>( - (selector, c): (Expression, C), -) -> Constraint { - let constraint: Constraint = c.into(); - Constraint { - name: constraint.name, - poly: selector * constraint.poly, - } -} - -type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; -type ConstraintsIterator = std::iter::Map< - std::iter::Zip>, I>, - ApplySelectorToConstraint, ->; - -impl>, Iter: IntoIterator> IntoIterator - for Constraints -{ - type Item = Constraint; - type IntoIter = ConstraintsIterator; - - fn into_iter(self) -> Self::IntoIter { - std::iter::repeat(self.selector) - .zip(self.constraints) - .map(apply_selector_to_constraint) - } -} - -/// Gate -#[derive(Clone, Debug)] -pub struct Gate { - name: String, - constraint_names: Vec, - polys: Vec>, - /// We track queried selectors separately from other cells, so that we can use them to - /// trigger debug checks on gates. - queried_selectors: Vec, - queried_cells: Vec, -} - -impl Gate { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the name of the constraint at index `constraint_index`. - pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { - &self.polys - } - - pub(crate) fn queried_selectors(&self) -> &[Selector] { - &self.queried_selectors - } - - pub(crate) fn queried_cells(&self) -> &[VirtualCell] { - &self.queried_cells - } -} - -/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup -/// table. 
-#[derive(Debug)] -pub struct VirtualCells<'a, F: Field> { - meta: &'a mut ConstraintSystem, - queried_selectors: Vec, - queried_cells: Vec, -} - -impl<'a, F: Field> VirtualCells<'a, F> { - fn new(meta: &'a mut ConstraintSystem) -> Self { - VirtualCells { - meta, - queried_selectors: vec![], - queried_cells: vec![], - } - } - - /// Query a selector at the current position. - pub fn query_selector(&mut self, selector: Selector) -> Expression { - self.queried_selectors.push(selector); - Expression::Selector(selector) - } - - /// Query a fixed column at a relative position - pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Fixed(FixedQuery { - index: Some(self.meta.query_fixed_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an advice column at a relative position - pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Advice(AdviceQuery { - index: Some(self.meta.query_advice_index(column, at)), - column_index: column.index, - rotation: at, - phase: column.column_type().phase, - }) - } - - /// Query an instance column at a relative position - pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Instance(InstanceQuery { - index: Some(self.meta.query_instance_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an Any column at a relative position - pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { - let column = column.into(); - match column.column_type() { - Any::Advice(_) => self.query_advice(Column::::try_from(column).unwrap(), at), - Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), - Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), - } - } - - /// Query a 
challenge - pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { - Expression::Challenge(challenge) - } -} - -/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset -/// within a custom gate. -#[derive(Clone, Debug)] -pub struct VirtualCell { - pub(crate) column: Column, - pub(crate) rotation: Rotation, -} - -impl>> From<(Col, Rotation)> for VirtualCell { - fn from((column, rotation): (Col, Rotation)) -> Self { - VirtualCell { - column: column.into(), - rotation, - } - } -} diff --git a/frontend/src/plonk/lookup.rs b/frontend/src/plonk/lookup.rs deleted file mode 100644 index 5182850dc0..0000000000 --- a/frontend/src/plonk/lookup.rs +++ /dev/null @@ -1,10 +0,0 @@ -use super::circuit::Expression; -use halo2_middleware::ff::Field; - -/// Expressions involved in a lookup argument, with a name as metadata. -#[derive(Clone)] -pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, -} diff --git a/frontend/src/plonk/permutation.rs b/frontend/src/plonk/permutation.rs deleted file mode 100644 index 94ad432394..0000000000 --- a/frontend/src/plonk/permutation.rs +++ /dev/null @@ -1,8 +0,0 @@ -use halo2_middleware::circuit::{Any, Column}; - -/// A permutation argument. -#[derive(Debug, Clone)] -pub struct Argument { - /// A sequence of columns involved in the argument. - pub(super) columns: Vec>, -} diff --git a/frontend/src/plonk/shuffle.rs b/frontend/src/plonk/shuffle.rs deleted file mode 100644 index c109eea9a6..0000000000 --- a/frontend/src/plonk/shuffle.rs +++ /dev/null @@ -1,10 +0,0 @@ -use super::circuit::Expression; -use halo2_middleware::ff::Field; - -/// Expressions involved in a shuffle argument, with a name as metadata. 
-#[derive(Clone)] -pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, -} diff --git a/frontend/src/poly.rs b/frontend/src/poly.rs deleted file mode 100644 index 7be272ca0a..0000000000 --- a/frontend/src/poly.rs +++ /dev/null @@ -1,85 +0,0 @@ -use halo2_middleware::ff::{BatchInvert, Field}; -use halo2_middleware::plonk::Assigned; -use std::fmt::Debug; -use std::marker::PhantomData; - -// TODO: We only need the batch_invert_assigned from all this code, probably we can simplify this a -// lot - -/// Represents a univariate polynomial defined over a field and a particular -/// basis. -#[derive(Clone, Debug)] -pub struct Polynomial { - pub(crate) values: Vec, - pub(crate) _marker: PhantomData, -} - -impl Polynomial { - pub(crate) fn new_empty(size: usize, zero: F) -> Self { - Polynomial { - values: vec![zero; size], - _marker: PhantomData, - } - } -} - -/// The basis over which a polynomial is described. -pub trait Basis: Copy + Debug + Send + Sync {} - -/// The polynomial is defined as coefficients -#[derive(Clone, Copy, Debug)] -pub struct Coeff; -impl Basis for Coeff {} - -/// The polynomial is defined as coefficients of Lagrange basis polynomials -#[derive(Clone, Copy, Debug)] -pub struct LagrangeCoeff; -impl Basis for LagrangeCoeff {} - -pub(crate) fn batch_invert_assigned( - assigned: Vec, LagrangeCoeff>>, -) -> Vec> { - let mut assigned_denominators: Vec<_> = assigned - .iter() - .map(|f| { - f.values - .iter() - .map(|value| value.denominator()) - .collect::>() - }) - .collect(); - - assigned_denominators - .iter_mut() - .flat_map(|f| { - f.iter_mut() - // If the denominator is trivial, we can skip it, reducing the - // size of the batch inversion. 
- .filter_map(|d| d.as_mut()) - }) - .batch_invert(); - - assigned - .iter() - .zip(assigned_denominators) - .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) - .collect() -} - -impl Polynomial, LagrangeCoeff> { - pub(crate) fn invert( - &self, - inv_denoms: impl Iterator + ExactSizeIterator, - ) -> Polynomial { - assert_eq!(inv_denoms.len(), self.values.len()); - Polynomial { - values: self - .values - .iter() - .zip(inv_denoms) - .map(|(a, inv_den)| a.numerator() * inv_den) - .collect(), - _marker: self._marker, - } - } -} diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 19d327a196..b248e74208 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -36,8 +36,8 @@ pub struct InstanceQueryMid { /// A challenge squeezed from transcript after advice columns at the phase have been committed. #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] pub struct Challenge { - index: usize, - pub(crate) phase: u8, + pub index: usize, + pub phase: u8, } impl Challenge { From de024999a55d30b0f6a4ab7882e17da2ba813f58 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 13:26:58 +0000 Subject: [PATCH 37/79] Checkpoint --- common/src/plonk/circuit.rs | 9 ++++----- common/src/plonk/shuffle.rs | 8 -------- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 0b4fee82e8..fe5fd0037c 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -12,7 +12,6 @@ use ff::Field; use halo2_middleware::circuit::{ AdviceQueryMid, Challenge, ExpressionMid, FixedQueryMid, InstanceQueryMid, }; -use halo2_middleware::lookup::ArgumentV2; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::BTreeSet; @@ -1715,11 +1714,11 @@ pub struct ConstraintSystemV2Backend { // Vector of lookup arguments, where each corresponds to a sequence of // input expressions and a sequence of table expressions 
involved in the lookup. - pub(crate) lookups: Vec>, + pub(crate) lookups: Vec>, // Vector of shuffle arguments, where each corresponds to a sequence of // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, + pub(crate) shuffles: Vec>, // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. pub(crate) general_column_annotations: HashMap, @@ -1756,7 +1755,7 @@ impl Into> for ConstraintSystem { lookups: self .lookups .iter() - .map(|l| ArgumentV2 { + .map(|l| halo2_middleware::lookup::ArgumentV2 { name: l.name.clone(), input_expressions: l .input_expressions @@ -1775,7 +1774,7 @@ impl Into> for ConstraintSystem { shuffles: self .shuffles .iter() - .map(|s| shuffle::ArgumentV2 { + .map(|s| halo2_middleware::shuffle::ArgumentV2 { name: s.name.clone(), input_expressions: s .input_expressions diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index 8656a3b854..496f691cd5 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -6,14 +6,6 @@ use std::fmt::{self, Debug}; pub(crate) mod prover; pub(crate) mod verifier; -/// Expressions involved in a shuffle argument, with a name as metadata. -#[derive(Clone, Debug)] -pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, -} - /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { From adcbe4b4a4ca20cc7fcb330a3da8f1243b45baa9 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 13:54:00 +0000 Subject: [PATCH 38/79] Checkpoint --- common/src/circuit.rs | 4 +- .../src/circuit/floor_planner/single_pass.rs | 7 +- common/src/circuit/floor_planner/v1.rs | 7 +- .../src/circuit/floor_planner/v1/strategy.rs | 3 +- common/src/circuit/layouter.rs | 3 +- common/src/dev.rs | 8 +- common/src/dev/cost.rs | 7 +- common/src/dev/failure.rs | 3 +- common/src/dev/failure/emitter.rs | 7 +- common/src/dev/metadata.rs | 7 +- common/src/dev/tfp.rs | 7 +- common/src/dev/util.rs | 10 +- common/src/plonk.rs | 4 + common/src/plonk/circuit.rs | 373 +----------------- common/src/plonk/error.rs | 2 +- common/src/plonk/evaluation.rs | 3 +- common/src/plonk/keygen.rs | 5 +- common/src/plonk/permutation.rs | 2 +- common/src/plonk/permutation/keygen.rs | 16 +- common/src/plonk/permutation/prover.rs | 3 +- common/src/plonk/permutation/verifier.rs | 3 +- common/src/plonk/prover.rs | 6 +- 22 files changed, 65 insertions(+), 425 deletions(-) diff --git a/common/src/circuit.rs b/common/src/circuit.rs index 546325edd4..bdb2da9bbd 100644 --- a/common/src/circuit.rs +++ b/common/src/circuit.rs @@ -4,8 +4,8 @@ use std::{fmt, marker::PhantomData}; use ff::Field; -use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector, TableColumn}; -use halo2_middleware::circuit::Challenge; +use crate::plonk::{Assigned, Error, Selector, TableColumn}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; mod value; pub use value::Value; diff --git a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index 2c3bb2437a..147b2a9626 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -11,12 +11,9 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, 
Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{ - Advice, Any, Assigned, Assignment, Circuit, Column, Error, Fixed, FloorPlanner, Instance, - Selector, TableColumn, - }, + plonk::{Assigned, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; /// A simple [`FloorPlanner`] that performs minimal optimizations. /// diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index 32af27d182..88f99b39db 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -8,12 +8,9 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{ - Advice, Any, Assigned, Assignment, Circuit, Column, Error, Fixed, FloorPlanner, Instance, - Selector, TableColumn, - }, + plonk::{Assigned, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; mod strategy; diff --git a/common/src/circuit/floor_planner/v1/strategy.rs b/common/src/circuit/floor_planner/v1/strategy.rs index 71745de245..86db63124d 100644 --- a/common/src/circuit/floor_planner/v1/strategy.rs +++ b/common/src/circuit/floor_planner/v1/strategy.rs @@ -5,7 +5,8 @@ use std::{ }; use super::{RegionColumn, RegionShape}; -use crate::{circuit::RegionStart, plonk::Any}; +use crate::circuit::RegionStart; +use halo2_middleware::circuit::Any; /// A region allocated within a column. 
#[derive(Clone, Default, Debug, PartialEq, Eq)] diff --git a/common/src/circuit/layouter.rs b/common/src/circuit/layouter.rs index f939c3fca5..b7583060e1 100644 --- a/common/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -8,7 +8,8 @@ use ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; -use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector}; +use crate::plonk::{Assigned, Error, Selector}; +use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. #[cfg(feature = "thread-safe-region")] diff --git a/common/src/dev.rs b/common/src/dev.rs index dc42abfa20..be9366956c 100644 --- a/common/src/dev.rs +++ b/common/src/dev.rs @@ -15,11 +15,11 @@ use crate::{ plonk::{ permutation, sealed::{self, SealedPhase}, - Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Expression, - FirstPhase, Fixed, FloorPlanner, Instance, Phase, Selector, + Assigned, Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, + FloorPlanner, Phase, Selector, }, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use crate::multicore::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, @@ -484,7 +484,7 @@ impl Assignment for MockProver { } Err(err) => { // Propagate `assign` error if the column is in current phase. 
- if self.in_phase(column.column_type().phase) { + if self.in_phase(sealed::Phase(column.column_type().phase)) { return Err(err); } } diff --git a/common/src/dev/cost.rs b/common/src/dev/cost.rs index 57a2191574..96ef53b093 100644 --- a/common/src/dev/cost.rs +++ b/common/src/dev/cost.rs @@ -14,12 +14,9 @@ use halo2_middleware::poly::Rotation; use crate::{ circuit::{layouter::RegionColumn, Value}, - plonk::{ - Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Fixed, - FloorPlanner, Instance, Selector, - }, + plonk::{Assigned, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; /// Measures a circuit to determine its costs, and explain what contributes to them. #[allow(dead_code)] diff --git a/common/src/dev/failure.rs b/common/src/dev/failure.rs index f9f5c27ded..09c93c24c6 100644 --- a/common/src/dev/failure.rs +++ b/common/src/dev/failure.rs @@ -13,8 +13,9 @@ use super::{ use crate::dev::metadata::Constraint; use crate::{ dev::{Instance, Value}, - plonk::{Any, Column, ConstraintSystem, Expression, Gate}, + plonk::{ConstraintSystem, Expression, Gate}, }; +use halo2_middleware::circuit::{Any, Column}; mod emitter; diff --git a/common/src/dev/failure/emitter.rs b/common/src/dev/failure/emitter.rs index 24109d599b..1bb3432940 100644 --- a/common/src/dev/failure/emitter.rs +++ b/common/src/dev/failure/emitter.rs @@ -6,8 +6,9 @@ use group::ff::Field; use super::FailureLocation; use crate::{ dev::{metadata, util}, - plonk::{Advice, Any, Expression}, + plonk::Expression, }; +use halo2_middleware::circuit::{Advice, Any}; fn padded(p: char, width: usize, text: &str) -> String { let pad = width - text.len(); @@ -165,7 +166,9 @@ pub(super) fn expression_to_string( .and_then(|map| { map.get( &( - Any::Advice(Advice { phase: query.phase }), + Any::Advice(Advice { + phase: query.phase.0, + }), 
query.column_index, ) .into(), diff --git a/common/src/dev/metadata.rs b/common/src/dev/metadata.rs index f81bfa67a7..171b2dff86 100644 --- a/common/src/dev/metadata.rs +++ b/common/src/dev/metadata.rs @@ -1,7 +1,8 @@ //! Metadata about circuits. use super::metadata::Column as ColumnMetadata; -use crate::plonk::{self, Any}; +use crate::plonk::{self}; +use halo2_middleware::circuit::Any; use std::{ collections::HashMap, fmt::{self, Debug}, @@ -38,8 +39,8 @@ impl From<(Any, usize)> for Column { } } -impl From> for Column { - fn from(column: plonk::Column) -> Self { +impl From> for Column { + fn from(column: halo2_middleware::circuit::Column) -> Self { Column { column_type: *column.column_type(), index: column.index(), diff --git a/common/src/dev/tfp.rs b/common/src/dev/tfp.rs index f5960ff4d9..82ec9abcfb 100644 --- a/common/src/dev/tfp.rs +++ b/common/src/dev/tfp.rs @@ -8,12 +8,9 @@ use crate::{ layouter::{RegionLayouter, SyncDeps}, AssignedCell, Cell, Layouter, Region, Table, Value, }, - plonk::{ - Advice, Any, Assigned, Assignment, Circuit, Column, ConstraintSystem, Error, Fixed, - FloorPlanner, Instance, Selector, - }, + plonk::{Assigned, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. 
/// diff --git a/common/src/dev/util.rs b/common/src/dev/util.rs index 0e1bef7e5b..df91f29b16 100644 --- a/common/src/dev/util.rs +++ b/common/src/dev/util.rs @@ -2,10 +2,8 @@ use group::ff::Field; use std::collections::BTreeMap; use super::{metadata, CellValue, InstanceValue, Value}; -use crate::plonk::{ - Advice, AdviceQuery, Any, Column, ColumnType, Expression, FixedQuery, Gate, InstanceQuery, - VirtualCell, -}; +use crate::plonk::{AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell}; +use halo2_middleware::circuit::{Advice, Any, Column, ColumnType}; use halo2_middleware::poly::Rotation; pub(crate) struct AnyQuery { @@ -34,7 +32,9 @@ impl From for AnyQuery { fn from(query: AdviceQuery) -> Self { Self { index: query.index, - column_type: Any::Advice(Advice { phase: query.phase }), + column_type: Any::Advice(Advice { + phase: query.phase.0, + }), column_index: query.column_index, rotation: query.rotation, } diff --git a/common/src/plonk.rs b/common/src/plonk.rs index ab361d27ec..aa88af62a4 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -19,6 +19,10 @@ use crate::poly::{ }; use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; use crate::SerdeFormat; +use halo2_middleware::circuit::{ + Advice, AdviceQueryMid, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, + Instance, InstanceQueryMid, PreprocessingV2, +}; use halo2_middleware::poly::Rotation; mod assigned; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index fe5fd0037c..0e274c657b 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -10,7 +10,8 @@ use core::cmp::max; use core::ops::{Add, Mul}; use ff::Field; use halo2_middleware::circuit::{ - AdviceQueryMid, Challenge, ExpressionMid, FixedQueryMid, InstanceQueryMid, + Advice, AdviceQueryMid, Any, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, + GateV2Backend, Instance, InstanceQueryMid, PreprocessingV2, }; use 
halo2_middleware::poly::Rotation; use sealed::SealedPhase; @@ -25,80 +26,7 @@ use std::{ mod compress_selectors; -/// A column type -pub trait ColumnType: - 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into -{ - /// Return expression from cell - fn query_cell(&self, index: usize, at: Rotation) -> Expression; -} - -/// A column with an index and type -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Column { - index: usize, - column_type: C, -} - -impl Column { - pub(crate) fn new(index: usize, column_type: C) -> Self { - Column { index, column_type } - } - - /// Index of this column. - pub fn index(&self) -> usize { - self.index - } - - /// Type of this column. - pub fn column_type(&self) -> &C { - &self.column_type - } - - /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> Expression { - self.column_type.query_cell(self.index, at) - } - - /// Return expression from column at the current row - pub fn cur(&self) -> Expression { - self.query_cell(Rotation::cur()) - } - - /// Return expression from column at the next row - pub fn next(&self) -> Expression { - self.query_cell(Rotation::next()) - } - - /// Return expression from column at the previous row - pub fn prev(&self) -> Expression { - self.query_cell(Rotation::prev()) - } - - /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> Expression { - self.query_cell(Rotation(rotation)) - } -} - -impl Ord for Column { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match self.column_type.into().cmp(&other.column_type.into()) { - // Indices are assigned within column types. 
- std::cmp::Ordering::Equal => self.index.cmp(&other.index), - order => order, - } - } -} - -impl PartialOrd for Column { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - +// TODO: Move sealed phase to frontend, and always use u8 in middleware and backend pub(crate) mod sealed { /// Phase of advice column #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] @@ -157,255 +85,6 @@ impl SealedPhase for super::ThirdPhase { } } -/// An advice column -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Advice { - pub(crate) phase: sealed::Phase, -} - -impl Default for Advice { - fn default() -> Advice { - Advice { - phase: FirstPhase.to_sealed(), - } - } -} - -impl Advice { - /// Returns `Advice` in given `Phase` - pub fn new(phase: P) -> Advice { - Advice { - phase: phase.to_sealed(), - } - } - - /// Phase of this column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -impl std::fmt::Debug for Advice { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. 
- if self.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &self.phase); - } - debug_struct.finish() - } -} - -/// A fixed column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Fixed; - -/// An instance column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Instance; - -/// An enum over the Advice, Fixed, Instance structs -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub enum Any { - /// An Advice variant - Advice(Advice), - /// A Fixed variant - Fixed, - /// An Instance variant - Instance, -} - -impl Any { - /// Returns Advice variant in `FirstPhase` - pub fn advice() -> Any { - Any::Advice(Advice::default()) - } - - /// Returns Advice variant in given `Phase` - pub fn advice_in(phase: P) -> Any { - Any::Advice(Advice::new(phase)) - } -} - -impl std::fmt::Debug for Any { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Any::Advice(advice) => { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. - if advice.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &advice.phase); - } - debug_struct.finish() - } - Any::Fixed => f.debug_struct("Fixed").finish(), - Any::Instance => f.debug_struct("Instance").finish(), - } - } -} - -impl Ord for Any { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match (self, other) { - (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, - (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), - // Across column types, sort Instance < Advice < Fixed. 
- (Any::Instance, Any::Advice(_)) - | (Any::Advice(_), Any::Fixed) - | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, - (Any::Fixed, Any::Instance) - | (Any::Fixed, Any::Advice(_)) - | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, - } - } -} - -impl PartialOrd for Any { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl ColumnType for Advice { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: self.phase, - }) - } -} -impl ColumnType for Fixed { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Instance { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Any { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - match self { - Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: *phase, - }), - Any::Fixed => Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }), - Any::Instance => Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }), - } - } -} - -impl From for Any { - fn from(advice: Advice) -> Any { - Any::Advice(advice) - } -} - -impl From for Any { - fn from(_: Fixed) -> Any { - Any::Fixed - } -} - -impl From for Any { - fn from(_: Instance) -> Any { - Any::Instance - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Advice(advice.column_type), - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - 
column_type: Any::Fixed, - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Instance, - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Advice(advice) => Ok(Column { - index: any.index(), - column_type: *advice, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Fixed => Ok(Column { - index: any.index(), - column_type: Fixed, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Instance => Ok(Column { - index: any.index(), - column_type: Instance, - }), - _ => Err("Cannot convert into Column"), - } - } -} - /// A selector, representing a fixed boolean value per row of the circuit. /// /// Selectors can be used to conditionally enable (portions of) gates: @@ -866,7 +545,9 @@ impl Expression { if query.index.is_none() { let col = Column { index: query.column_index, - column_type: Advice { phase: query.phase }, + column_type: Advice { + phase: query.phase.0, + }, }; cells.queried_cells.push((col, query.rotation).into()); query.index = Some(cells.meta.query_advice_index(col, query.rotation)); @@ -1533,25 +1214,6 @@ impl>, Iter: IntoIterator> IntoIterato } } -/// A Gate contains a single polynomial identity with a name as metadata. -#[derive(Clone, Debug)] -pub struct GateV2Backend { - name: String, - poly: ExpressionMid, -} - -impl GateV2Backend { - /// Returns the gate name. 
- pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the polynomial identity of this gate - pub fn polynomial(&self) -> &ExpressionMid { - &self.poly - } -} - /// Gate #[derive(Clone, Debug)] pub struct Gate { @@ -1589,14 +1251,6 @@ impl Gate { } } -/// Data that needs to be preprocessed from a circuit -#[derive(Debug, Clone)] -pub struct PreprocessingV2 { - // TODO(Edu): Can we replace this by a simpler structure? - pub(crate) permutation: permutation::keygen::AssemblyMid, - pub(crate) fixed: Vec>, -} - /// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System /// as well as the fixed columns and copy constraints information. #[derive(Debug, Clone)] @@ -1650,12 +1304,7 @@ impl QueriesMap { } ExpressionMid::Advice(query) => { let (col, rot) = ( - Column::new( - query.column_index, - Advice { - phase: sealed::Phase(query.phase), - }, - ), + Column::new(query.column_index, Advice { phase: query.phase }), query.rotation, ); let index = self.add_advice(col, rot); @@ -1961,7 +1610,7 @@ pub fn compile_circuit>( fixed.extend(selector_polys.into_iter()); let preprocessing = PreprocessingV2 { - permutation: permutation::keygen::AssemblyMid { + permutation: halo2_middleware::permutation::AssemblyMid { copies: assembly.permutation.copies, }, fixed, @@ -2788,7 +2437,7 @@ impl ConstraintSystem { let tmp = Column { index: self.num_advice_columns, - column_type: Advice { phase }, + column_type: Advice { phase: phase.0 }, }; self.unblinded_advice_columns.push(tmp.index); self.num_advice_columns += 1; @@ -2813,7 +2462,7 @@ impl ConstraintSystem { let tmp = Column { index: self.num_advice_columns, - column_type: Advice { phase }, + column_type: Advice { phase: phase.0 }, }; self.num_advice_columns += 1; self.num_advice_queries.push(0); @@ -3086,7 +2735,7 @@ impl<'a, F: Field> VirtualCells<'a, F> { index: Some(self.meta.query_advice_index(column, at)), column_index: column.index, rotation: at, - phase: 
column.column_type().phase, + phase: sealed::Phase(column.column_type().phase), }) } diff --git a/common/src/plonk/error.rs b/common/src/plonk/error.rs index 14d7339503..84ead5aa7c 100644 --- a/common/src/plonk/error.rs +++ b/common/src/plonk/error.rs @@ -3,7 +3,7 @@ use std::fmt; use std::io; use super::TableColumn; -use super::{Any, Column}; +use halo2_middleware::circuit::{Any, Column}; /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up diff --git a/common/src/plonk/evaluation.rs b/common/src/plonk/evaluation.rs index 66730b907e..2cd00a5f7c 100644 --- a/common/src/plonk/evaluation.rs +++ b/common/src/plonk/evaluation.rs @@ -1,11 +1,12 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; +use crate::plonk::{lookup, permutation, ProvingKey}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; +use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; use super::{shuffle, ConstraintSystem, Expression}; diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 3a4ba1ac14..4d505d9520 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -7,8 +7,7 @@ use group::Curve; use super::{ circuit::{ - compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, Fixed, Instance, Selector, + compile_circuit, Assignment, Circuit, CompiledCircuitV2, ConstraintSystem, Selector, }, evaluation::Evaluator, permutation, Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, @@ -21,7 +20,7 @@ use crate::{ EvaluationDomain, }, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; pub(crate) fn create_domain( k: u32, diff --git a/common/src/plonk/permutation.rs 
b/common/src/plonk/permutation.rs index 22c1fad6c3..8d1cbdb5d0 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -1,6 +1,5 @@ //! Implementation of permutation argument. -use super::circuit::{Any, Column}; use crate::{ arithmetic::CurveAffine, helpers::{ @@ -10,6 +9,7 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; +use halo2_middleware::circuit::{Any, Column}; pub(crate) mod keygen; pub(crate) mod prover; diff --git a/common/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs index 32ee0aa25e..c84fb75726 100644 --- a/common/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -4,12 +4,14 @@ use group::Curve; use super::{Argument, ProvingKey, VerifyingKey}; use crate::{ arithmetic::{parallelize, CurveAffine}, - plonk::{Any, Column, Error}, + plonk::Error, poly::{ commitment::{Blind, Params}, EvaluationDomain, }, }; +use halo2_middleware::circuit::{Any, Column}; +use halo2_middleware::permutation::{AssemblyMid, Cell}; #[cfg(feature = "thread-safe-region")] use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; @@ -34,18 +36,6 @@ pub struct Assembly { sizes: Vec>, } -// TODO: Dedup with other Cell definition -#[derive(Clone, Debug)] -pub struct Cell { - pub column: Column, - pub row: usize, -} - -#[derive(Clone, Debug)] -pub struct AssemblyMid { - pub copies: Vec<(Cell, Cell)>, -} - #[derive(Clone, Debug)] pub struct AssemblyFront { n: usize, diff --git a/common/src/plonk/permutation/prover.rs b/common/src/plonk/permutation/prover.rs index 3199b7d13c..cd4ad43797 100644 --- a/common/src/plonk/permutation/prover.rs +++ b/common/src/plonk/permutation/prover.rs @@ -6,7 +6,7 @@ use group::{ use rand_core::RngCore; use std::iter::{self, ExactSizeIterator}; -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use 
super::{Argument, ProvingKey}; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, @@ -17,6 +17,7 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; +use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; pub(crate) struct CommittedSet { diff --git a/common/src/plonk/permutation/verifier.rs b/common/src/plonk/permutation/verifier.rs index ef3cdba141..96ec55ef41 100644 --- a/common/src/plonk/permutation/verifier.rs +++ b/common/src/plonk/permutation/verifier.rs @@ -1,7 +1,7 @@ use ff::{Field, PrimeField}; use std::iter; -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use crate::{ arithmetic::CurveAffine, @@ -9,6 +9,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; pub struct Committed { diff --git a/common/src/plonk/prover.rs b/common/src/plonk/prover.rs index 8105c230b0..7daca34c5e 100644 --- a/common/src/plonk/prover.rs +++ b/common/src/plonk/prover.rs @@ -9,12 +9,12 @@ use super::{ circuit::{ compile_circuit, sealed::{self}, - Advice, Any, Assignment, Circuit, Column, Fixed, Instance, Selector, WitnessCalculator, + Assignment, Circuit, Selector, WitnessCalculator, }, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, }; -use halo2_middleware::circuit::Challenge; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use crate::{ arithmetic::{eval_polynomial, CurveAffine}, @@ -810,7 +810,7 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { AR: Into, { // Ignore assignment of advice column in different phase than current one. 
- if self.current_phase != column.column_type().phase { + if self.current_phase.0 != column.column_type().phase { return Ok(()); } From e54c39cae2ff225d06dd794687f5f4db6caa708b Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 15:37:04 +0000 Subject: [PATCH 39/79] Checkpoint --- common/src/dev/failure.rs | 5 +- common/src/dev/metadata.rs | 41 +-- common/src/plonk/circuit.rs | 335 ++++++++++++++----------- common/src/plonk/keygen.rs | 12 +- common/src/plonk/permutation.rs | 9 + common/src/plonk/permutation/keygen.rs | 6 +- middleware/src/circuit.rs | 2 +- middleware/src/metadata.rs | 4 +- middleware/src/permutation.rs | 60 +---- 9 files changed, 215 insertions(+), 259 deletions(-) diff --git a/common/src/dev/failure.rs b/common/src/dev/failure.rs index 09c93c24c6..ee369ba2a8 100644 --- a/common/src/dev/failure.rs +++ b/common/src/dev/failure.rs @@ -50,7 +50,10 @@ impl fmt::Display for FailureLocation { impl FailureLocation { /// Returns a `DebugColumn` from Column metadata and `&self`. - pub(super) fn get_debug_column(&self, metadata: metadata::Column) -> DebugColumn { + pub(super) fn get_debug_column( + &self, + metadata: halo2_middleware::metadata::Column, + ) -> DebugColumn { match self { Self::InRegion { region, .. } => { DebugColumn::from((metadata, region.column_annotations.as_ref())) diff --git a/common/src/dev/metadata.rs b/common/src/dev/metadata.rs index 171b2dff86..5ca99410b0 100644 --- a/common/src/dev/metadata.rs +++ b/common/src/dev/metadata.rs @@ -3,50 +3,11 @@ use super::metadata::Column as ColumnMetadata; use crate::plonk::{self}; use halo2_middleware::circuit::Any; +pub use halo2_middleware::metadata::Column; use std::{ collections::HashMap, fmt::{self, Debug}, }; -/// Metadata about a column within a circuit. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Column { - /// The type of the column. - pub(super) column_type: Any, - /// The index of the column. 
- pub(super) index: usize, -} - -impl Column { - /// Return the column type. - pub fn column_type(&self) -> Any { - self.column_type - } - /// Return the column index. - pub fn index(&self) -> usize { - self.index - } -} - -impl fmt::Display for Column { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Column('{:?}', {})", self.column_type, self.index) - } -} - -impl From<(Any, usize)> for Column { - fn from((column_type, index): (Any, usize)) -> Self { - Column { column_type, index } - } -} - -impl From> for Column { - fn from(column: halo2_middleware::circuit::Column) -> Self { - Column { - column_type: *column.column_type(), - index: column.index(), - } - } -} /// A helper structure that allows to print a Column with it's annotation as a single structure. #[derive(Debug, Clone)] diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 0e274c657b..d692738796 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1,6 +1,5 @@ use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; use crate::circuit::layouter::SyncDeps; -use crate::dev::metadata; use crate::plonk::WitnessCollection; use crate::{ circuit::{Layouter, Region, Value}, @@ -10,9 +9,11 @@ use core::cmp::max; use core::ops::{Add, Mul}; use ff::Field; use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, - GateV2Backend, Instance, InstanceQueryMid, PreprocessingV2, + Advice, AdviceQueryMid, Any, Challenge, Column, CompiledCircuitV2, ConstraintSystemV2Backend, + ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, + PreprocessingV2, }; +use halo2_middleware::metadata; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::BTreeSet; @@ -1251,14 +1252,6 @@ impl Gate { } } -/// This is a description of a low level Plonkish compiled circuit. 
Contains the Constraint System -/// as well as the fixed columns and copy constraints information. -#[derive(Debug, Clone)] -pub struct CompiledCircuitV2 { - pub(crate) preprocessing: PreprocessingV2, - pub(crate) cs: ConstraintSystemV2Backend, -} - struct QueriesMap { advice_map: HashMap<(Column, Rotation), usize>, instance_map: HashMap<(Column, Rotation), usize>, @@ -1339,40 +1332,80 @@ impl QueriesMap { } } -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystemV2Backend { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. 
- pub(crate) general_column_annotations: HashMap, +impl From> for ConstraintSystemV2Backend { + fn from(cs: ConstraintSystem) -> Self { + ConstraintSystemV2Backend { + num_fixed_columns: cs.num_fixed_columns, + num_advice_columns: cs.num_advice_columns, + num_instance_columns: cs.num_instance_columns, + num_challenges: cs.num_challenges, + unblinded_advice_columns: cs.unblinded_advice_columns.clone(), + advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), + gates: cs + .gates + .iter() + .map(|g| { + g.polys.clone().into_iter().enumerate().map(|(i, e)| { + let name = match g.constraint_name(i) { + "" => g.name.clone(), + constraint_name => format!("{}:{}", g.name, constraint_name), + }; + GateV2Backend { + name, + poly: e.into(), + } + }) + }) + .flatten() + .collect(), + permutation: halo2_middleware::permutation::ArgumentV2 { + columns: cs.permutation.columns.clone(), + }, + lookups: cs + .lookups + .iter() + .map(|l| halo2_middleware::lookup::ArgumentV2 { + name: l.name.clone(), + input_expressions: l + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + table_expressions: l + .table_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + shuffles: cs + .shuffles + .iter() + .map(|s| halo2_middleware::shuffle::ArgumentV2 { + name: s.name.clone(), + input_expressions: s + .input_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + shuffle_expressions: s + .shuffle_expressions + .clone() + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + general_column_annotations: cs.general_column_annotations.clone(), + } + } } +/* impl Into> for ConstraintSystem { fn into(self) -> ConstraintSystemV2Backend { ConstraintSystemV2Backend { @@ -1443,6 +1476,7 @@ impl Into> for ConstraintSystem { } } } +*/ /// Witness calculator. 
Frontend function #[derive(Debug)] @@ -1626,116 +1660,121 @@ pub fn compile_circuit>( )) } -impl ConstraintSystemV2Backend { - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { - self.gates - .iter() - .map(|gate| Gate { - name: gate.name.clone(), - constraint_names: Vec::new(), - polys: vec![queries.as_expression(gate.polynomial())], - queried_selectors: Vec::new(), // Unused? - queried_cells: Vec::new(), // Unused? - }) - .collect() - } - - /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { - self.lookups - .iter() - .map(|lookup| lookup::Argument { - name: lookup.name.clone(), - input_expressions: lookup - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - table_expressions: lookup - .table_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() - } - - /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { - self.shuffles - .iter() - .map(|shuffle| shuffle::Argument { - name: shuffle.name.clone(), - input_expressions: shuffle - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - shuffle_expressions: shuffle - .shuffle_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() - } +/// Collect queries used in gates while mapping those gates to equivalent ones with indexed +/// query references in the expressions. 
+fn cs2_collect_queries_gates( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.gates + .iter() + .map(|gate| Gate { + name: gate.name.clone(), + constraint_names: Vec::new(), + polys: vec![queries.as_expression(gate.polynomial())], + queried_selectors: Vec::new(), // Unused? + queried_cells: Vec::new(), // Unused? + }) + .collect() +} - /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the - /// expressions of gates, lookups and shuffles into equivalent ones with indexed query - /// references. - pub(crate) fn collect_queries( - &self, - ) -> ( - Queries, - Vec>, - Vec>, - Vec>, - ) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; +/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. +fn cs2_collect_queries_lookups( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} - let gates = self.collect_queries_gates(&mut queries); - let lookups = self.collect_queries_lookups(&mut queries); - let shuffles = self.collect_queries_shuffles(&mut queries); +/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. 
+fn cs2_collect_queries_shuffles( + cs2: &ConstraintSystemV2Backend, + queries: &mut QueriesMap, +) -> Vec> { + cs2.shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} - // Each column used in a copy constraint involves a query at rotation current. - for column in self.permutation.get_columns() { - match column.column_type { - Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) - } - Any::Fixed => { - queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) - } - Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) - } - }; - } +/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the +/// expressions of gates, lookups and shuffles into equivalent ones with indexed query +/// references. +pub(crate) fn collect_queries( + cs2: &ConstraintSystemV2Backend, +) -> ( + Queries, + Vec>, + Vec>, + Vec>, +) { + let mut queries = QueriesMap { + advice_map: HashMap::new(), + instance_map: HashMap::new(), + fixed_map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; - let mut num_advice_queries = vec![0; self.num_advice_columns]; - for (column, _) in queries.advice.iter() { - num_advice_queries[column.index()] += 1; - } + let gates = cs2_collect_queries_gates(cs2, &mut queries); + let lookups = cs2_collect_queries_lookups(cs2, &mut queries); + let shuffles = cs2_collect_queries_shuffles(cs2, &mut queries); - let queries = Queries { - advice: queries.advice, - instance: queries.instance, - fixed: queries.fixed, - num_advice_queries, + // Each column used in a copy constraint involves a query at rotation current. 
+ for column in &cs2.permutation.columns { + match column.column_type { + Any::Instance => { + queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + } + Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), + Any::Advice(advice) => { + queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + } }; - (queries, gates, lookups, shuffles) } + + let mut num_advice_queries = vec![0; cs2.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index()] += 1; + } + + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) } /// This is a description of the circuit environment, such as the gate, column and @@ -1793,7 +1832,7 @@ pub struct ConstraintSystem { impl From> for ConstraintSystem { fn from(cs2: ConstraintSystemV2Backend) -> Self { - let (queries, gates, lookups, shuffles) = cs2.collect_queries(); + let (queries, gates, lookups, shuffles) = collect_queries(&cs2); ConstraintSystem { num_fixed_columns: cs2.num_fixed_columns, num_advice_columns: cs2.num_advice_columns, @@ -1813,7 +1852,7 @@ impl From> for ConstraintSystem { num_advice_queries: queries.num_advice_queries, instance_queries: queries.instance, fixed_queries: queries.fixed, - permutation: cs2.permutation, + permutation: cs2.permutation.into(), lookups, shuffles, general_column_annotations: cs2.general_column_annotations, diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 4d505d9520..a64b754e93 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -6,9 +6,7 @@ use ff::{Field, FromUniformBytes}; use group::Curve; use super::{ - circuit::{ - compile_circuit, Assignment, Circuit, CompiledCircuitV2, ConstraintSystem, Selector, - }, + circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, evaluation::Evaluator, permutation, 
Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, }; @@ -20,7 +18,9 @@ use crate::{ EvaluationDomain, }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{ + Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, +}; pub(crate) fn create_domain( k: u32, @@ -221,7 +221,7 @@ where let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( params.n() as usize, - &cs.permutation, + &cs2.permutation, &circuit.preprocessing.permutation, )? .build_vk(params, &domain, &cs.permutation); @@ -321,7 +321,7 @@ where &cs.permutation, &circuit.preprocessing.permutation, )? - .build_pk(params, &vk.domain, &cs.permutation); + .build_pk(params, &vk.domain, &cs.permutation.clone().into()); // Compute l_0(X) // TODO: this can be done more efficiently diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 8d1cbdb5d0..19fcb7eceb 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -10,6 +10,7 @@ use crate::{ SerdeFormat, }; use halo2_middleware::circuit::{Any, Column}; +use halo2_middleware::permutation::ArgumentV2; pub(crate) mod keygen; pub(crate) mod prover; @@ -26,6 +27,14 @@ pub struct Argument { pub(super) columns: Vec>, } +impl From for Argument { + fn from(arg: ArgumentV2) -> Self { + Self { + columns: arg.columns.clone(), + } + } +} + impl Argument { pub(crate) fn new() -> Self { Argument { columns: vec![] } diff --git a/common/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs index c84fb75726..200e65e1fa 100644 --- a/common/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -11,7 +11,7 @@ use crate::{ }, }; use halo2_middleware::circuit::{Any, Column}; -use halo2_middleware::permutation::{AssemblyMid, Cell}; +use halo2_middleware::permutation::{ArgumentV2, AssemblyMid, Cell}; #[cfg(feature = "thread-safe-region")] use 
crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; @@ -87,10 +87,10 @@ impl AssemblyFront { impl Assembly { pub(crate) fn new_from_assembly_mid( n: usize, - p: &Argument, + p: &ArgumentV2, a: &AssemblyMid, ) -> Result { - let mut assembly = Self::new(n, p); + let mut assembly = Self::new(n, &p.clone().into()); for copy in &a.copies { assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; } diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index b248e74208..3a58f59cc5 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -132,7 +132,7 @@ pub struct ConstraintSystemV2Backend { pub gates: Vec>, // Permutation argument for performing equality constraints - pub permutation: permutation::Argument, + pub permutation: permutation::ArgumentV2, // Vector of lookup arguments, where each corresponds to a sequence of // input expressions and a sequence of table expressions involved in the lookup. diff --git a/middleware/src/metadata.rs b/middleware/src/metadata.rs index 61ff5a2ef7..de3dd40e88 100644 --- a/middleware/src/metadata.rs +++ b/middleware/src/metadata.rs @@ -6,9 +6,9 @@ use std::fmt::{self, Debug}; #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Column { /// The type of the column. - pub(super) column_type: Any, + pub column_type: Any, /// The index of the column. - pub(super) index: usize, + pub index: usize, } impl Column { diff --git a/middleware/src/permutation.rs b/middleware/src/permutation.rs index bd6d848e93..5cc20cd586 100644 --- a/middleware/src/permutation.rs +++ b/middleware/src/permutation.rs @@ -1,6 +1,6 @@ use crate::circuit::{Any, Column}; -// TODO: Dedup with other Cell definition +// TODO: Dedup with other Cell definition, or move this to a higher level #[derive(Clone, Debug)] pub struct Cell { pub column: Column, @@ -14,63 +14,7 @@ pub struct AssemblyMid { /// A permutation argument. 
#[derive(Debug, Clone)] -pub struct Argument { +pub struct ArgumentV2 { /// A sequence of columns involved in the argument. pub columns: Vec>, } - -// TODO: Remove all these methods, and directly access the fields? -impl Argument { - pub fn new() -> Self { - Argument { columns: vec![] } - } - - /// Returns the minimum circuit degree required by the permutation argument. - /// The argument may use larger degree gates depending on the actual - /// circuit's degree and how many columns are involved in the permutation. - pub(crate) fn required_degree(&self) -> usize { - // degree 2: - // l_0(X) * (1 - z(X)) = 0 - // - // We will fit as many polynomials p_i(X) as possible - // into the required degree of the circuit, so the - // following will not affect the required degree of - // this middleware. - // - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - // - // On the first sets of columns, except the first - // set, we will do - // - // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 - // - // where z'(X) is the permutation for the previous set - // of columns. - // - // On the final set of columns, we will do - // - // degree 3: - // l_last(X) * (z'(X)^2 - z'(X)) = 0 - // - // which will allow the last value to be zero to - // ensure the argument is perfectly complete. - - // There are constraints of degree 3 regardless of the - // number of columns involved. - 3 - } - - pub(crate) fn add_column(&mut self, column: Column) { - if !self.columns.contains(&column) { - self.columns.push(column); - } - } - - /// Returns columns that participate on the permutation argument. 
- pub fn get_columns(&self) -> Vec> { - self.columns.clone() - } -} From e004913a5251773f473fe61a3461d44cb05625d6 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 15:47:44 +0000 Subject: [PATCH 40/79] Checkpoint --- backend/src/lib.rs | 1 + backend/src/plonk.rs | 1 + {common => backend}/src/plonk/prover.rs | 141 +---------------- common/Cargo.toml | 1 - common/src/arithmetic.rs | 2 +- common/src/circuit.rs | 2 +- .../src/circuit/floor_planner/single_pass.rs | 2 +- common/src/circuit/floor_planner/v1.rs | 2 +- common/src/circuit/layouter.rs | 2 +- common/src/circuit/table_layouter.rs | 2 +- common/src/dev.rs | 4 +- common/src/dev/cost.rs | 2 +- common/src/dev/gates.rs | 2 +- common/src/dev/tfp.rs | 2 +- common/src/helpers.rs | 2 +- common/src/plonk.rs | 2 - common/src/plonk/circuit.rs | 144 +++++++++++++++++- .../src/plonk/circuit/compress_selectors.rs | 2 +- common/src/plonk/keygen.rs | 2 +- common/src/plonk/lookup.rs | 2 +- common/src/plonk/lookup/prover.rs | 2 +- common/src/plonk/lookup/verifier.rs | 2 +- common/src/plonk/permutation/keygen.rs | 2 +- common/src/plonk/permutation/prover.rs | 2 +- common/src/plonk/permutation/verifier.rs | 2 +- common/src/plonk/shuffle.rs | 2 +- common/src/plonk/shuffle/prover.rs | 2 +- common/src/plonk/shuffle/verifier.rs | 2 +- common/src/plonk/vanishing/prover.rs | 2 +- common/src/plonk/vanishing/verifier.rs | 2 +- common/src/plonk/verifier.rs | 2 +- common/src/plonk/verifier/batch.rs | 2 +- common/src/poly/commitment.rs | 2 +- common/src/poly/domain.rs | 2 +- common/src/poly/ipa/commitment/prover.rs | 2 +- common/src/poly/ipa/msm.rs | 2 +- common/src/poly/ipa/multiopen.rs | 2 +- common/src/poly/ipa/multiopen/prover.rs | 2 +- common/src/poly/ipa/multiopen/verifier.rs | 2 +- common/src/poly/ipa/strategy.rs | 2 +- common/src/poly/kzg/commitment.rs | 2 +- common/src/poly/kzg/msm.rs | 2 +- common/src/poly/kzg/multiopen/gwc.rs | 2 +- common/src/poly/kzg/multiopen/gwc/verifier.rs | 2 +- 
common/src/poly/kzg/multiopen/shplonk.rs | 2 +- .../src/poly/kzg/multiopen/shplonk/prover.rs | 2 +- .../poly/kzg/multiopen/shplonk/verifier.rs | 2 +- common/src/poly/kzg/strategy.rs | 2 +- 48 files changed, 188 insertions(+), 188 deletions(-) create mode 100644 backend/src/plonk.rs rename {common => backend}/src/plonk/prover.rs (89%) diff --git a/backend/src/lib.rs b/backend/src/lib.rs index e69de29bb2..c3120151fd 100644 --- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -0,0 +1 @@ +pub mod plonk; diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs new file mode 100644 index 0000000000..b8fcb1c31a --- /dev/null +++ b/backend/src/plonk.rs @@ -0,0 +1 @@ +pub mod prover; diff --git a/common/src/plonk/prover.rs b/backend/src/plonk/prover.rs similarity index 89% rename from common/src/plonk/prover.rs rename to backend/src/plonk/prover.rs index 7daca34c5e..d99c79c577 100644 --- a/common/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -1,5 +1,5 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; use std::ops::RangeTo; @@ -742,145 +742,6 @@ impl< } } -pub(crate) struct WitnessCollection<'a, F: Field> { - pub(crate) k: u32, - pub(crate) current_phase: sealed::Phase, - pub(crate) advice: Vec>>, - // pub(crate) unblinded_advice: HashSet, - pub(crate) challenges: &'a HashMap, - pub(crate) instances: &'a [&'a [F]], - pub(crate) usable_rows: RangeTo, - pub(crate) _marker: std::marker::PhantomData, -} - -impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. 
- } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase.0 != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` diff --git a/common/Cargo.toml b/common/Cargo.toml index 5cc521d3be..6f6a128099 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -26,7 +26,6 @@ rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] backtrace = { version = "0.3", optional = true } -ff = "0.13" group = "0.13" halo2curves = { version = "0.6.0", default-features = false } rand_core = { version = "0.6", default-features = false } diff --git a/common/src/arithmetic.rs b/common/src/arithmetic.rs index 0163e355eb..5c85dec61d 100644 --- a/common/src/arithmetic.rs +++ b/common/src/arithmetic.rs @@ -2,11 +2,11 @@ //! field and polynomial arithmetic. 
use super::multicore; -pub use ff::Field; use group::{ ff::{BatchInvert, PrimeField}, Curve, Group, GroupOpsOwned, ScalarMulOwned, }; +pub use halo2_middleware::ff::Field; pub use halo2curves::{CurveAffine, CurveExt}; diff --git a/common/src/circuit.rs b/common/src/circuit.rs index bdb2da9bbd..7f68562594 100644 --- a/common/src/circuit.rs +++ b/common/src/circuit.rs @@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData}; -use ff::Field; +use halo2_middleware::ff::Field; use crate::plonk::{Assigned, Error, Selector, TableColumn}; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; diff --git a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index 147b2a9626..448fd89c33 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use std::fmt; use std::marker::PhantomData; -use ff::Field; +use halo2_middleware::ff::Field; use crate::{ circuit::{ diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index 88f99b39db..ed738c4b4e 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -1,6 +1,6 @@ use std::fmt; -use ff::Field; +use halo2_middleware::ff::Field; use crate::{ circuit::{ diff --git a/common/src/circuit/layouter.rs b/common/src/circuit/layouter.rs index b7583060e1..455a5e4418 100644 --- a/common/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -4,7 +4,7 @@ use std::cmp; use std::collections::HashSet; use std::fmt; -use ff::Field; +use halo2_middleware::ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; diff --git a/common/src/circuit/table_layouter.rs b/common/src/circuit/table_layouter.rs index 7189621067..d6b1d6fc5b 100644 --- a/common/src/circuit/table_layouter.rs +++ b/common/src/circuit/table_layouter.rs @@ -5,7 +5,7 @@ use std::{ 
fmt::{self, Debug}, }; -use ff::Field; +use halo2_middleware::ff::Field; use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; diff --git a/common/src/dev.rs b/common/src/dev.rs index be9366956c..b6a108057b 100644 --- a/common/src/dev.rs +++ b/common/src/dev.rs @@ -6,8 +6,8 @@ use std::iter; use std::ops::{Add, Mul, Neg, Range}; use blake2b_simd::blake2b; -use ff::Field; -use ff::FromUniformBytes; +use halo2_middleware::ff::Field; +use halo2_middleware::ff::FromUniformBytes; use crate::plonk::permutation::keygen::Assembly; use crate::{ diff --git a/common/src/dev/cost.rs b/common/src/dev/cost.rs index 96ef53b093..7fbeb8a6fa 100644 --- a/common/src/dev/cost.rs +++ b/common/src/dev/cost.rs @@ -8,8 +8,8 @@ use std::{ ops::{Add, Mul}, }; -use ff::{Field, PrimeField}; use group::prime::PrimeGroup; +use halo2_middleware::ff::{Field, PrimeField}; use halo2_middleware::poly::Rotation; use crate::{ diff --git a/common/src/dev/gates.rs b/common/src/dev/gates.rs index 4421c0967f..41ab2edd9d 100644 --- a/common/src/dev/gates.rs +++ b/common/src/dev/gates.rs @@ -3,7 +3,7 @@ use std::{ fmt::{self, Write}, }; -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; use crate::{ dev::util, diff --git a/common/src/dev/tfp.rs b/common/src/dev/tfp.rs index 82ec9abcfb..ec1a195f6e 100644 --- a/common/src/dev/tfp.rs +++ b/common/src/dev/tfp.rs @@ -1,6 +1,6 @@ use std::{fmt, marker::PhantomData}; -use ff::Field; +use halo2_middleware::ff::Field; use tracing::{debug, debug_span, span::EnteredSpan}; use crate::{ diff --git a/common/src/helpers.rs b/common/src/helpers.rs index faf7351a3e..3b1e5769f8 100644 --- a/common/src/helpers.rs +++ b/common/src/helpers.rs @@ -1,5 +1,5 @@ use crate::poly::Polynomial; -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; use halo2curves::{serde::SerdeObject, CurveAffine}; use std::io; diff --git a/common/src/plonk.rs b/common/src/plonk.rs index aa88af62a4..7a072cd28d 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs 
@@ -35,14 +35,12 @@ pub mod permutation; mod shuffle; mod vanishing; -mod prover; mod verifier; pub use assigned::*; pub use circuit::*; pub use error::*; pub use keygen::*; -pub use prover::*; pub use verifier::*; use evaluation::Evaluator; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index d692738796..a5b0365aed 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1,18 +1,17 @@ use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; use crate::circuit::layouter::SyncDeps; -use crate::plonk::WitnessCollection; use crate::{ circuit::{Layouter, Region, Value}, poly::{batch_invert_assigned, Polynomial}, }; use core::cmp::max; use core::ops::{Add, Mul}; -use ff::Field; use halo2_middleware::circuit::{ Advice, AdviceQueryMid, Any, Challenge, Column, CompiledCircuitV2, ConstraintSystemV2Backend, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, PreprocessingV2, }; +use halo2_middleware::ff::Field; use halo2_middleware::metadata; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; @@ -20,6 +19,7 @@ use std::collections::BTreeSet; use std::collections::HashMap; use std::fmt::Debug; use std::iter::{Product, Sum}; +use std::ops::RangeTo; use std::{ convert::TryFrom, ops::{Neg, Sub}, @@ -1478,6 +1478,146 @@ impl Into> for ConstraintSystem { } */ +// TODO: Move to frontend +pub(crate) struct WitnessCollection<'a, F: Field> { + pub(crate) k: u32, + pub(crate) current_phase: sealed::Phase, + pub(crate) advice: Vec>>, + // pub(crate) unblinded_advice: HashSet, + pub(crate) challenges: &'a HashMap, + pub(crate) instances: &'a [&'a [F]], + pub(crate) usable_rows: RangeTo, + pub(crate) _marker: std::marker::PhantomData, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. 
+ } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. + if self.current_phase.0 != column.column_type().phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + /// Witness calculator. Frontend function #[derive(Debug)] pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { diff --git a/common/src/plonk/circuit/compress_selectors.rs b/common/src/plonk/circuit/compress_selectors.rs index 15016f31b9..b40b27503d 100644 --- a/common/src/plonk/circuit/compress_selectors.rs +++ b/common/src/plonk/circuit/compress_selectors.rs @@ -1,5 +1,5 @@ use super::Expression; -use ff::Field; +use halo2_middleware::ff::Field; /// This describes a selector and where it is activated. 
#[derive(Debug, Clone)] diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index a64b754e93..2dea5d26b1 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -2,8 +2,8 @@ use std::ops::Range; -use ff::{Field, FromUniformBytes}; use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index fb6b3492a3..80ecd08285 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -1,6 +1,6 @@ use super::circuit::Expression; -use ff::Field; use halo2_middleware::circuit::ExpressionMid; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; diff --git a/common/src/plonk/lookup/prover.rs b/common/src/plonk/lookup/prover.rs index 71066da9e6..ed8cdabc77 100644 --- a/common/src/plonk/lookup/prover.rs +++ b/common/src/plonk/lookup/prover.rs @@ -12,11 +12,11 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use ff::WithSmallOrderMulGroup; use group::{ ff::{BatchInvert, Field}, Curve, }; +use halo2_middleware::ff::WithSmallOrderMulGroup; use halo2_middleware::poly::Rotation; use rand_core::RngCore; use std::{ diff --git a/common/src/plonk/lookup/verifier.rs b/common/src/plonk/lookup/verifier.rs index 84cd02efb5..11e780148d 100644 --- a/common/src/plonk/lookup/verifier.rs +++ b/common/src/plonk/lookup/verifier.rs @@ -10,7 +10,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; pub struct PermutationCommitments { diff --git a/common/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs index 200e65e1fa..7064c849f9 100644 --- a/common/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -1,5 +1,5 @@ -use ff::{Field, 
PrimeField}; use group::Curve; +use halo2_middleware::ff::{Field, PrimeField}; use super::{Argument, ProvingKey, VerifyingKey}; use crate::{ diff --git a/common/src/plonk/permutation/prover.rs b/common/src/plonk/permutation/prover.rs index cd4ad43797..8d4b168740 100644 --- a/common/src/plonk/permutation/prover.rs +++ b/common/src/plonk/permutation/prover.rs @@ -1,8 +1,8 @@ -use ff::PrimeField; use group::{ ff::{BatchInvert, Field}, Curve, }; +use halo2_middleware::ff::PrimeField; use rand_core::RngCore; use std::iter::{self, ExactSizeIterator}; diff --git a/common/src/plonk/permutation/verifier.rs b/common/src/plonk/permutation/verifier.rs index 96ec55ef41..195a771999 100644 --- a/common/src/plonk/permutation/verifier.rs +++ b/common/src/plonk/permutation/verifier.rs @@ -1,4 +1,4 @@ -use ff::{Field, PrimeField}; +use halo2_middleware::ff::{Field, PrimeField}; use std::iter; use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index 496f691cd5..d80c4e498a 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -1,6 +1,6 @@ use super::circuit::Expression; -use ff::Field; use halo2_middleware::circuit::ExpressionMid; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; pub(crate) mod prover; diff --git a/common/src/plonk/shuffle/prover.rs b/common/src/plonk/shuffle/prover.rs index f730a8ecf7..b70184fc8c 100644 --- a/common/src/plonk/shuffle/prover.rs +++ b/common/src/plonk/shuffle/prover.rs @@ -11,8 +11,8 @@ use crate::{ }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use ff::WithSmallOrderMulGroup; use group::{ff::BatchInvert, Curve}; +use halo2_middleware::ff::WithSmallOrderMulGroup; use halo2_middleware::poly::Rotation; use rand_core::RngCore; use std::{ diff --git a/common/src/plonk/shuffle/verifier.rs b/common/src/plonk/shuffle/verifier.rs index 4f3233ad60..46a7823c9c 100644 --- a/common/src/plonk/shuffle/verifier.rs +++ 
b/common/src/plonk/shuffle/verifier.rs @@ -8,7 +8,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; pub struct Committed { diff --git a/common/src/plonk/vanishing/prover.rs b/common/src/plonk/vanishing/prover.rs index 7943086826..d30d9dc4af 100644 --- a/common/src/plonk/vanishing/prover.rs +++ b/common/src/plonk/vanishing/prover.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, iter}; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; diff --git a/common/src/plonk/vanishing/verifier.rs b/common/src/plonk/vanishing/verifier.rs index 0881dfb2c0..05ccb02a5b 100644 --- a/common/src/plonk/vanishing/verifier.rs +++ b/common/src/plonk/vanishing/verifier.rs @@ -1,6 +1,6 @@ use std::iter; -use ff::Field; +use halo2_middleware::ff::Field; use crate::{ arithmetic::CurveAffine, diff --git a/common/src/plonk/verifier.rs b/common/src/plonk/verifier.rs index 62c18c609a..e60f19374f 100644 --- a/common/src/plonk/verifier.rs +++ b/common/src/plonk/verifier.rs @@ -1,5 +1,5 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use std::iter; use super::{ diff --git a/common/src/plonk/verifier/batch.rs b/common/src/plonk/verifier/batch.rs index ba3e2419e6..d869d87559 100644 --- a/common/src/plonk/verifier/batch.rs +++ b/common/src/plonk/verifier/batch.rs @@ -1,5 +1,5 @@ -use ff::FromUniformBytes; use group::ff::Field; +use halo2_middleware::ff::FromUniformBytes; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/common/src/poly/commitment.rs b/common/src/poly/commitment.rs index feae085655..78b17fc808 100644 --- a/common/src/poly/commitment.rs +++ b/common/src/poly/commitment.rs @@ -5,7 +5,7 @@ use super::{ }; use crate::poly::Error; use 
crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::CurveAffine; use rand_core::RngCore; use std::{ diff --git a/common/src/poly/domain.rs b/common/src/poly/domain.rs index 45e6db0570..edeaefb8b2 100644 --- a/common/src/poly/domain.rs +++ b/common/src/poly/domain.rs @@ -7,8 +7,8 @@ use crate::{ }; use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; -use ff::WithSmallOrderMulGroup; use group::ff::{BatchInvert, Field}; +use halo2_middleware::ff::WithSmallOrderMulGroup; use halo2_middleware::poly::Rotation; use std::marker::PhantomData; diff --git a/common/src/poly/ipa/commitment/prover.rs b/common/src/poly/ipa/commitment/prover.rs index 344dbc0e65..ee92c7677f 100644 --- a/common/src/poly/ipa/commitment/prover.rs +++ b/common/src/poly/ipa/commitment/prover.rs @@ -1,4 +1,4 @@ -use ff::Field; +use halo2_middleware::ff::Field; use rand_core::RngCore; use super::ParamsIPA; diff --git a/common/src/poly/ipa/msm.rs b/common/src/poly/ipa/msm.rs index a615ddce49..59c99d1f29 100644 --- a/common/src/poly/ipa/msm.rs +++ b/common/src/poly/ipa/msm.rs @@ -1,7 +1,7 @@ use crate::arithmetic::{best_multiexp, CurveAffine}; use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; -use ff::Field; use group::Group; +use halo2_middleware::ff::Field; use std::collections::BTreeMap; /// A multiscalar multiplication in the polynomial commitment scheme diff --git a/common/src/poly/ipa/multiopen.rs b/common/src/poly/ipa/multiopen.rs index b78acb5934..1df7f41daa 100644 --- a/common/src/poly/ipa/multiopen.rs +++ b/common/src/poly/ipa/multiopen.rs @@ -5,7 +5,7 @@ use super::*; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; use std::collections::{BTreeMap, BTreeSet}; mod prover; diff --git a/common/src/poly/ipa/multiopen/prover.rs b/common/src/poly/ipa/multiopen/prover.rs index 2ae745d457..923248704f 100644 --- 
a/common/src/poly/ipa/multiopen/prover.rs +++ b/common/src/poly/ipa/multiopen/prover.rs @@ -7,8 +7,8 @@ use crate::poly::query::ProverQuery; use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; use rand_core::RngCore; use std::io; use std::marker::PhantomData; diff --git a/common/src/poly/ipa/multiopen/verifier.rs b/common/src/poly/ipa/multiopen/verifier.rs index d559e33384..7910a0662e 100644 --- a/common/src/poly/ipa/multiopen/verifier.rs +++ b/common/src/poly/ipa/multiopen/verifier.rs @@ -1,6 +1,6 @@ use std::fmt::Debug; -use ff::Field; +use halo2_middleware::ff::Field; use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; use crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; diff --git a/common/src/poly/ipa/strategy.rs b/common/src/poly/ipa/strategy.rs index d2d1b3d364..6900981f01 100644 --- a/common/src/poly/ipa/strategy.rs +++ b/common/src/poly/ipa/strategy.rs @@ -9,8 +9,8 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; use halo2curves::CurveAffine; use rand_core::OsRng; diff --git a/common/src/poly/kzg/commitment.rs b/common/src/poly/kzg/commitment.rs index 114b9ac013..e162694941 100644 --- a/common/src/poly/kzg/commitment.rs +++ b/common/src/poly/kzg/commitment.rs @@ -4,8 +4,8 @@ use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, Par use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; use crate::SerdeFormat; -use ff::{Field, PrimeField}; use group::{prime::PrimeCurveAffine, Curve, Group}; +use halo2_middleware::ff::{Field, PrimeField}; use halo2curves::pairing::Engine; use halo2curves::CurveExt; use rand_core::{OsRng, RngCore}; diff --git a/common/src/poly/kzg/msm.rs b/common/src/poly/kzg/msm.rs index f9b8c284bd..6244209965 100644 --- a/common/src/poly/kzg/msm.rs +++ 
b/common/src/poly/kzg/msm.rs @@ -37,7 +37,7 @@ where /// Prepares all scalars in the MSM to linear combination pub fn combine_with_base(&mut self, base: E::Fr) { - use ff::Field; + use halo2_middleware::ff::Field; let mut acc = E::Fr::ONE; if !self.scalars.is_empty() { for scalar in self.scalars.iter_mut().rev() { diff --git a/common/src/poly/kzg/multiopen/gwc.rs b/common/src/poly/kzg/multiopen/gwc.rs index 3fd28dd00a..8c8e056e83 100644 --- a/common/src/poly/kzg/multiopen/gwc.rs +++ b/common/src/poly/kzg/multiopen/gwc.rs @@ -5,7 +5,7 @@ pub use prover::ProverGWC; pub use verifier::VerifierGWC; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; use std::marker::PhantomData; #[derive(Clone, Copy, Debug)] diff --git a/common/src/poly/kzg/multiopen/gwc/verifier.rs b/common/src/poly/kzg/multiopen/gwc/verifier.rs index fcfda6941f..261f5e2234 100644 --- a/common/src/poly/kzg/multiopen/gwc/verifier.rs +++ b/common/src/poly/kzg/multiopen/gwc/verifier.rs @@ -13,7 +13,7 @@ use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; diff --git a/common/src/poly/kzg/multiopen/shplonk.rs b/common/src/poly/kzg/multiopen/shplonk.rs index d0814e83e3..80cad76fa0 100644 --- a/common/src/poly/kzg/multiopen/shplonk.rs +++ b/common/src/poly/kzg/multiopen/shplonk.rs @@ -3,7 +3,7 @@ mod verifier; use crate::multicore::{IntoParallelIterator, ParallelIterator}; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; pub use prover::ProverSHPLONK; use std::collections::BTreeSet; pub use verifier::VerifierSHPLONK; diff --git a/common/src/poly/kzg/multiopen/shplonk/prover.rs b/common/src/poly/kzg/multiopen/shplonk/prover.rs index 5001d69094..388fa91147 100644 --- 
a/common/src/poly/kzg/multiopen/shplonk/prover.rs +++ b/common/src/poly/kzg/multiopen/shplonk/prover.rs @@ -13,8 +13,8 @@ use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; use halo2curves::pairing::Engine; use halo2curves::CurveExt; use rand_core::RngCore; diff --git a/common/src/poly/kzg/multiopen/shplonk/verifier.rs b/common/src/poly/kzg/multiopen/shplonk/verifier.rs index 5d03940177..f5a4d824f6 100644 --- a/common/src/poly/kzg/multiopen/shplonk/verifier.rs +++ b/common/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -15,7 +15,7 @@ use crate::poly::kzg::strategy::GuardKZG; use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; use std::ops::MulAssign; diff --git a/common/src/poly/kzg/strategy.rs b/common/src/poly/kzg/strategy.rs index ee80d800ac..78d182fbf6 100644 --- a/common/src/poly/kzg/strategy.rs +++ b/common/src/poly/kzg/strategy.rs @@ -10,7 +10,7 @@ use crate::{ strategy::{Guard, VerificationStrategy}, }, }; -use ff::Field; +use halo2_middleware::ff::Field; use halo2curves::{ pairing::{Engine, MultiMillerLoop}, CurveAffine, CurveExt, From fd80a62776d82bc42f03ec8e8628d90533d23a07 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 16:03:06 +0000 Subject: [PATCH 41/79] Checkpoint --- backend/src/lib.rs | 3 +++ backend/src/plonk/prover.rs | 18 ++++++++---------- common/src/lib.rs | 6 ++++-- common/src/plonk.rs | 38 ++++++++++++++++++------------------- common/src/plonk/circuit.rs | 2 +- common/src/plonk/lookup.rs | 4 ++-- common/src/poly.rs | 2 +- 7 files changed, 38 insertions(+), 35 deletions(-) diff --git a/backend/src/lib.rs b/backend/src/lib.rs index 
c3120151fd..904a19c0ed 100644 --- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -1 +1,4 @@ +#![allow(dead_code)] // TODO: Remove +#![allow(unused_imports)] // TODO: Remove + pub mod plonk; diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index d99c79c577..3ae9b1f64d 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -2,21 +2,17 @@ use group::Curve; use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; -use std::ops::RangeTo; use std::{collections::HashMap, iter}; -use super::{ - circuit::{ - compile_circuit, - sealed::{self}, - Assignment, Circuit, Selector, WitnessCalculator, - }, +use halo2_common::plonk::{ + circuit::{sealed, Assignment, Circuit, Selector}, lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; -use crate::{ +use group::prime::PrimeCurveAffine; +use halo2_common::{ arithmetic::{eval_polynomial, CurveAffine}, circuit::Value, plonk::Assigned, @@ -25,11 +21,10 @@ use crate::{ Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, }, }; -use crate::{ +use halo2_common::{ poly::batch_invert_assigned, transcript::{EncodedChallenge, TranscriptWrite}, }; -use group::prime::PrimeCurveAffine; /// Collection of instance data used during proving for a single circuit proof. #[derive(Debug)] @@ -742,6 +737,8 @@ impl< } } +// TODO: Move this to halo2_proofs as a legacy wrapper +/* /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. 
The provided `instances` @@ -853,3 +850,4 @@ fn test_create_proof() { ) .expect("proof generation should not fail"); } +*/ diff --git a/common/src/lib.rs b/common/src/lib.rs index acc26aff15..832d3ee8ad 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,11 +1,13 @@ //! # halo2_proofs +#![allow(dead_code)] // TODO: Remove +#![allow(unused_imports)] // TODO: Remove #![cfg_attr(docsrs, feature(doc_cfg))] // The actual lints we want to disable. #![allow(clippy::op_ref, clippy::many_single_char_names)] #![deny(rustdoc::broken_intra_doc_links)] -#![deny(missing_debug_implementations)] -#![deny(missing_docs)] +// #![deny(missing_debug_implementations)] // TODO: Uncomment +// #![deny(missing_docs)] // TODO: Uncomment #![deny(unsafe_code)] pub mod arithmetic; diff --git a/common/src/plonk.rs b/common/src/plonk.rs index 7a072cd28d..f7bcd4c6fb 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -25,17 +25,17 @@ use halo2_middleware::circuit::{ }; use halo2_middleware::poly::Rotation; -mod assigned; -mod circuit; -mod error; -mod evaluation; -mod keygen; -mod lookup; +pub mod assigned; +pub mod circuit; +pub mod error; +pub mod evaluation; +pub mod keygen; +pub mod lookup; pub mod permutation; -mod shuffle; -mod vanishing; +pub mod shuffle; +pub mod vanishing; -mod verifier; +pub mod verifier; pub use assigned::*; pub use circuit::*; @@ -532,21 +532,21 @@ impl VerifyingKey { } #[derive(Clone, Copy, Debug)] -struct Theta; -type ChallengeTheta = ChallengeScalar; +pub struct Theta; +pub type ChallengeTheta = ChallengeScalar; #[derive(Clone, Copy, Debug)] -struct Beta; -type ChallengeBeta = ChallengeScalar; +pub struct Beta; +pub type ChallengeBeta = ChallengeScalar; #[derive(Clone, Copy, Debug)] -struct Gamma; -type ChallengeGamma = ChallengeScalar; +pub struct Gamma; +pub type ChallengeGamma = ChallengeScalar; #[derive(Clone, Copy, Debug)] -struct Y; -type ChallengeY = ChallengeScalar; +pub struct Y; +pub type ChallengeY = ChallengeScalar; #[derive(Clone, 
Copy, Debug)] -struct X; -type ChallengeX = ChallengeScalar; +pub struct X; +pub type ChallengeX = ChallengeScalar; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index a5b0365aed..c0b18bf566 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -28,7 +28,7 @@ use std::{ mod compress_selectors; // TODO: Move sealed phase to frontend, and always use u8 in middleware and backend -pub(crate) mod sealed { +pub mod sealed { /// Phase of advice column #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct Phase(pub(crate) u8); diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index 80ecd08285..34426a2952 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -3,8 +3,8 @@ use halo2_middleware::circuit::ExpressionMid; use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; -pub(crate) mod prover; -pub(crate) mod verifier; +pub mod prover; +pub mod verifier; /// Expressions involved in a lookup argument, with a name as metadata. 
#[derive(Clone)] diff --git a/common/src/poly.rs b/common/src/poly.rs index 100ee10eb8..9fe12b5da5 100644 --- a/common/src/poly.rs +++ b/common/src/poly.rs @@ -197,7 +197,7 @@ impl Polynomial { } } -pub(crate) fn batch_invert_assigned( +pub fn batch_invert_assigned( assigned: Vec, LagrangeCoeff>>, ) -> Vec> { let mut assigned_denominators: Vec<_> = assigned From 43fdbb3eee864f0521a58757bcc57db04e58c835 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 16:28:43 +0000 Subject: [PATCH 42/79] Checkpoint --- common/src/plonk.rs | 436 +---------- common/src/plonk/circuit.rs | 2 +- common/src/plonk/evaluation.rs | 873 ----------------------- common/src/plonk/keygen.rs | 224 +----- common/src/plonk/lookup.rs | 3 - common/src/plonk/lookup/prover.rs | 476 ------------ common/src/plonk/lookup/verifier.rs | 212 ------ common/src/plonk/permutation.rs | 59 +- common/src/plonk/permutation/keygen.rs | 47 -- common/src/plonk/permutation/prover.rs | 331 --------- common/src/plonk/permutation/verifier.rs | 256 ------- common/src/plonk/shuffle.rs | 3 - common/src/plonk/shuffle/prover.rs | 251 ------- common/src/plonk/shuffle/verifier.rs | 139 ---- common/src/plonk/vanishing.rs | 11 - common/src/plonk/vanishing/prover.rs | 199 ------ common/src/plonk/vanishing/verifier.rs | 138 ---- common/src/plonk/verifier.rs | 462 ------------ common/src/plonk/verifier/batch.rs | 135 ---- 19 files changed, 59 insertions(+), 4198 deletions(-) delete mode 100644 common/src/plonk/evaluation.rs delete mode 100644 common/src/plonk/lookup/prover.rs delete mode 100644 common/src/plonk/lookup/verifier.rs delete mode 100644 common/src/plonk/permutation/prover.rs delete mode 100644 common/src/plonk/permutation/verifier.rs delete mode 100644 common/src/plonk/shuffle/prover.rs delete mode 100644 common/src/plonk/shuffle/verifier.rs delete mode 100644 common/src/plonk/vanishing.rs delete mode 100644 common/src/plonk/vanishing/prover.rs delete mode 100644 common/src/plonk/vanishing/verifier.rs delete 
mode 100644 common/src/plonk/verifier.rs delete mode 100644 common/src/plonk/verifier/batch.rs diff --git a/common/src/plonk.rs b/common/src/plonk.rs index f7bcd4c6fb..76b23665bc 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -28,22 +28,21 @@ use halo2_middleware::poly::Rotation; pub mod assigned; pub mod circuit; pub mod error; -pub mod evaluation; +// pub mod evaluation; pub mod keygen; pub mod lookup; pub mod permutation; pub mod shuffle; -pub mod vanishing; +// pub mod vanishing; -pub mod verifier; +// pub mod verifier; pub use assigned::*; pub use circuit::*; pub use error::*; pub use keygen::*; -pub use verifier::*; +// pub use verifier::*; -use evaluation::Evaluator; use std::io; /// List of queries (columns and rotations) used by a circuit @@ -104,433 +103,6 @@ impl Queries { } } -/// This is a verifying key which allows for the verification of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct VerifyingKey { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - /// Cached maximum degree of `cs` (which doesn't change after construction). - cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, - selectors: Vec>, - /// Whether selector compression is turned on or not. - compress_selectors: bool, -} - -// Current version of the VK -const VERSION: u8 = 0x03; - -impl VerifyingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a verifying key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. 
- /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - // Version byte that will be checked on read. - writer.write_all(&[VERSION])?; - let k = &self.domain.k(); - assert!(*k <= C::Scalar::S); - // k value fits in 1 byte - writer.write_all(&[*k as u8])?; - writer.write_all(&[self.compress_selectors as u8])?; - writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; - for commitment in &self.fixed_commitments { - commitment.write(writer, format)?; - } - self.permutation.write(writer, format)?; - - if !self.compress_selectors { - assert!(self.selectors.is_empty()); - } - // write self.selectors - for selector in &self.selectors { - // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write - for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; - } - } - Ok(()) - } - - /// Reads a verification key from a buffer. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let mut version_byte = [0u8; 1]; - reader.read_exact(&mut version_byte)?; - if VERSION != version_byte[0] { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected version byte", - )); - } - - let mut k = [0u8; 1]; - reader.read_exact(&mut k)?; - let k = u8::from_le_bytes(k); - if k as u32 > C::Scalar::S { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - format!( - "circuit size value (k): {} exceeds maxium: {}", - k, - C::Scalar::S - ), - )); - } - let mut compress_selectors = [0u8; 1]; - reader.read_exact(&mut compress_selectors)?; - if compress_selectors[0] != 0 && compress_selectors[0] != 1 { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected compress_selectors not boolean", - )); - } - let compress_selectors = compress_selectors[0] == 1; - let (domain, cs, _) = keygen::create_domain::( - k as u32, - #[cfg(feature = "circuit-params")] - params, - ); - let mut num_fixed_columns = [0u8; 4]; - reader.read_exact(&mut num_fixed_columns)?; - let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - - let fixed_commitments: Vec<_> = (0..num_fixed_columns) - .map(|_| C::read(reader, format)) - .collect::>()?; - - let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - - let (cs, selectors) = if compress_selectors { - // read selectors - let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] - .into_iter() - .map(|mut selector| { - let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; - reader.read_exact(&mut selector_bytes)?; - for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); - } - Ok(selector) - }) - .collect::>()?; - let (cs, _) 
= cs.compress_selectors(selectors.clone()); - (cs, selectors) - } else { - // we still need to replace selectors with fixed Expressions in `cs` - let fake_selectors = vec![vec![]; cs.num_selectors]; - let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); - (cs, vec![]) - }; - - Ok(Self::from_parts( - domain, - fixed_commitments, - permutation, - cs, - selectors, - compress_selectors, - )) - } - - /// Writes a verifying key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a verification key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - 10 + (self.fixed_commitments.len() * C::byte_length(format)) - + self.permutation.bytes_length(format) - + self.selectors.len() - * (self - .selectors - .get(0) - .map(|selector| (selector.len() + 7) / 8) - .unwrap_or(0)) - } - - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - selectors: Vec>, - compress_selectors: bool, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. - let cs_degree = cs.degree(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - cs_degree, - // Temporary, this is not pinned. 
- transcript_repr: C::Scalar::ZERO, - selectors, - compress_selectors, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - let s = format!("{:?}", vk.pinned()); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } - - /// Obtains a pinned representation of this verification key that contains - /// the minimal information necessary to reconstruct the verification key. - pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { - PinnedVerificationKey { - base_modulus: C::Base::MODULUS, - scalar_modulus: C::Scalar::MODULUS, - domain: self.domain.pinned(), - fixed_commitments: &self.fixed_commitments, - permutation: &self.permutation, - cs: self.cs.pinned(), - } - } - - /// Returns commitments of fixed polynomials - pub fn fixed_commitments(&self) -> &Vec { - &self.fixed_commitments - } - - /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> &permutation::VerifyingKey { - &self.permutation - } - - /// Returns `ConstraintSystem` - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns representative of this `VerifyingKey` in transcripts - pub fn transcript_repr(&self) -> C::Scalar { - self.transcript_repr - } -} - -/// Minimal representation of a verification key that can be used to identify -/// its active contents. 
-#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedVerificationKey<'a, C: CurveAffine> { - base_modulus: &'static str, - scalar_modulus: &'static str, - domain: PinnedEvaluationDomain<'a, C::Scalar>, - cs: PinnedConstraintSystem<'a, C::Scalar>, - fixed_commitments: &'a Vec, - permutation: &'a permutation::VerifyingKey, -} - -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKey { - vk: VerifyingKey, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -impl ProvingKey -where - C::Scalar: FromUniformBytes<64>, -{ - /// Get the underlying [`VerifyingKey`]. - pub fn get_vk(&self) -> &VerifyingKey { - &self.vk - } - - /// Gets the total number of bytes in the serialization of `self` - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - let scalar_len = C::Scalar::default().to_repr().as_ref().len(); - self.vk.bytes_length(format) - + 12 - + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) - + polynomial_slice_byte_length(&self.fixed_values) - + polynomial_slice_byte_length(&self.fixed_polys) - + polynomial_slice_byte_length(&self.fixed_cosets) - + self.permutation.bytes_length() - } -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a proving key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. 
- /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - self.vk.write(writer, format)?; - self.l0.write(writer, format)?; - self.l_last.write(writer, format)?; - self.l_active_row.write(writer, format)?; - write_polynomial_slice(&self.fixed_values, writer, format)?; - write_polynomial_slice(&self.fixed_polys, writer, format)?; - write_polynomial_slice(&self.fixed_cosets, writer, format)?; - self.permutation.write(writer, format)?; - Ok(()) - } - - /// Reads a proving key from a buffer. - /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let vk = VerifyingKey::::read::( - reader, - format, - #[cfg(feature = "circuit-params")] - params, - )?; - let l0 = Polynomial::read(reader, format)?; - let l_last = Polynomial::read(reader, format)?; - let l_active_row = Polynomial::read(reader, format)?; - let fixed_values = read_polynomial_vec(reader, format)?; - let fixed_polys = read_polynomial_vec(reader, format)?; - let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = permutation::ProvingKey::read(reader, format)?; - let ev = Evaluator::new(vk.cs()); - Ok(Self { - vk, - l0, - l_last, - l_active_row, - fixed_values, - fixed_polys, - fixed_cosets, - permutation, - ev, - }) - } - - /// Writes a proving key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a proving key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - /// Get the underlying [`EvaluationDomain`]. 
- pub fn get_domain(&self) -> &EvaluationDomain { - &self.domain - } -} - #[derive(Clone, Copy, Debug)] pub struct Theta; pub type ChallengeTheta = ChallengeScalar; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index c0b18bf566..1ee8ffefc6 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1758,7 +1758,7 @@ pub fn compile_circuit>( let mut assembly = crate::plonk::keygen::Assembly { k, fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), + permutation: permutation::AssemblyFront::new(n, &cs.permutation), selectors: vec![vec![false; n]; cs.num_selectors], usable_rows: 0..n - (cs.blinding_factors() + 1), _marker: std::marker::PhantomData, diff --git a/common/src/plonk/evaluation.rs b/common/src/plonk/evaluation.rs deleted file mode 100644 index 2cd00a5f7c..0000000000 --- a/common/src/plonk/evaluation.rs +++ /dev/null @@ -1,873 +0,0 @@ -use crate::multicore; -use crate::plonk::{lookup, permutation, ProvingKey}; -use crate::poly::Basis; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, -}; -use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; -use halo2_middleware::circuit::Any; -use halo2_middleware::poly::Rotation; - -use super::{shuffle, ConstraintSystem, Expression}; - -/// Return the index in the polynomial of size `isize` after rotation `rot`. 
-fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { - (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize -} - -/// Value used in a calculation -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] -pub enum ValueSource { - /// This is a constant value - Constant(usize), - /// This is an intermediate value - Intermediate(usize), - /// This is a fixed column - Fixed(usize, usize), - /// This is an advice (witness) column - Advice(usize, usize), - /// This is an instance (external) column - Instance(usize, usize), - /// This is a challenge - Challenge(usize), - /// beta - Beta(), - /// gamma - Gamma(), - /// theta - Theta(), - /// y - Y(), - /// Previous value - PreviousValue(), -} - -impl Default for ValueSource { - fn default() -> Self { - ValueSource::Constant(0) - } -} - -impl ValueSource { - /// Get the value for this source - #[allow(clippy::too_many_arguments)] - pub fn get( - &self, - rotations: &[usize], - constants: &[F], - intermediates: &[F], - fixed_values: &[Polynomial], - advice_values: &[Polynomial], - instance_values: &[Polynomial], - challenges: &[F], - beta: &F, - gamma: &F, - theta: &F, - y: &F, - previous_value: &F, - ) -> F { - match self { - ValueSource::Constant(idx) => constants[*idx], - ValueSource::Intermediate(idx) => intermediates[*idx], - ValueSource::Fixed(column_index, rotation) => { - fixed_values[*column_index][rotations[*rotation]] - } - ValueSource::Advice(column_index, rotation) => { - advice_values[*column_index][rotations[*rotation]] - } - ValueSource::Instance(column_index, rotation) => { - instance_values[*column_index][rotations[*rotation]] - } - ValueSource::Challenge(index) => challenges[*index], - ValueSource::Beta() => *beta, - ValueSource::Gamma() => *gamma, - ValueSource::Theta() => *theta, - ValueSource::Y() => *y, - ValueSource::PreviousValue() => *previous_value, - } - } -} - -/// Calculation -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum Calculation { - /// This is an 
addition - Add(ValueSource, ValueSource), - /// This is a subtraction - Sub(ValueSource, ValueSource), - /// This is a product - Mul(ValueSource, ValueSource), - /// This is a square - Square(ValueSource), - /// This is a double - Double(ValueSource), - /// This is a negation - Negate(ValueSource), - /// This is Horner's rule: `val = a; val = val * c + b[]` - Horner(ValueSource, Vec, ValueSource), - /// This is a simple assignment - Store(ValueSource), -} - -impl Calculation { - /// Get the resulting value of this calculation - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - rotations: &[usize], - constants: &[F], - intermediates: &[F], - fixed_values: &[Polynomial], - advice_values: &[Polynomial], - instance_values: &[Polynomial], - challenges: &[F], - beta: &F, - gamma: &F, - theta: &F, - y: &F, - previous_value: &F, - ) -> F { - let get_value = |value: &ValueSource| { - value.get( - rotations, - constants, - intermediates, - fixed_values, - advice_values, - instance_values, - challenges, - beta, - gamma, - theta, - y, - previous_value, - ) - }; - match self { - Calculation::Add(a, b) => get_value(a) + get_value(b), - Calculation::Sub(a, b) => get_value(a) - get_value(b), - Calculation::Mul(a, b) => get_value(a) * get_value(b), - Calculation::Square(v) => get_value(v).square(), - Calculation::Double(v) => get_value(v).double(), - Calculation::Negate(v) => -get_value(v), - Calculation::Horner(start_value, parts, factor) => { - let factor = get_value(factor); - let mut value = get_value(start_value); - for part in parts.iter() { - value = value * factor + get_value(part); - } - value - } - Calculation::Store(v) => get_value(v), - } - } -} - -/// Evaluator -#[derive(Clone, Default, Debug)] -pub struct Evaluator { - /// Custom gates evalution - pub custom_gates: GraphEvaluator, - /// Lookups evalution - pub lookups: Vec>, - /// Shuffle evalution - pub shuffles: Vec>, -} - -/// GraphEvaluator -#[derive(Clone, Debug)] -pub struct GraphEvaluator { - 
/// Constants - pub constants: Vec, - /// Rotations - pub rotations: Vec, - /// Calculations - pub calculations: Vec, - /// Number of intermediates - pub num_intermediates: usize, -} - -/// EvaluationData -#[derive(Default, Debug)] -pub struct EvaluationData { - /// Intermediates - pub intermediates: Vec, - /// Rotations - pub rotations: Vec, -} - -/// CaluclationInfo -#[derive(Clone, Debug)] -pub struct CalculationInfo { - /// Calculation - pub calculation: Calculation, - /// Target - pub target: usize, -} - -impl Evaluator { - /// Creates a new evaluation structure - pub fn new(cs: &ConstraintSystem) -> Self { - let mut ev = Evaluator::default(); - - // Custom gates - let mut parts = Vec::new(); - for gate in cs.gates.iter() { - parts.extend( - gate.polynomials() - .iter() - .map(|poly| ev.custom_gates.add_expression(poly)), - ); - } - ev.custom_gates.add_calculation(Calculation::Horner( - ValueSource::PreviousValue(), - parts, - ValueSource::Y(), - )); - - // Lookups - for lookup in cs.lookups.iter() { - let mut graph = GraphEvaluator::default(); - - let mut evaluate_lc = |expressions: &Vec>| { - let parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - // Input coset - let compressed_input_coset = evaluate_lc(&lookup.input_expressions); - // table coset - let compressed_table_coset = evaluate_lc(&lookup.table_expressions); - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - let right_gamma = graph.add_calculation(Calculation::Add( - compressed_table_coset, - ValueSource::Gamma(), - )); - let lc = graph.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Beta(), - )); - graph.add_calculation(Calculation::Mul(lc, right_gamma)); - - ev.lookups.push(graph); - } - - // Shuffles - for shuffle in cs.shuffles.iter() { - let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { - let 
parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - let mut graph_input = GraphEvaluator::default(); - let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); - let _ = graph_input.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Gamma(), - )); - - let mut graph_shuffle = GraphEvaluator::default(); - let compressed_shuffle_coset = - evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); - let _ = graph_shuffle.add_calculation(Calculation::Add( - compressed_shuffle_coset, - ValueSource::Gamma(), - )); - - ev.shuffles.push(graph_input); - ev.shuffles.push(graph_shuffle); - } - - ev - } - - /// Evaluate h poly - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn evaluate_h( - &self, - pk: &ProvingKey, - advice_polys: &[&[Polynomial]], - instance_polys: &[&[Polynomial]], - challenges: &[C::ScalarExt], - y: C::ScalarExt, - beta: C::ScalarExt, - gamma: C::ScalarExt, - theta: C::ScalarExt, - lookups: &[Vec>], - shuffles: &[Vec>], - permutations: &[permutation::prover::Committed], - ) -> Polynomial { - let domain = &pk.vk.domain; - let size = domain.extended_len(); - let rot_scale = 1 << (domain.extended_k() - domain.k()); - let fixed = &pk.fixed_cosets[..]; - let extended_omega = domain.get_extended_omega(); - let isize = size as i32; - let one = C::ScalarExt::ONE; - let l0 = &pk.l0; - let l_last = &pk.l_last; - let l_active_row = &pk.l_active_row; - let p = &pk.vk.cs.permutation; - - // Calculate the advice and instance cosets - let advice: Vec>> = advice_polys - .iter() - .map(|advice_polys| { - advice_polys - .iter() - .map(|poly| domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - let instance: Vec>> = instance_polys - .iter() - .map(|instance_polys| { - instance_polys - .iter() - .map(|poly| 
domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - - let mut values = domain.empty_extended(); - - // Core expression evaluations - let num_threads = multicore::current_num_threads(); - for ((((advice, instance), lookups), shuffles), permutation) in advice - .iter() - .zip(instance.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .zip(permutations.iter()) - { - // Custom gates - multicore::scope(|scope| { - let chunk_size = (size + num_threads - 1) / num_threads; - for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { - let start = thread_idx * chunk_size; - scope.spawn(move |_| { - let mut eval_data = self.custom_gates.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - *value = self.custom_gates.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - value, - idx, - rot_scale, - isize, - ); - } - }); - } - }); - - // Permutations - let sets = &permutation.sets; - if !sets.is_empty() { - let blinding_factors = pk.vk.cs.blinding_factors(); - let last_rotation = Rotation(-((blinding_factors + 1) as i32)); - let chunk_len = pk.vk.cs.degree() - 2; - let delta_start = beta * &C::Scalar::ZETA; - - let first_set = sets.first().unwrap(); - let last_set = sets.last().unwrap(); - - // Permutation constraints - parallelize(&mut values, |values, start| { - let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); - - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - *value = *value * y - + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); - // Enforce only for the last set. 
- // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - *value = *value * y - + ((last_set.permutation_product_coset[idx] - * last_set.permutation_product_coset[idx] - - last_set.permutation_product_coset[idx]) - * l_last[idx]); - // Except for the first set, enforce. - // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - for (set_idx, set) in sets.iter().enumerate() { - if set_idx != 0 { - *value = *value * y - + ((set.permutation_product_coset[idx] - - permutation.sets[set_idx - 1].permutation_product_coset - [r_last]) - * l0[idx]); - } - } - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) - // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) - // ) - let mut current_delta = delta_start * beta_term; - for ((set, columns), cosets) in sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(pk.permutation.cosets.chunks(chunk_len)) - { - let mut left = set.permutation_product_coset[r_next]; - for (values, permutation) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) - .zip(cosets.iter()) - { - left *= values[idx] + beta * permutation[idx] + gamma; - } - - let mut right = set.permutation_product_coset[idx]; - for values in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) { - right *= values[idx] + current_delta + gamma; - current_delta *= &C::Scalar::DELTA; - } - - *value = *value * y + ((left - right) * l_active_row[idx]); - } - beta_term *= &extended_omega; - } - }); - } - - // Lookups - for (n, lookup) in lookups.iter().enumerate() { - // Polynomials required for this lookup. - // Calculated here so these only have to be kept in memory for the short time - // they are actually needed. 
- let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); - let permuted_input_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_input_poly.clone()); - let permuted_table_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_table_poly.clone()); - - // Lookup constraints - parallelize(&mut values, |values, start| { - let lookup_evaluator = &self.lookups[n]; - let mut eval_data = lookup_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let table_value = lookup_evaluator.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); - - let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) - // (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - *value = *value * y - + ((product_coset[r_next] - * (permuted_input_coset[idx] + beta) - * (permuted_table_coset[idx] + gamma) - - product_coset[idx] * table_value) - * l_active_row[idx]); - // Check that the first values in the permuted input expression and permuted - // fixed expression are the same. - // l_0(X) * (a'(X) - s'(X)) = 0 - *value = *value * y + (a_minus_s * l0[idx]); - // Check that each value in the permuted lookup input expression is either - // equal to the value above it, or the value at the same index in the - // permuted table expression. 
- // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - *value = *value * y - + (a_minus_s - * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) - * l_active_row[idx]); - } - }); - } - - // Shuffle constraints - for (n, shuffle) in shuffles.iter().enumerate() { - let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); - - // Shuffle constraints - parallelize(&mut values, |values, start| { - let input_evaluator = &self.shuffles[2 * n]; - let shuffle_evaluator = &self.shuffles[2 * n + 1]; - let mut eval_data_input = shuffle_evaluator.instance(); - let mut eval_data_shuffle = shuffle_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let input_value = input_evaluator.evaluate( - &mut eval_data_input, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let shuffle_value = shuffle_evaluator.evaluate( - &mut eval_data_shuffle, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 - *value = *value * y - + l_active_row[idx] - * (product_coset[r_next] * shuffle_value - - product_coset[idx] * input_value) - } - }); - } - } - values - } -} - -impl Default for GraphEvaluator { - fn default() -> Self { - Self { - // Fixed positions to allow easy access - constants: vec![ - C::ScalarExt::ZERO, - C::ScalarExt::ONE, - C::ScalarExt::from(2u64), - ], - rotations: Vec::new(), - calculations: Vec::new(), - 
num_intermediates: 0, - } - } -} - -impl GraphEvaluator { - /// Adds a rotation - fn add_rotation(&mut self, rotation: &Rotation) -> usize { - let position = self.rotations.iter().position(|&c| c == rotation.0); - match position { - Some(pos) => pos, - None => { - self.rotations.push(rotation.0); - self.rotations.len() - 1 - } - } - } - - /// Adds a constant - fn add_constant(&mut self, constant: &C::ScalarExt) -> ValueSource { - let position = self.constants.iter().position(|&c| c == *constant); - ValueSource::Constant(match position { - Some(pos) => pos, - None => { - self.constants.push(*constant); - self.constants.len() - 1 - } - }) - } - - /// Adds a calculation. - /// Currently does the simplest thing possible: just stores the - /// resulting value so the result can be reused when that calculation - /// is done multiple times. - fn add_calculation(&mut self, calculation: Calculation) -> ValueSource { - let existing_calculation = self - .calculations - .iter() - .find(|c| c.calculation == calculation); - match existing_calculation { - Some(existing_calculation) => ValueSource::Intermediate(existing_calculation.target), - None => { - let target = self.num_intermediates; - self.calculations.push(CalculationInfo { - calculation, - target, - }); - self.num_intermediates += 1; - ValueSource::Intermediate(target) - } - } - } - - /// Generates an optimized evaluation for the expression - fn add_expression(&mut self, expr: &Expression) -> ValueSource { - match expr { - Expression::Constant(scalar) => self.add_constant(scalar), - Expression::Selector(_selector) => unreachable!(), - Expression::Fixed(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Fixed( - query.column_index, - rot_idx, - ))) - } - Expression::Advice(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Advice( - query.column_index, - rot_idx, - ))) - } - 
Expression::Instance(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Instance( - query.column_index, - rot_idx, - ))) - } - Expression::Challenge(challenge) => self.add_calculation(Calculation::Store( - ValueSource::Challenge(challenge.index()), - )), - Expression::Negated(a) => match **a { - Expression::Constant(scalar) => self.add_constant(&-scalar), - _ => { - let result_a = self.add_expression(a); - match result_a { - ValueSource::Constant(0) => result_a, - _ => self.add_calculation(Calculation::Negate(result_a)), - } - } - }, - Expression::Sum(a, b) => { - // Undo subtraction stored as a + (-b) in expressions - match &**b { - Expression::Negated(b_int) => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b_int); - if result_a == ValueSource::Constant(0) { - self.add_calculation(Calculation::Negate(result_b)) - } else if result_b == ValueSource::Constant(0) { - result_a - } else { - self.add_calculation(Calculation::Sub(result_a, result_b)) - } - } - _ => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b); - if result_a == ValueSource::Constant(0) { - result_b - } else if result_b == ValueSource::Constant(0) { - result_a - } else if result_a <= result_b { - self.add_calculation(Calculation::Add(result_a, result_b)) - } else { - self.add_calculation(Calculation::Add(result_b, result_a)) - } - } - } - } - Expression::Product(a, b) => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b); - if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) { - ValueSource::Constant(0) - } else if result_a == ValueSource::Constant(1) { - result_b - } else if result_b == ValueSource::Constant(1) { - result_a - } else if result_a == ValueSource::Constant(2) { - self.add_calculation(Calculation::Double(result_b)) - } else if result_b == ValueSource::Constant(2) { - 
self.add_calculation(Calculation::Double(result_a)) - } else if result_a == result_b { - self.add_calculation(Calculation::Square(result_a)) - } else if result_a <= result_b { - self.add_calculation(Calculation::Mul(result_a, result_b)) - } else { - self.add_calculation(Calculation::Mul(result_b, result_a)) - } - } - Expression::Scaled(a, f) => { - if *f == C::ScalarExt::ZERO { - ValueSource::Constant(0) - } else if *f == C::ScalarExt::ONE { - self.add_expression(a) - } else { - let cst = self.add_constant(f); - let result_a = self.add_expression(a); - self.add_calculation(Calculation::Mul(result_a, cst)) - } - } - } - } - - /// Creates a new evaluation structure - pub fn instance(&self) -> EvaluationData { - EvaluationData { - intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates], - rotations: vec![0usize; self.rotations.len()], - } - } - - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - data: &mut EvaluationData, - fixed: &[Polynomial], - advice: &[Polynomial], - instance: &[Polynomial], - challenges: &[C::ScalarExt], - beta: &C::ScalarExt, - gamma: &C::ScalarExt, - theta: &C::ScalarExt, - y: &C::ScalarExt, - previous_value: &C::ScalarExt, - idx: usize, - rot_scale: i32, - isize: i32, - ) -> C::ScalarExt { - // All rotation index values - for (rot_idx, rot) in self.rotations.iter().enumerate() { - data.rotations[rot_idx] = get_rotation_idx(idx, *rot, rot_scale, isize); - } - - // All calculations, with cached intermediate results - for calc in self.calculations.iter() { - data.intermediates[calc.target] = calc.calculation.evaluate( - &data.rotations, - &self.constants, - &data.intermediates, - fixed, - advice, - instance, - challenges, - beta, - gamma, - theta, - y, - previous_value, - ); - } - - // Return the result of the last calculation (if any) - if let Some(calc) = self.calculations.last() { - data.intermediates[calc.target] - } else { - C::ScalarExt::ZERO - } - } -} - -/// Simple evaluation of an expression -pub fn evaluate( - 
expression: &Expression, - size: usize, - rot_scale: i32, - fixed: &[Polynomial], - advice: &[Polynomial], - instance: &[Polynomial], - challenges: &[F], -) -> Vec { - let mut values = vec![F::ZERO; size]; - let isize = size as i32; - parallelize(&mut values, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - *value = expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| { - fixed[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - advice[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - instance[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * b, - &|a, scalar| a * scalar, - ); - } - }); - values -} diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 2dea5d26b1..635eeef827 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -1,5 +1,3 @@ -#![allow(clippy::int_plus_one)] - use std::ops::Range; use group::Curve; @@ -7,8 +5,7 @@ use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, - evaluation::Evaluator, - permutation, Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, + permutation, Assigned, Error, LagrangeCoeff, Polynomial, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -22,37 +19,12 @@ use halo2_middleware::circuit::{ Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, }; -pub(crate) fn create_domain( - k: u32, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, -) -> ( - EvaluationDomain, - ConstraintSystem, - ConcreteCircuit::Config, -) -where - C: CurveAffine, - ConcreteCircuit: Circuit, -{ - let mut cs = 
ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, params); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - - let degree = cs.degree(); - - let domain = EvaluationDomain::new(degree as u32, k); - - (domain, cs, config) -} - /// Assembly to be used in circuit synthesis. #[derive(Debug)] pub(crate) struct Assembly { pub(crate) k: u32, pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::keygen::AssemblyFront, + pub(crate) permutation: permutation::AssemblyFront, pub(crate) selectors: Vec>, // A range of available rows for assignment and copies. pub(crate) usable_rows: Range, @@ -200,195 +172,3 @@ impl Assignment for Assembly { // Do nothing; we don't care about namespaces in this context. } } - -/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. -pub fn keygen_vk_v2<'params, C, P>( - params: &P, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - C::Scalar: FromUniformBytes<64>, -{ - let cs2 = &circuit.cs; - let cs: ConstraintSystem = cs2.clone().into(); - let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs2.permutation, - &circuit.preprocessing.permutation, - )? - .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - params - .commit_lagrange( - &Polynomial::new_lagrange_from_vec(poly.clone()), - Blind::default(), - ) - .to_affine() - }) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - Vec::new(), - false, - )) -} - -/// Generate a `VerifyingKey` from an instance of `Circuit`. 
-/// By default, selector compression is turned **off**. -pub fn keygen_vk<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - keygen_vk_custom(params, circuit, true) -} - -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// -/// The selector compression optimization is turned on only if `compress_selectors` is `true`. -pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; - let mut vk = keygen_vk_v2(params, &compiled_circuit)?; - vk.compress_selectors = compress_selectors; - Ok(vk) -} - -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. -pub fn keygen_pk_v2<'params, C, P>( - params: &P, - vk: VerifyingKey, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, -{ - let cs = &circuit.cs; - - if (params.n() as usize) < vk.cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let fixed_polys: Vec<_> = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - vk.domain - .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) - }) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? 
- .build_pk(params, &vk.domain, &cs.permutation.clone().into()); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. - let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); - - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: circuit - .preprocessing - .fixed - .clone() - .into_iter() - .map(Polynomial::new_lagrange_from_vec) - .collect(), - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) -} - -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
-pub fn keygen_pk<'params, C, P, ConcreteCircuit>( - params: &P, - vk: VerifyingKey, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; - keygen_pk_v2(params, vk, &compiled_circuit) -} diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index 34426a2952..e7d24f77fb 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -3,9 +3,6 @@ use halo2_middleware::circuit::ExpressionMid; use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; -pub mod prover; -pub mod verifier; - /// Expressions involved in a lookup argument, with a name as metadata. #[derive(Clone)] pub struct Argument { diff --git a/common/src/plonk/lookup/prover.rs b/common/src/plonk/lookup/prover.rs deleted file mode 100644 index ed8cdabc77..0000000000 --- a/common/src/plonk/lookup/prover.rs +++ /dev/null @@ -1,476 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, -}; -use super::Argument; -use crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use halo2_middleware::ff::WithSmallOrderMulGroup; -use halo2_middleware::poly::Rotation; -use rand_core::RngCore; -use std::{ - collections::BTreeMap, - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -pub(in crate::plonk) struct Permuted { - compressed_input_expression: Polynomial, - permuted_input_expression: Polynomial, - permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - compressed_table_expression: Polynomial, - permuted_table_expression: Polynomial, - 
permuted_table_poly: Polynomial, - permuted_table_blind: Blind, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - pub(in crate::plonk) permuted_table_poly: Polynomial, - permuted_table_blind: Blind, - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - -impl> Argument { - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. 
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); - - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values - let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - // Commit to permuted input expression - let 
(permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } -} - -impl Permuted { - /// Given a Lookup with input expressions, table expressions, and the permuted - /// input expression and permuted table expression, this method constructs the - /// grand product polynomial over the lookup. The grand product polynomial - /// is used to populate the Product struct. The Product struct is - /// added to the Lookup and finally returned by the method. - pub(in crate::plonk) fn commit_product< - 'params, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - pk: &ProvingKey, - params: &P, - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - // Goal is to compute the products of fractions - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where a_j(X) is the jth input expression in this lookup, - // where a'(X) is the compression of the permuted input expressions, - // s_j(X) is the jth table expression in this lookup, - // s'(X) is the compression of the permuted table expressions, - // and i is the ith row of the expression. - let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; - // Denominator uses the permuted input expression and permuted table expression - parallelize(&mut lookup_product, |lookup_product, start| { - for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product - .iter_mut() - .zip(self.permuted_input_expression[start..].iter()) - .zip(self.permuted_table_expression[start..].iter()) - { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); - } - }); - - // Batch invert to obtain the denominators for the lookup product - // polynomials - lookup_product.iter_mut().batch_invert(); - - // Finish the computation of the entire fraction by computing the numerators - // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - parallelize(&mut lookup_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); - } - }); - - // The product vector is a vector of products of fractions of the form - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where there are m input expressions and m table expressions, - // a_j(\omega^i) is the jth input expression in this lookup, - // a'j(\omega^i) is the permuted input expression, - // s_j(\omega^i) is the jth table expression in this lookup, - // s'(\omega^i) is the permuted table expression, - // and i is the ith row of the expression. - - // Compute the evaluations of the lookup product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(lookup_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. - .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - // This test works only with intermediate representations in this method. - // It can be used for debugging purposes. - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - - // l_0(X) * (1 - z(X)) = 0 - assert_eq!(z[0], C::Scalar::ONE); - - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - for i in 0..u { - let mut left = z[i + 1]; - let permuted_input_value = &self.permuted_input_expression[i]; - - let permuted_table_value = &self.permuted_table_expression[i]; - - left *= &(*beta + permuted_input_value); - left *= &(*gamma + permuted_table_value); - - let mut right = z[i]; - let mut input_term = self.compressed_input_expression[i]; - let mut table_term = self.compressed_table_expression[i]; - - input_term += &(*beta); - table_term += &(*gamma); - right *= &(input_term * &table_term); - - assert_eq!(left, right); - } - - // l_last(X) * (z(X)^2 - z(X)) = 0 - // Assertion will fail only when soundness is broken, in which - // case this z[u] value will be zero. (bad!) - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - permuted_input_poly: self.permuted_input_poly, - permuted_input_blind: self.permuted_input_blind, - permuted_table_poly: self.permuted_table_poly, - permuted_table_blind: self.permuted_table_blind, - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_inv = domain.rotate_omega(*x, Rotation::prev()); - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); - let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); - let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); - - // Hash each advice evaluation 
- for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - .chain(Some(permuted_input_eval)) - .chain(Some(permuted_input_inv_eval)) - .chain(Some(permuted_table_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - // Open lookup input commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup table commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_table_poly, - blind: self.constructed.permuted_table_blind, - })) - // Open lookup input commitments at x_inv - .chain(Some(ProverQuery { - point: x_inv, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} - -type ExpressionPair = (Polynomial, Polynomial); - -/// Given a vector of input values A and a vector of table values S, -/// this method permutes A and S to produce A' and S', such that: -/// - like values in A' are vertically adjacent to each other; and -/// - the first row in a sequence of like values in A' is the row -/// that has the corresponding value in S'. -/// This method returns (A', S') if no errors are encountered. 
-fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - mut rng: R, - input_expression: &Polynomial, - table_expression: &Polynomial, -) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - let usable_rows = params.n() as usize - (blinding_factors + 1); - - let mut permuted_input_expression: Vec = input_expression.to_vec(); - permuted_input_expression.truncate(usable_rows); - - // Sort input lookup expression values - permuted_input_expression.sort(); - - // A BTreeMap of each unique element in the table expression and its count - let mut leftover_table_map: BTreeMap = table_expression - .iter() - .take(usable_rows) - .fold(BTreeMap::new(), |mut acc, coeff| { - *acc.entry(*coeff).or_insert(0) += 1; - acc - }); - let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; - - let mut repeated_input_rows = permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter_mut()) - .enumerate() - .filter_map(|(row, (input_value, table_value))| { - // If this is the first occurrence of `input_value` in the input expression - if row == 0 || *input_value != permuted_input_expression[row - 1] { - *table_value = *input_value; - // Remove one instance of input_value from leftover_table_map - if let Some(count) = leftover_table_map.get_mut(input_value) { - assert!(*count > 0); - *count -= 1; - None - } else { - // Return error if input_value not found - Some(Err(Error::ConstraintSystemFailure)) - } - // If input value is repeated - } else { - Some(Ok(row)) - } - }) - .collect::, _>>()?; - - // Populate permuted table at unfilled rows with leftover table elements - for (coeff, count) in leftover_table_map.iter() { - for _ in 0..*count { - permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; - } - } - assert!(repeated_input_rows.is_empty()); - - permuted_input_expression - .extend((0..(blinding_factors + 1)).map(|_| 
C::Scalar::random(&mut rng))); - permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - assert_eq!(permuted_input_expression.len(), params.n() as usize); - assert_eq!(permuted_table_coeffs.len(), params.n() as usize); - - #[cfg(feature = "sanity-checks")] - { - let mut last = None; - for (a, b) in permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter()) - .take(usable_rows) - { - if *a != *b { - assert_eq!(*a, last.unwrap()); - } - last = Some(*a); - } - } - - Ok(( - domain.lagrange_from_vec(permuted_input_expression), - domain.lagrange_from_vec(permuted_table_coeffs), - )) -} diff --git a/common/src/plonk/lookup/verifier.rs b/common/src/plonk/lookup/verifier.rs deleted file mode 100644 index 11e780148d..0000000000 --- a/common/src/plonk/lookup/verifier.rs +++ /dev/null @@ -1,212 +0,0 @@ -use std::iter; - -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, -}; -use super::Argument; -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use halo2_middleware::ff::Field; -use halo2_middleware::poly::Rotation; - -pub struct PermutationCommitments { - permuted_input_commitment: C, - permuted_table_commitment: C, -} - -pub struct Committed { - permuted: PermutationCommitments, - product_commitment: C, -} - -pub struct Evaluated { - committed: Committed, - product_eval: C::Scalar, - product_next_eval: C::Scalar, - permuted_input_eval: C::Scalar, - permuted_input_inv_eval: C::Scalar, - permuted_table_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_permuted_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permuted_input_commitment = transcript.read_point()?; - let permuted_table_commitment = transcript.read_point()?; - - 
Ok(PermutationCommitments { - permuted_input_commitment, - permuted_table_commitment, - }) - } -} - -impl PermutationCommitments { - pub(in crate::plonk) fn read_product_commitment< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; - - Ok(Committed { - permuted: self, - product_commitment, - }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_eval = transcript.read_scalar()?; - let product_next_eval = transcript.read_scalar()?; - let permuted_input_eval = transcript.read_scalar()?; - let permuted_input_inv_eval = transcript.read_scalar()?; - let permuted_table_eval = transcript.read_scalar()?; - - Ok(Evaluated { - committed: self, - product_eval, - product_next_eval, - permuted_input_eval, - permuted_input_inv_eval, - permuted_table_eval, - }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - argument: &'a Argument, - theta: ChallengeTheta, - beta: ChallengeBeta, - gamma: ChallengeGamma, - advice_evals: &[C::Scalar], - fixed_evals: &[C::Scalar], - instance_evals: &[C::Scalar], - challenges: &[C::Scalar], - ) -> impl Iterator + 'a { - let active_rows = C::Scalar::ONE - (l_last + l_blind); - - let product_expression = || { - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - let left = self.product_next_eval - * &(self.permuted_input_eval + &*beta) - * &(self.permuted_table_eval + &*gamma); - - let compress_expressions = |expressions: &[Expression]| { - expressions - .iter() - .map(|expression| { - expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) - }; - let right = self.product_eval - * &(compress_expressions(&argument.input_expressions) + &*beta) - * &(compress_expressions(&argument.table_expressions) + &*gamma); - - (left - &right) * &active_rows - }; - - std::iter::empty() - .chain( - // l_0(X) * (1 - z(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), - ) - .chain( - // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), - ) - .chain( - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - // ) = 0 - Some(product_expression()), - ) - .chain(Some( - // l_0(X) * (a'(X) - s'(X)) = 0 - l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), - )) - .chain(Some( - // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - (self.permuted_input_eval - &self.permuted_table_eval) - * &(self.permuted_input_eval - &self.permuted_input_inv_eval) - * &active_rows, - )) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open lookup input commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - *x, - self.permuted_input_eval, - ))) - // Open lookup table commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_table_commitment, - *x, - self.permuted_table_eval, - ))) - // Open lookup input commitments at \omega^{-1} x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - x_inv, - self.permuted_input_inv_eval, - ))) - // Open lookup product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } -} diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 19fcb7eceb..967b4fa663 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -6,20 +6,17 @@ use crate::{ polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, SerdePrimeField, }, + plonk::Error, poly::{Coeff, ExtendedLagrangeCoeff, 
LagrangeCoeff, Polynomial}, SerdeFormat, }; use halo2_middleware::circuit::{Any, Column}; -use halo2_middleware::permutation::ArgumentV2; - -pub(crate) mod keygen; -pub(crate) mod prover; -pub(crate) mod verifier; - -pub use keygen::Assembly; +use halo2_middleware::permutation::{ArgumentV2, Cell}; use std::io; +pub mod keygen; + /// A permutation argument. #[derive(Debug, Clone)] pub struct Argument { @@ -179,3 +176,51 @@ impl ProvingKey { + polynomial_slice_byte_length(&self.cosets) } } + +// TODO: Move to frontend +#[derive(Clone, Debug)] +pub struct AssemblyFront { + n: usize, + columns: Vec>, + pub(crate) copies: Vec<(Cell, Cell)>, +} + +impl AssemblyFront { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + Self { + n, + columns: p.columns.clone(), + copies: Vec::new(), + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.columns.contains(&left_column) { + return Err(Error::ColumnNotInPermutation(left_column)); + } + if !self.columns.contains(&right_column) { + return Err(Error::ColumnNotInPermutation(right_column)); + } + // Check bounds + if left_row >= self.n || right_row >= self.n { + return Err(Error::BoundsFailure); + } + self.copies.push(( + Cell { + column: left_column, + row: left_row, + }, + Cell { + column: right_column, + row: right_row, + }, + )); + Ok(()) + } +} diff --git a/common/src/plonk/permutation/keygen.rs b/common/src/plonk/permutation/keygen.rs index 7064c849f9..3a83e6889c 100644 --- a/common/src/plonk/permutation/keygen.rs +++ b/common/src/plonk/permutation/keygen.rs @@ -36,53 +36,6 @@ pub struct Assembly { sizes: Vec>, } -#[derive(Clone, Debug)] -pub struct AssemblyFront { - n: usize, - columns: Vec>, - pub(crate) copies: Vec<(Cell, Cell)>, -} - -impl AssemblyFront { - pub(crate) fn new(n: usize, p: &Argument) -> Self { - Self { - n, - columns: p.columns.clone(), - copies: Vec::new(), - } - } - - pub(crate) fn copy( 
- &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.columns.contains(&left_column) { - return Err(Error::ColumnNotInPermutation(left_column)); - } - if !self.columns.contains(&right_column) { - return Err(Error::ColumnNotInPermutation(right_column)); - } - // Check bounds - if left_row >= self.n || right_row >= self.n { - return Err(Error::BoundsFailure); - } - self.copies.push(( - Cell { - column: left_column, - row: left_row, - }, - Cell { - column: right_column, - row: right_row, - }, - )); - Ok(()) - } -} - #[cfg(not(feature = "thread-safe-region"))] impl Assembly { pub(crate) fn new_from_assembly_mid( diff --git a/common/src/plonk/permutation/prover.rs b/common/src/plonk/permutation/prover.rs deleted file mode 100644 index 8d4b168740..0000000000 --- a/common/src/plonk/permutation/prover.rs +++ /dev/null @@ -1,331 +0,0 @@ -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use halo2_middleware::ff::PrimeField; -use rand_core::RngCore; -use std::iter::{self, ExactSizeIterator}; - -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; -use super::{Argument, ProvingKey}; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - plonk::{self, Error}, - poly::{ - commitment::{Blind, Params}, - Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use halo2_middleware::circuit::Any; -use halo2_middleware::poly::Rotation; - -pub(crate) struct CommittedSet { - pub(crate) permutation_product_poly: Polynomial, - pub(crate) permutation_product_coset: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Committed { - pub(crate) sets: Vec>, -} - -pub struct ConstructedSet { - permutation_product_poly: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Constructed { - sets: Vec>, -} - -pub(crate) struct Evaluated { - constructed: Constructed, -} - 
-impl Argument { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit< - 'params, - C: CurveAffine, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - params: &P, - pk: &plonk::ProvingKey, - pkey: &ProvingKey, - advice: &[Polynomial], - fixed: &[Polynomial], - instance: &[Polynomial], - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - - // How many columns can be included in a single permutation polynomial? - // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This - // will never underflow because of the requirement of at least a degree - // 3 circuit for the permutation argument. - assert!(pk.vk.cs_degree >= 3); - let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); - - // Each column gets its own delta power. - let mut deltaomega = C::Scalar::ONE; - - // Track the "last" value from the previous column set - let mut last_z = C::Scalar::ONE; - - let mut sets = vec![]; - - for (columns, permutations) in self - .columns - .chunks(chunk_len) - .zip(pkey.permutations.chunks(chunk_len)) - { - // Goal is to compute the products of fractions - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where p_j(X) is the jth column in this permutation, - // and i is the ith row of the column. 
- - let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; - - // Iterate over each column of the permutation - for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - for ((modified_values, value), permuted_value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - .zip(permuted_column_values[start..].iter()) - { - *modified_values *= &(*beta * permuted_value + &*gamma + value); - } - }); - } - - // Invert to obtain the denominator for the permutation product polynomial - modified_values.batch_invert(); - - // Iterate over each column again, this time finishing the computation - // of the entire fraction by computing the numerators - for &column in columns.iter() { - let omega = domain.get_omega(); - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); - for (modified_values, value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - { - // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); - deltaomega *= ω - } - }); - deltaomega *= &::DELTA; - } - - // The modified_values vector is a vector of products of fractions - // of the form - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where i is the index into modified_values, for the jth column in - // the permutation - - // Compute the evaluations of the permutation product polynomial - // over our domain, starting with z[0] = 1 - let mut z = vec![last_z]; - for row in 
1..(params.n() as usize) { - let mut tmp = z[row - 1]; - - tmp *= &modified_values[row - 1]; - z.push(tmp); - } - let mut z = domain.lagrange_from_vec(z); - // Set blinding factors - for z in &mut z[params.n() as usize - blinding_factors..] { - *z = C::Scalar::random(&mut rng); - } - // Set new last_z - last_z = z[params.n() as usize - (blinding_factors + 1)]; - - let blind = Blind(C::Scalar::random(&mut rng)); - - let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); - let permutation_product_blind = blind; - let z = domain.lagrange_to_coeff(z); - let permutation_product_poly = z.clone(); - - let permutation_product_coset = domain.coeff_to_extended(z.clone()); - - let permutation_product_commitment = - permutation_product_commitment_projective.to_affine(); - - // Hash the permutation product commitment - transcript.write_point(permutation_product_commitment)?; - - sets.push(CommittedSet { - permutation_product_poly, - permutation_product_coset, - permutation_product_blind, - }); - } - - Ok(Committed { sets }) - } -} - -impl Committed { - pub(in crate::plonk) fn construct(self) -> Constructed { - Constructed { - sets: self - .sets - .iter() - .map(|set| ConstructedSet { - permutation_product_poly: set.permutation_product_poly.clone(), - permutation_product_blind: set.permutation_product_blind, - }) - .collect(), - } - } -} - -impl super::ProvingKey { - pub(in crate::plonk) fn open( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - self.polys.iter().map(move |poly| ProverQuery { - point: *x, - poly, - blind: Blind::default(), - }) - } - - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - &self, - x: ChallengeX, - transcript: &mut T, - ) -> Result<(), Error> { - // Hash permutation evals - for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { - transcript.write_scalar(eval)?; - } - - Ok(()) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: 
&plonk::ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let blinding_factors = pk.vk.cs.blinding_factors(); - - { - let mut sets = self.sets.iter(); - - while let Some(set) = sets.next() { - let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); - - let permutation_product_next_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation::next()), - ); - - // Hash permutation product evals - for eval in iter::empty() - .chain(Some(&permutation_product_eval)) - .chain(Some(&permutation_product_next_eval)) - { - transcript.write_scalar(*eval)?; - } - - // If we have any remaining sets to process, evaluate this set at omega^u - // so we can constrain the last value of its running product to equal the - // first value of the next set's running product, chaining them together. - if sets.len() > 0 { - let permutation_product_last_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), - ); - - transcript.write_scalar(permutation_product_last_eval)?; - } - } - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a plonk::ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.cs.blinding_factors(); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = pk - .vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.constructed.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega x - .chain(Some(ProverQuery { - point: *x, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - .chain(Some(ProverQuery { - point: x_next, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - })) - // Open it 
at \omega^{last} x for all but the last set. This rotation is only - // sensical for the first row, but we only use this rotation in a constraint - // that is gated on l_0. - .chain( - self.constructed - .sets - .iter() - .rev() - .skip(1) - .flat_map(move |set| { - Some(ProverQuery { - point: x_last, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - }) - }), - ) - } -} diff --git a/common/src/plonk/permutation/verifier.rs b/common/src/plonk/permutation/verifier.rs deleted file mode 100644 index 195a771999..0000000000 --- a/common/src/plonk/permutation/verifier.rs +++ /dev/null @@ -1,256 +0,0 @@ -use halo2_middleware::ff::{Field, PrimeField}; -use std::iter; - -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; -use super::{Argument, VerifyingKey}; -use crate::{ - arithmetic::CurveAffine, - plonk::{self, Error}, - poly::{commitment::MSM, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use halo2_middleware::circuit::Any; -use halo2_middleware::poly::Rotation; - -pub struct Committed { - permutation_product_commitments: Vec, -} - -pub struct EvaluatedSet { - permutation_product_commitment: C, - permutation_product_eval: C::Scalar, - permutation_product_next_eval: C::Scalar, - permutation_product_last_eval: Option, -} - -pub struct CommonEvaluated { - permutation_evals: Vec, -} - -pub struct Evaluated { - sets: Vec>, -} - -impl Argument { - pub(crate) fn read_product_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - vk: &plonk::VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - let chunk_len = vk.cs_degree - 2; - - let permutation_product_commitments = self - .columns - .chunks(chunk_len) - .map(|_| transcript.read_point()) - .collect::, _>>()?; - - Ok(Committed { - permutation_product_commitments, - }) - } -} - -impl VerifyingKey { - pub(in crate::plonk) fn evaluate, T: TranscriptRead>( - &self, - transcript: &mut T, - ) -> Result, Error> { - let 
permutation_evals = self - .commitments - .iter() - .map(|_| transcript.read_scalar()) - .collect::, _>>()?; - - Ok(CommonEvaluated { permutation_evals }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let mut sets = vec![]; - - let mut iter = self.permutation_product_commitments.into_iter(); - - while let Some(permutation_product_commitment) = iter.next() { - let permutation_product_eval = transcript.read_scalar()?; - let permutation_product_next_eval = transcript.read_scalar()?; - let permutation_product_last_eval = if iter.len() > 0 { - Some(transcript.read_scalar()?) - } else { - None - }; - - sets.push(EvaluatedSet { - permutation_product_commitment, - permutation_product_eval, - permutation_product_next_eval, - permutation_product_last_eval, - }); - } - - Ok(Evaluated { sets }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - vk: &'a plonk::VerifyingKey, - p: &'a Argument, - common: &'a CommonEvaluated, - advice_evals: &'a [C::Scalar], - fixed_evals: &'a [C::Scalar], - instance_evals: &'a [C::Scalar], - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - beta: ChallengeBeta, - gamma: ChallengeGamma, - x: ChallengeX, - ) -> impl Iterator + 'a { - let chunk_len = vk.cs_degree - 2; - iter::empty() - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - .chain( - self.sets - .first() - .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), - ) - // Enforce only for the last set. - // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - .chain(self.sets.last().map(|last_set| { - (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) - * &l_last - })) - // Except for the first set, enforce. 
- // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - .chain( - self.sets - .iter() - .skip(1) - .zip(self.sets.iter()) - .map(|(set, last_set)| { - ( - set.permutation_product_eval, - last_set.permutation_product_last_eval.unwrap(), - ) - }) - .map(move |(set, prev_last)| (set - &prev_last) * &l_0), - ) - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - .chain( - self.sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(common.permutation_evals.chunks(chunk_len)) - .enumerate() - .map(move |(chunk_index, ((set, columns), permutation_evals))| { - let mut left = set.permutation_product_next_eval; - for (eval, permutation_eval) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals - [vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) - .zip(permutation_evals.iter()) - { - left *= &(eval + &(*beta * permutation_eval) + &*gamma); - } - - let mut right = set.permutation_product_eval; - let mut current_delta = (*beta * &*x) - * &(::DELTA - .pow_vartime([(chunk_index * chunk_len) as u64])); - for eval in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) { - right *= &(eval + ¤t_delta + &*gamma); - current_delta *= &C::Scalar::DELTA; - } - - (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) - }), - ) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r 
plonk::VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = vk.cs.blinding_factors(); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega^{-1} x - // Open permutation product commitments at x and \omega x - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - *x, - set.permutation_product_eval, - ))) - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_next, - set.permutation_product_next_eval, - ))) - })) - // Open it at \omega^{last} x for all but the last set - .chain(self.sets.iter().rev().skip(1).flat_map(move |set| { - Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_last, - set.permutation_product_last_eval.unwrap(), - )) - })) - } -} - -impl CommonEvaluated { - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vkey: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - // Open permutation commitments for each permutation argument at x - vkey.commitments - .iter() - .zip(self.permutation_evals.iter()) - .map(move |(commitment, &eval)| VerifierQuery::new_commitment(commitment, *x, eval)) - } -} diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index d80c4e498a..c865ada983 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -3,9 +3,6 @@ use halo2_middleware::circuit::ExpressionMid; use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; -pub(crate) mod prover; -pub(crate) mod verifier; - /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { diff --git a/common/src/plonk/shuffle/prover.rs b/common/src/plonk/shuffle/prover.rs deleted file mode 100644 index b70184fc8c..0000000000 --- a/common/src/plonk/shuffle/prover.rs +++ /dev/null @@ -1,251 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, -}; -use super::Argument; -use crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use group::{ff::BatchInvert, Curve}; -use halo2_middleware::ff::WithSmallOrderMulGroup; -use halo2_middleware::poly::Rotation; -use rand_core::RngCore; -use std::{ - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -struct Compressed { - input_expression: Polynomial, - shuffle_expression: Polynomial, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - -impl> Argument { - /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... 
+ \theta S_{m-2} + S_{m-1}, - #[allow(clippy::too_many_arguments)] - fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - ) -> Compressed - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the shuffle and compress them - let input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the shuffle and compress them - let shuffle_expression = compress_expressions(&self.shuffle_expressions); - - Compressed { - input_expression, - shuffle_expression, - } - } - - /// Given a Shuffle with input expressions and table expressions this method - /// constructs the grand product polynomial over the shuffle. - /// The grand product polynomial is used to populate the Product struct. - /// The Product struct is added to the Shuffle and finally returned by the method. 
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_product< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - let compressed = self.compress( - pk, - params, - domain, - theta, - advice_values, - fixed_values, - instance_values, - challenges, - ); - - let blinding_factors = pk.vk.cs.blinding_factors(); - - let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; - parallelize(&mut shuffle_product, |shuffle_product, start| { - for (shuffle_product, shuffle_value) in shuffle_product - .iter_mut() - .zip(compressed.shuffle_expression[start..].iter()) - { - *shuffle_product = *gamma + shuffle_value; - } - }); - - shuffle_product.iter_mut().batch_invert(); - - parallelize(&mut shuffle_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - *product *= &(*gamma + compressed.input_expression[i]); - } - }); - - // Compute the evaluations of the shuffle product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(shuffle_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - assert_eq!(z[0], C::Scalar::ONE); - for i in 0..u { - let mut left = z[i + 1]; - let input_value = &compressed.input_expression[i]; - let shuffle_value = &compressed.shuffle_expression[i]; - left *= &(*gamma + shuffle_value); - let mut right = z[i]; - right *= &(*gamma + input_value); - assert_eq!(left, right); - } - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: 
self.constructed.product_blind, - })) - // Open shuffle product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} diff --git a/common/src/plonk/shuffle/verifier.rs b/common/src/plonk/shuffle/verifier.rs deleted file mode 100644 index 46a7823c9c..0000000000 --- a/common/src/plonk/shuffle/verifier.rs +++ /dev/null @@ -1,139 +0,0 @@ -use std::iter; - -use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; -use super::Argument; -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use halo2_middleware::ff::Field; -use halo2_middleware::poly::Rotation; - -pub struct Committed { - product_commitment: C, -} - -pub struct Evaluated { - committed: Committed, - product_eval: C::Scalar, - product_next_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_product_commitment< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; - - Ok(Committed { product_commitment }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_eval = transcript.read_scalar()?; - let product_next_eval = transcript.read_scalar()?; - - Ok(Evaluated { - committed: self, - product_eval, - product_next_eval, - }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - argument: &'a Argument, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_evals: &[C::Scalar], - fixed_evals: &[C::Scalar], - instance_evals: &[C::Scalar], - challenges: &[C::Scalar], - ) -> impl Iterator + 'a { - let active_rows = 
C::Scalar::ONE - (l_last + l_blind); - - let product_expression = || { - // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma) - let compress_expressions = |expressions: &[Expression]| { - expressions - .iter() - .map(|expression| { - expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) - }; - // z(\omega X) (s(X) + \gamma) - let left = self.product_next_eval - * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); - // z(X) (a(X) + \gamma) - let right = - self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); - - (left - &right) * &active_rows - }; - - std::iter::empty() - .chain( - // l_0(X) * (1 - z'(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), - ) - .chain( - // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), - ) - .chain( - // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) - Some(product_expression()), - ) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open shuffle product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } -} diff --git a/common/src/plonk/vanishing.rs b/common/src/plonk/vanishing.rs 
deleted file mode 100644 index 81f86b02e2..0000000000 --- a/common/src/plonk/vanishing.rs +++ /dev/null @@ -1,11 +0,0 @@ -use std::marker::PhantomData; - -use crate::arithmetic::CurveAffine; - -mod prover; -mod verifier; - -/// A vanishing argument. -pub(crate) struct Argument { - _marker: PhantomData, -} diff --git a/common/src/plonk/vanishing/prover.rs b/common/src/plonk/vanishing/prover.rs deleted file mode 100644 index d30d9dc4af..0000000000 --- a/common/src/plonk/vanishing/prover.rs +++ /dev/null @@ -1,199 +0,0 @@ -use std::{collections::HashMap, iter}; - -use group::Curve; -use halo2_middleware::ff::Field; -use rand_chacha::ChaCha20Rng; -use rand_core::{RngCore, SeedableRng}; - -use super::Argument; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - multicore::current_num_threads, - plonk::{ChallengeX, Error}, - poly::{ - commitment::{Blind, ParamsProver}, - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; - -pub(in crate::plonk) struct Committed { - random_poly: Polynomial, - random_blind: Blind, -} - -pub(in crate::plonk) struct Constructed { - h_pieces: Vec>, - h_blinds: Vec>, - committed: Committed, -} - -pub(in crate::plonk) struct Evaluated { - h_poly: Polynomial, - h_blind: Blind, - committed: Committed, -} - -impl Argument { - pub(in crate::plonk) fn commit< - 'params, - P: ParamsProver<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - params: &P, - domain: &EvaluationDomain, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - // Sample a random polynomial of degree n - 1 - let n = 1usize << domain.k() as usize; - let mut rand_vec = vec![C::Scalar::ZERO; n]; - - let num_threads = current_num_threads(); - let chunk_size = n / num_threads; - let thread_seeds = (0..) 
- .step_by(chunk_size + 1) - .take(n % num_threads) - .chain( - (chunk_size != 0) - .then(|| ((n % num_threads) * (chunk_size + 1)..).step_by(chunk_size)) - .into_iter() - .flatten(), - ) - .take(num_threads) - .zip(iter::repeat_with(|| { - let mut seed = [0u8; 32]; - rng.fill_bytes(&mut seed); - ChaCha20Rng::from_seed(seed) - })) - .collect::>(); - - parallelize(&mut rand_vec, |chunk, offset| { - let mut rng = thread_seeds[&offset].clone(); - chunk - .iter_mut() - .for_each(|v| *v = C::Scalar::random(&mut rng)); - }); - - let random_poly: Polynomial = domain.coeff_from_vec(rand_vec); - - // Sample a random blinding factor - let random_blind = Blind(C::Scalar::random(rng)); - - // Commit - let c = params.commit(&random_poly, random_blind).to_affine(); - transcript.write_point(c)?; - - Ok(Committed { - random_poly, - random_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn construct< - 'params, - P: ParamsProver<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - params: &P, - domain: &EvaluationDomain, - h_poly: Polynomial, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - // Divide by t(X) = X^{params.n} - 1. 
- let h_poly = domain.divide_by_vanishing_poly(h_poly); - - // Obtain final h(X) polynomial - let h_poly = domain.extended_to_coeff(h_poly); - - // Split h(X) up into pieces - let h_pieces = h_poly - .chunks_exact(params.n() as usize) - .map(|v| domain.coeff_from_vec(v.to_vec())) - .collect::>(); - drop(h_poly); - let h_blinds: Vec<_> = h_pieces - .iter() - .map(|_| Blind(C::Scalar::random(&mut rng))) - .collect(); - - // Compute commitments to each h(X) piece - let h_commitments_projective: Vec<_> = h_pieces - .iter() - .zip(h_blinds.iter()) - .map(|(h_piece, blind)| params.commit(h_piece, *blind)) - .collect(); - let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; - C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); - let h_commitments = h_commitments; - - // Hash each h(X) piece - for c in h_commitments.iter() { - transcript.write_point(*c)?; - } - - Ok(Constructed { - h_pieces, - h_blinds, - committed: self, - }) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - x: ChallengeX, - xn: C::Scalar, - domain: &EvaluationDomain, - transcript: &mut T, - ) -> Result, Error> { - let h_poly = self - .h_pieces - .iter() - .rev() - .fold(domain.empty_coeff(), |acc, eval| acc * xn + eval); - - let h_blind = self - .h_blinds - .iter() - .rev() - .fold(Blind(C::Scalar::ZERO), |acc, eval| acc * Blind(xn) + *eval); - - let random_eval = eval_polynomial(&self.committed.random_poly, *x); - transcript.write_scalar(random_eval)?; - - Ok(Evaluated { - h_poly, - h_blind, - committed: self.committed, - }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - iter::empty() - .chain(Some(ProverQuery { - point: *x, - poly: &self.h_poly, - blind: self.h_blind, - })) - .chain(Some(ProverQuery { - point: *x, - poly: &self.committed.random_poly, - blind: self.committed.random_blind, - })) - } -} diff --git 
a/common/src/plonk/vanishing/verifier.rs b/common/src/plonk/vanishing/verifier.rs deleted file mode 100644 index 05ccb02a5b..0000000000 --- a/common/src/plonk/vanishing/verifier.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::iter; - -use halo2_middleware::ff::Field; - -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{ - commitment::{Params, MSM}, - VerifierQuery, - }, - transcript::{read_n_points, EncodedChallenge, TranscriptRead}, -}; - -use super::super::{ChallengeX, ChallengeY}; -use super::Argument; - -pub struct Committed { - random_poly_commitment: C, -} - -pub struct Constructed { - h_commitments: Vec, - random_poly_commitment: C, -} - -pub struct PartiallyEvaluated { - h_commitments: Vec, - random_poly_commitment: C, - random_eval: C::Scalar, -} - -pub struct Evaluated> { - h_commitment: M, - random_poly_commitment: C, - expected_h_eval: C::Scalar, - random_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_commitments_before_y< - E: EncodedChallenge, - T: TranscriptRead, - >( - transcript: &mut T, - ) -> Result, Error> { - let random_poly_commitment = transcript.read_point()?; - - Ok(Committed { - random_poly_commitment, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn read_commitments_after_y< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - vk: &VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 - let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; - - Ok(Constructed { - h_commitments, - random_poly_commitment: self.random_poly_commitment, - }) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate_after_x, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let random_eval = transcript.read_scalar()?; - - Ok(PartiallyEvaluated { - h_commitments: self.h_commitments, - random_poly_commitment: self.random_poly_commitment, - random_eval, 
- }) - } -} - -impl PartiallyEvaluated { - pub(in crate::plonk) fn verify<'params, P: Params<'params, C>>( - self, - params: &'params P, - expressions: impl Iterator, - y: ChallengeY, - xn: C::Scalar, - ) -> Evaluated { - let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); - let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); - - let h_commitment = - self.h_commitments - .iter() - .rev() - .fold(params.empty_msm(), |mut acc, commitment| { - acc.scale(xn); - let commitment: C::CurveExt = (*commitment).into(); - acc.append_term(C::Scalar::ONE, commitment); - - acc - }); - - Evaluated { - expected_h_eval, - h_commitment, - random_poly_commitment: self.random_poly_commitment, - random_eval: self.random_eval, - } - } -} - -impl> Evaluated { - pub(in crate::plonk) fn queries( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - iter::empty() - .chain(Some(VerifierQuery::new_msm( - &self.h_commitment, - *x, - self.expected_h_eval, - ))) - .chain(Some(VerifierQuery::new_commitment( - &self.random_poly_commitment, - *x, - self.random_eval, - ))) - } -} diff --git a/common/src/plonk/verifier.rs b/common/src/plonk/verifier.rs deleted file mode 100644 index e60f19374f..0000000000 --- a/common/src/plonk/verifier.rs +++ /dev/null @@ -1,462 +0,0 @@ -use group::Curve; -use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; -use std::iter; - -use super::{ - vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, -}; -use crate::arithmetic::compute_inner_product; -use crate::poly::commitment::{CommitmentScheme, Verifier}; -use crate::poly::VerificationStrategy; -use crate::poly::{ - commitment::{Blind, Params}, - VerifierQuery, -}; -use crate::transcript::{read_n_scalars, EncodedChallenge, TranscriptRead}; - -#[cfg(feature = "batch")] -mod batch; -#[cfg(feature = "batch")] -pub use batch::BatchVerifier; - -/// Returns a boolean indicating 
whether or not the proof is valid. Verifies a single proof (not -/// batched). -pub fn verify_proof_single< - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptRead, - Strategy: VerificationStrategy<'params, Scheme, V>, ->( - params: &'params Scheme::ParamsVerifier, - vk: &VerifyingKey, - strategy: Strategy, - instance: &[&[Scheme::Scalar]], - transcript: &mut T, -) -> Result -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - verify_proof(params, vk, strategy, &[instance], transcript) -} - -/// Returns a boolean indicating whether or not the proof is valid -pub fn verify_proof< - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptRead, - Strategy: VerificationStrategy<'params, Scheme, V>, ->( - params: &'params Scheme::ParamsVerifier, - vk: &VerifyingKey, - strategy: Strategy, - instances: &[&[&[Scheme::Scalar]]], - transcript: &mut T, -) -> Result -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - // Check that instances matches the expected number of instance columns - for instances in instances.iter() { - if instances.len() != vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - let instance_commitments = if V::QUERY_INSTANCE { - instances - .iter() - .map(|instance| { - instance - .iter() - .map(|instance| { - if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { - return Err(Error::InstanceTooLarge); - } - let mut poly = instance.to_vec(); - poly.resize(params.n() as usize, Scheme::Scalar::ZERO); - let poly = vk.domain.lagrange_from_vec(poly); - - Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) - }) - .collect::, _>>() - }) - .collect::, _>>()? 
- } else { - vec![vec![]; instances.len()] - }; - - let num_proofs = instance_commitments.len(); - - // Hash verification key into transcript - vk.hash_into(transcript)?; - - if V::QUERY_INSTANCE { - for instance_commitments in instance_commitments.iter() { - // Hash the instance (external) commitments into the transcript - for commitment in instance_commitments { - transcript.common_point(*commitment)? - } - } - } else { - for instance in instances.iter() { - for instance in instance.iter() { - for value in instance.iter() { - transcript.common_scalar(*value)?; - } - } - } - } - - // Hash the prover's advice commitments into the transcript and squeeze challenges - let (advice_commitments, challenges) = { - let mut advice_commitments = - vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; - let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; - - for current_phase in vk.cs.phases() { - for advice_commitments in advice_commitments.iter_mut() { - for (phase, commitment) in vk - .cs - .advice_column_phase - .iter() - .zip(advice_commitments.iter_mut()) - { - if current_phase == *phase { - *commitment = transcript.read_point()?; - } - } - } - for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { - if current_phase == *phase { - *challenge = *transcript.squeeze_challenge_scalar::<()>(); - } - } - } - - (advice_commitments, challenges) - }; - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - - let lookups_permuted = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each lookup permuted commitment - vk.cs - .lookups - .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = 
transcript.squeeze_challenge_scalar(); - - let permutations_committed = (0..num_proofs) - .map(|_| { - // Hash each permutation product commitment - vk.cs.permutation.read_product_commitments(vk, transcript) - }) - .collect::, _>>()?; - - let lookups_committed = lookups_permuted - .into_iter() - .map(|lookups| { - // Hash each lookup product commitment - lookups - .into_iter() - .map(|lookup| lookup.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_committed = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each shuffle product commitment - vk.cs - .shuffles - .iter() - .map(|argument| argument.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; - - // Sample y challenge, which keeps the gates linearly independent. - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - let vanishing = vanishing.read_commitments_after_y(vk, transcript)?; - - // Sample x challenge, which is used to ensure the circuit is - // satisfied with high probability. - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); - let instance_evals = if V::QUERY_INSTANCE { - (0..num_proofs) - .map(|_| -> Result, _> { - read_n_scalars(transcript, vk.cs.instance_queries.len()) - }) - .collect::, _>>()? 
- } else { - let xn = x.pow([params.n()]); - let (min_rotation, max_rotation) = - vk.cs - .instance_queries - .iter() - .fold((0, 0), |(min, max), (_, rotation)| { - if rotation.0 < min { - (rotation.0, max) - } else if rotation.0 > max { - (min, rotation.0) - } else { - (min, max) - } - }); - let max_instance_len = instances - .iter() - .flat_map(|instance| instance.iter().map(|instance| instance.len())) - .max_by(Ord::cmp) - .unwrap_or_default(); - let l_i_s = &vk.domain.l_i_range( - *x, - xn, - -max_rotation..max_instance_len as i32 + min_rotation.abs(), - ); - instances - .iter() - .map(|instances| { - vk.cs - .instance_queries - .iter() - .map(|(column, rotation)| { - let instances = instances[column.index()]; - let offset = (max_rotation - rotation.0) as usize; - compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) - }) - .collect::>() - }) - .collect::>() - }; - - let advice_evals = (0..num_proofs) - .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) - .collect::, _>>()?; - - let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; - - let vanishing = vanishing.evaluate_after_x(transcript)?; - - let permutations_common = vk.permutation.evaluate(transcript)?; - - let permutations_evaluated = permutations_committed - .into_iter() - .map(|permutation| permutation.evaluate(transcript)) - .collect::, _>>()?; - - let lookups_evaluated = lookups_committed - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|lookup| lookup.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_evaluated = shuffles_committed - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|shuffle| shuffle.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // This check ensures the circuit is satisfied so long as the polynomial - // commitments open to the correct values. 
- let vanishing = { - // x^n - let xn = x.pow([params.n()]); - - let blinding_factors = vk.cs.blinding_factors(); - let l_evals = vk - .domain - .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); - assert_eq!(l_evals.len(), 2 + blinding_factors); - let l_last = l_evals[0]; - let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] - .iter() - .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); - let l_0 = l_evals[1 + blinding_factors]; - - // Compute the expected value of h(x) - let expressions = advice_evals - .iter() - .zip(instance_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { - let challenges = &challenges; - let fixed_evals = &fixed_evals; - std::iter::empty() - // Evaluate the circuit using the custom gates provided - .chain(vk.cs.gates.iter().flat_map(move |gate| { - gate.polynomials().iter().map(move |poly| { - poly.evaluate( - &|scalar| scalar, - &|_| { - panic!("virtual selectors are removed during optimization") - }, - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - })) - .chain(permutation.expressions( - vk, - &vk.cs.permutation, - &permutations_common, - advice_evals, - fixed_evals, - instance_evals, - l_0, - l_last, - l_blind, - beta, - gamma, - x, - )) - .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - beta, - gamma, - advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - gamma, - 
advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - }, - ); - - vanishing.verify(params, expressions, y, xn) - }; - - let queries = instance_commitments - .iter() - .zip(instance_evals.iter()) - .zip(advice_commitments.iter()) - .zip(advice_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |( - ( - ( - ( - ((instance_commitments, instance_evals), advice_commitments), - advice_evals, - ), - permutation, - ), - lookups, - ), - shuffles, - )| { - iter::empty() - .chain( - V::QUERY_INSTANCE - .then_some(vk.cs.instance_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &instance_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - instance_evals[query_index], - ) - }, - )) - .into_iter() - .flatten(), - ) - .chain(vk.cs.advice_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &advice_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - advice_evals[query_index], - ) - }, - )) - .chain(permutation.queries(vk, x)) - .chain(lookups.iter().flat_map(move |p| p.queries(vk, x))) - .chain(shuffles.iter().flat_map(move |p| p.queries(vk, x))) - }, - ) - .chain( - vk.cs - .fixed_queries - .iter() - .enumerate() - .map(|(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &vk.fixed_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - fixed_evals[query_index], - ) - }), - ) - .chain(permutations_common.queries(&vk.permutation, x)) - .chain(vanishing.queries(x)); - - // We are now convinced the circuit is satisfied so long as the - // polynomial commitments open to the correct values. 
- - let verifier = V::new(params); - strategy.process(|msm| { - verifier - .verify_proof(transcript, queries, msm) - .map_err(|_| Error::Opening) - }) -} diff --git a/common/src/plonk/verifier/batch.rs b/common/src/plonk/verifier/batch.rs deleted file mode 100644 index d869d87559..0000000000 --- a/common/src/plonk/verifier/batch.rs +++ /dev/null @@ -1,135 +0,0 @@ -use group::ff::Field; -use halo2_middleware::ff::FromUniformBytes; -use halo2curves::CurveAffine; -use rand_core::OsRng; - -use super::{verify_proof, VerificationStrategy}; -use crate::{ - multicore::{ - IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, - }, - plonk::{Error, VerifyingKey}, - poly::{ - commitment::{Params, MSM}, - ipa::{ - commitment::{IPACommitmentScheme, ParamsVerifierIPA}, - msm::MSMIPA, - multiopen::VerifierIPA, - strategy::GuardIPA, - }, - }, - transcript::{Blake2bRead, TranscriptReadBuffer}, -}; - -/// A proof verification strategy that returns the proof's MSM. -/// -/// `BatchVerifier` handles the accumulation of the MSMs for the batched proofs. -#[derive(Debug)] -struct BatchStrategy<'params, C: CurveAffine> { - msm: MSMIPA<'params, C>, -} - -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> - for BatchStrategy<'params, C> -{ - type Output = MSMIPA<'params, C>; - - fn new(params: &'params ParamsVerifierIPA) -> Self { - BatchStrategy { - msm: MSMIPA::new(params), - } - } - - fn process( - self, - f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, - ) -> Result { - let guard = f(self.msm)?; - Ok(guard.use_challenges()) - } - - fn finalize(self) -> bool { - unreachable!() - } -} - -#[derive(Debug)] -struct BatchItem { - instances: Vec>>, - proof: Vec, -} - -/// A verifier that checks multiple proofs in a batch. 
**This requires the -/// `batch` crate feature to be enabled.** -#[derive(Debug, Default)] -pub struct BatchVerifier { - items: Vec>, -} - -impl BatchVerifier -where - C::Scalar: FromUniformBytes<64>, -{ - /// Constructs a new batch verifier. - pub fn new() -> Self { - Self { items: vec![] } - } - - /// Adds a proof to the batch. - pub fn add_proof(&mut self, instances: Vec>>, proof: Vec) { - self.items.push(BatchItem { instances, proof }) - } - - /// Finalizes the batch and checks its validity. - /// - /// Returns `false` if *some* proof was invalid. If the caller needs to identify - /// specific failing proofs, it must re-process the proofs separately. - /// - /// This uses [`OsRng`] internally instead of taking an `R: RngCore` argument, because - /// the internal parallelization requires access to a RNG that is guaranteed to not - /// clone its internal state when shared between threads. - pub fn finalize(self, params: &ParamsVerifierIPA, vk: &VerifyingKey) -> bool { - fn accumulate_msm<'params, C: CurveAffine>( - mut acc: MSMIPA<'params, C>, - msm: MSMIPA<'params, C>, - ) -> MSMIPA<'params, C> { - // Scale the MSM by a random factor to ensure that if the existing MSM has - // `is_zero() == false` then this argument won't be able to interfere with it - // to make it true, with high probability. 
- acc.scale(C::Scalar::random(OsRng)); - - acc.add_msm(&msm); - acc - } - - let final_msm = self - .items - .into_par_iter() - .enumerate() - .map(|(i, item)| { - let instances: Vec> = item - .instances - .iter() - .map(|i| i.iter().map(|c| &c[..]).collect()) - .collect(); - let instances: Vec<_> = instances.iter().map(|i| &i[..]).collect(); - - let strategy = BatchStrategy::new(params); - let mut transcript = Blake2bRead::init(&item.proof[..]); - verify_proof(params, vk, strategy, &instances, &mut transcript).map_err(|e| { - tracing::debug!("Batch item {} failed verification: {}", i, e); - e - }) - }) - .try_fold_and_reduce( - || params.empty_msm(), - |acc, res| res.map(|proof_msm| accumulate_msm(acc, proof_msm)), - ); - - match final_msm { - Ok(msm) => msm.check(), - Err(_) => false, - } - } -} From 5c7d2ad568a62d6cacdcb5d11a0db436370358c0 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 16:55:05 +0000 Subject: [PATCH 43/79] Checkpoint --- backend/src/plonk.rs | 458 +++++++++ backend/src/plonk/evaluation.rs | 873 ++++++++++++++++++ backend/src/plonk/keygen.rs | 394 ++++++++ backend/src/plonk/lookup.rs | 2 + backend/src/plonk/lookup/prover.rs | 476 ++++++++++ backend/src/plonk/lookup/verifier.rs | 212 +++++ backend/src/plonk/permutation.rs | 109 +++ .../src/plonk/permutation/keygen.rs | 0 backend/src/plonk/permutation/prover.rs | 332 +++++++ backend/src/plonk/permutation/verifier.rs | 256 +++++ backend/src/plonk/shuffle.rs | 2 + backend/src/plonk/shuffle/prover.rs | 251 +++++ backend/src/plonk/shuffle/verifier.rs | 139 +++ backend/src/plonk/vanishing.rs | 11 + backend/src/plonk/vanishing/prover.rs | 199 ++++ backend/src/plonk/vanishing/verifier.rs | 138 +++ backend/src/plonk/verifier.rs | 462 +++++++++ backend/src/plonk/verifier/batch.rs | 135 +++ common/src/lib.rs | 4 +- common/src/plonk/permutation.rs | 92 -- 20 files changed, 4452 insertions(+), 93 deletions(-) create mode 100644 backend/src/plonk/evaluation.rs create mode 100644 
backend/src/plonk/keygen.rs create mode 100644 backend/src/plonk/lookup.rs create mode 100644 backend/src/plonk/lookup/prover.rs create mode 100644 backend/src/plonk/lookup/verifier.rs create mode 100644 backend/src/plonk/permutation.rs rename {common => backend}/src/plonk/permutation/keygen.rs (100%) create mode 100644 backend/src/plonk/permutation/prover.rs create mode 100644 backend/src/plonk/permutation/verifier.rs create mode 100644 backend/src/plonk/shuffle.rs create mode 100644 backend/src/plonk/shuffle/prover.rs create mode 100644 backend/src/plonk/shuffle/verifier.rs create mode 100644 backend/src/plonk/vanishing.rs create mode 100644 backend/src/plonk/vanishing/prover.rs create mode 100644 backend/src/plonk/vanishing/verifier.rs create mode 100644 backend/src/plonk/verifier.rs create mode 100644 backend/src/plonk/verifier/batch.rs diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index b8fcb1c31a..4a5267ebd5 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -1 +1,459 @@ +use blake2b_simd::Params as Blake2bParams; +use group::ff::{Field, FromUniformBytes, PrimeField}; + +use crate::arithmetic::CurveAffine; +use crate::helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, + SerdePrimeField, +}; +use crate::poly::{ + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, + Polynomial, +}; +use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use crate::SerdeFormat; +use evaluation::Evaluator; +use halo2_common::plonk::{Circuit, ConstraintSystem, PinnedConstraintSystem}; +use halo2_middleware::circuit::{ + Advice, AdviceQueryMid, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, + Instance, InstanceQueryMid, PreprocessingV2, +}; +use halo2_middleware::poly::Rotation; + +use std::io; + +mod evaluation; +mod keygen; +mod lookup; +mod permutation; pub mod prover; +mod shuffle; +mod vanishing; + +/// This is a verifying 
key which allows for the verification of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: halo2_common::plonk::permutation::VerifyingKey, + cs: ConstraintSystem, + /// Cached maximum degree of `cs` (which doesn't change after construction). + cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, + selectors: Vec>, + /// Whether selector compression is turned on or not. + compress_selectors: bool, +} + +// Current version of the VK +const VERSION: u8 = 0x03; + +impl VerifyingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a verifying key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + // Version byte that will be checked on read. 
+ writer.write_all(&[VERSION])?; + let k = &self.domain.k(); + assert!(*k <= C::Scalar::S); + // k value fits in 1 byte + writer.write_all(&[*k as u8])?; + writer.write_all(&[self.compress_selectors as u8])?; + writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; + for commitment in &self.fixed_commitments { + commitment.write(writer, format)?; + } + self.permutation.write(writer, format)?; + + if !self.compress_selectors { + assert!(self.selectors.is_empty()); + } + // write self.selectors + for selector in &self.selectors { + // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write + for bits in selector.chunks(8) { + writer.write_all(&[crate::helpers::pack(bits)])?; + } + } + Ok(()) + } + + /// Reads a verification key from a buffer. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let mut version_byte = [0u8; 1]; + reader.read_exact(&mut version_byte)?; + if VERSION != version_byte[0] { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected version byte", + )); + } + + let mut k = [0u8; 1]; + reader.read_exact(&mut k)?; + let k = u8::from_le_bytes(k); + if k as u32 > C::Scalar::S { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!( + "circuit size value (k): {} exceeds maxium: {}", + k, + C::Scalar::S + ), + )); + } + let mut compress_selectors = [0u8; 1]; + reader.read_exact(&mut compress_selectors)?; + if compress_selectors[0] != 0 && compress_selectors[0] != 1 { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected compress_selectors not boolean", + )); + } + let compress_selectors = compress_selectors[0] == 1; + let (domain, cs, _) = keygen::create_domain::( + k as u32, + #[cfg(feature = "circuit-params")] + params, + ); + let mut num_fixed_columns = [0u8; 4]; + reader.read_exact(&mut num_fixed_columns)?; + let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); + + let fixed_commitments: Vec<_> = (0..num_fixed_columns) + .map(|_| C::read(reader, format)) + .collect::>()?; + + let permutation = + halo2_common::plonk::permutation::VerifyingKey::read(reader, &cs.permutation, format)?; + + let (cs, selectors) = if compress_selectors { + // read selectors + let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] + .into_iter() + .map(|mut selector| { + let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; + reader.read_exact(&mut selector_bytes)?; + for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { + crate::helpers::unpack(byte, bits); + } + Ok(selector) + }) + 
.collect::>()?; + let (cs, _) = cs.compress_selectors(selectors.clone()); + (cs, selectors) + } else { + // we still need to replace selectors with fixed Expressions in `cs` + let fake_selectors = vec![vec![]; cs.num_selectors]; + let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); + (cs, vec![]) + }; + + Ok(Self::from_parts( + domain, + fixed_commitments, + permutation, + cs, + selectors, + compress_selectors, + )) + } + + /// Writes a verifying key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a verification key from a slice of bytes using [`Self::read`]. + pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } +} + +impl VerifyingKey { + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + 10 + (self.fixed_commitments.len() * C::byte_length(format)) + + self.permutation.bytes_length(format) + + self.selectors.len() + * (self + .selectors + .get(0) + .map(|selector| (selector.len() + 7) / 8) + .unwrap_or(0)) + } + + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: halo2_common::plonk::permutation::VerifyingKey, + cs: ConstraintSystem, + selectors: Vec>, + compress_selectors: bool, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. + let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. 
+ transcript_repr: C::Scalar::ZERO, + selectors, + compress_selectors, + }; + + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); + + let s = format!("{:?}", vk.pinned()); + + hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); + + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + + vk + } + + /// Hashes a verification key into a transcript. + pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; + + Ok(()) + } + + /// Obtains a pinned representation of this verification key that contains + /// the minimal information necessary to reconstruct the verification key. + pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { + PinnedVerificationKey { + base_modulus: C::Base::MODULUS, + scalar_modulus: C::Scalar::MODULUS, + domain: self.domain.pinned(), + fixed_commitments: &self.fixed_commitments, + permutation: &self.permutation, + cs: self.cs.pinned(), + } + } + + /// Returns commitments of fixed polynomials + pub fn fixed_commitments(&self) -> &Vec { + &self.fixed_commitments + } + + /// Returns `VerifyingKey` of permutation + pub fn permutation(&self) -> &halo2_common::plonk::permutation::VerifyingKey { + &self.permutation + } + + /// Returns `ConstraintSystem` + pub fn cs(&self) -> &ConstraintSystem { + &self.cs + } + + /// Returns representative of this `VerifyingKey` in transcripts + pub fn transcript_repr(&self) -> C::Scalar { + self.transcript_repr + } +} + +/// Minimal representation of a verification key that can be used to identify +/// its active contents. 
+#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedVerificationKey<'a, C: CurveAffine> { + base_modulus: &'static str, + scalar_modulus: &'static str, + domain: PinnedEvaluationDomain<'a, C::Scalar>, + cs: PinnedConstraintSystem<'a, C::Scalar>, + fixed_commitments: &'a Vec, + permutation: &'a halo2_common::plonk::permutation::VerifyingKey, +} + +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct ProvingKey { + vk: VerifyingKey, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: halo2_common::plonk::permutation::ProvingKey, + ev: Evaluator, +} + +impl ProvingKey +where + C::Scalar: FromUniformBytes<64>, +{ + /// Get the underlying [`VerifyingKey`]. + pub fn get_vk(&self) -> &VerifyingKey { + &self.vk + } + + /// Gets the total number of bytes in the serialization of `self` + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + let scalar_len = C::Scalar::default().to_repr().as_ref().len(); + self.vk.bytes_length(format) + + 12 + + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) + + polynomial_slice_byte_length(&self.fixed_values) + + polynomial_slice_byte_length(&self.fixed_polys) + + polynomial_slice_byte_length(&self.fixed_cosets) + + self.permutation.bytes_length() + } +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a proving key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. 
+ /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + self.vk.write(writer, format)?; + self.l0.write(writer, format)?; + self.l_last.write(writer, format)?; + self.l_active_row.write(writer, format)?; + write_polynomial_slice(&self.fixed_values, writer, format)?; + write_polynomial_slice(&self.fixed_polys, writer, format)?; + write_polynomial_slice(&self.fixed_cosets, writer, format)?; + self.permutation.write(writer, format)?; + Ok(()) + } + + /// Reads a proving key from a buffer. + /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let vk = VerifyingKey::::read::( + reader, + format, + #[cfg(feature = "circuit-params")] + params, + )?; + let l0 = Polynomial::read(reader, format)?; + let l_last = Polynomial::read(reader, format)?; + let l_active_row = Polynomial::read(reader, format)?; + let fixed_values = read_polynomial_vec(reader, format)?; + let fixed_polys = read_polynomial_vec(reader, format)?; + let fixed_cosets = read_polynomial_vec(reader, format)?; + let permutation = halo2_common::plonk::permutation::ProvingKey::read(reader, format)?; + let ev = Evaluator::new(vk.cs()); + Ok(Self { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation, + ev, + }) + } + + /// Writes a proving key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a proving key from a slice of bytes using [`Self::read`]. + pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } +} + +impl VerifyingKey { + /// Get the underlying [`EvaluationDomain`]. 
+ pub fn get_domain(&self) -> &EvaluationDomain { + &self.domain + } +} diff --git a/backend/src/plonk/evaluation.rs b/backend/src/plonk/evaluation.rs new file mode 100644 index 0000000000..2cd00a5f7c --- /dev/null +++ b/backend/src/plonk/evaluation.rs @@ -0,0 +1,873 @@ +use crate::multicore; +use crate::plonk::{lookup, permutation, ProvingKey}; +use crate::poly::Basis; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, +}; +use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; + +use super::{shuffle, ConstraintSystem, Expression}; + +/// Return the index in the polynomial of size `isize` after rotation `rot`. +fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { + (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize +} + +/// Value used in a calculation +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] +pub enum ValueSource { + /// This is a constant value + Constant(usize), + /// This is an intermediate value + Intermediate(usize), + /// This is a fixed column + Fixed(usize, usize), + /// This is an advice (witness) column + Advice(usize, usize), + /// This is an instance (external) column + Instance(usize, usize), + /// This is a challenge + Challenge(usize), + /// beta + Beta(), + /// gamma + Gamma(), + /// theta + Theta(), + /// y + Y(), + /// Previous value + PreviousValue(), +} + +impl Default for ValueSource { + fn default() -> Self { + ValueSource::Constant(0) + } +} + +impl ValueSource { + /// Get the value for this source + #[allow(clippy::too_many_arguments)] + pub fn get( + &self, + rotations: &[usize], + constants: &[F], + intermediates: &[F], + fixed_values: &[Polynomial], + advice_values: &[Polynomial], + instance_values: &[Polynomial], + challenges: &[F], + beta: &F, + gamma: &F, + theta: &F, + y: &F, + previous_value: &F, + ) -> F { + match self { + 
ValueSource::Constant(idx) => constants[*idx], + ValueSource::Intermediate(idx) => intermediates[*idx], + ValueSource::Fixed(column_index, rotation) => { + fixed_values[*column_index][rotations[*rotation]] + } + ValueSource::Advice(column_index, rotation) => { + advice_values[*column_index][rotations[*rotation]] + } + ValueSource::Instance(column_index, rotation) => { + instance_values[*column_index][rotations[*rotation]] + } + ValueSource::Challenge(index) => challenges[*index], + ValueSource::Beta() => *beta, + ValueSource::Gamma() => *gamma, + ValueSource::Theta() => *theta, + ValueSource::Y() => *y, + ValueSource::PreviousValue() => *previous_value, + } + } +} + +/// Calculation +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Calculation { + /// This is an addition + Add(ValueSource, ValueSource), + /// This is a subtraction + Sub(ValueSource, ValueSource), + /// This is a product + Mul(ValueSource, ValueSource), + /// This is a square + Square(ValueSource), + /// This is a double + Double(ValueSource), + /// This is a negation + Negate(ValueSource), + /// This is Horner's rule: `val = a; val = val * c + b[]` + Horner(ValueSource, Vec, ValueSource), + /// This is a simple assignment + Store(ValueSource), +} + +impl Calculation { + /// Get the resulting value of this calculation + #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + rotations: &[usize], + constants: &[F], + intermediates: &[F], + fixed_values: &[Polynomial], + advice_values: &[Polynomial], + instance_values: &[Polynomial], + challenges: &[F], + beta: &F, + gamma: &F, + theta: &F, + y: &F, + previous_value: &F, + ) -> F { + let get_value = |value: &ValueSource| { + value.get( + rotations, + constants, + intermediates, + fixed_values, + advice_values, + instance_values, + challenges, + beta, + gamma, + theta, + y, + previous_value, + ) + }; + match self { + Calculation::Add(a, b) => get_value(a) + get_value(b), + Calculation::Sub(a, b) => get_value(a) - get_value(b), + 
Calculation::Mul(a, b) => get_value(a) * get_value(b), + Calculation::Square(v) => get_value(v).square(), + Calculation::Double(v) => get_value(v).double(), + Calculation::Negate(v) => -get_value(v), + Calculation::Horner(start_value, parts, factor) => { + let factor = get_value(factor); + let mut value = get_value(start_value); + for part in parts.iter() { + value = value * factor + get_value(part); + } + value + } + Calculation::Store(v) => get_value(v), + } + } +} + +/// Evaluator +#[derive(Clone, Default, Debug)] +pub struct Evaluator { + /// Custom gates evalution + pub custom_gates: GraphEvaluator, + /// Lookups evalution + pub lookups: Vec>, + /// Shuffle evalution + pub shuffles: Vec>, +} + +/// GraphEvaluator +#[derive(Clone, Debug)] +pub struct GraphEvaluator { + /// Constants + pub constants: Vec, + /// Rotations + pub rotations: Vec, + /// Calculations + pub calculations: Vec, + /// Number of intermediates + pub num_intermediates: usize, +} + +/// EvaluationData +#[derive(Default, Debug)] +pub struct EvaluationData { + /// Intermediates + pub intermediates: Vec, + /// Rotations + pub rotations: Vec, +} + +/// CaluclationInfo +#[derive(Clone, Debug)] +pub struct CalculationInfo { + /// Calculation + pub calculation: Calculation, + /// Target + pub target: usize, +} + +impl Evaluator { + /// Creates a new evaluation structure + pub fn new(cs: &ConstraintSystem) -> Self { + let mut ev = Evaluator::default(); + + // Custom gates + let mut parts = Vec::new(); + for gate in cs.gates.iter() { + parts.extend( + gate.polynomials() + .iter() + .map(|poly| ev.custom_gates.add_expression(poly)), + ); + } + ev.custom_gates.add_calculation(Calculation::Horner( + ValueSource::PreviousValue(), + parts, + ValueSource::Y(), + )); + + // Lookups + for lookup in cs.lookups.iter() { + let mut graph = GraphEvaluator::default(); + + let mut evaluate_lc = |expressions: &Vec>| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + 
graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + // Input coset + let compressed_input_coset = evaluate_lc(&lookup.input_expressions); + // table coset + let compressed_table_coset = evaluate_lc(&lookup.table_expressions); + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + let right_gamma = graph.add_calculation(Calculation::Add( + compressed_table_coset, + ValueSource::Gamma(), + )); + let lc = graph.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Beta(), + )); + graph.add_calculation(Calculation::Mul(lc, right_gamma)); + + ev.lookups.push(graph); + } + + // Shuffles + for shuffle in cs.shuffles.iter() { + let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { + let parts = expressions + .iter() + .map(|expr| graph.add_expression(expr)) + .collect(); + graph.add_calculation(Calculation::Horner( + ValueSource::Constant(0), + parts, + ValueSource::Theta(), + )) + }; + + let mut graph_input = GraphEvaluator::default(); + let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); + let _ = graph_input.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Gamma(), + )); + + let mut graph_shuffle = GraphEvaluator::default(); + let compressed_shuffle_coset = + evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); + let _ = graph_shuffle.add_calculation(Calculation::Add( + compressed_shuffle_coset, + ValueSource::Gamma(), + )); + + ev.shuffles.push(graph_input); + ev.shuffles.push(graph_shuffle); + } + + ev + } + + /// Evaluate h poly + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn evaluate_h( + &self, + pk: &ProvingKey, + advice_polys: &[&[Polynomial]], + instance_polys: &[&[Polynomial]], + challenges: &[C::ScalarExt], + y: C::ScalarExt, + beta: C::ScalarExt, + gamma: C::ScalarExt, + theta: C::ScalarExt, + lookups: &[Vec>], + shuffles: &[Vec>], + permutations: 
&[permutation::prover::Committed], + ) -> Polynomial { + let domain = &pk.vk.domain; + let size = domain.extended_len(); + let rot_scale = 1 << (domain.extended_k() - domain.k()); + let fixed = &pk.fixed_cosets[..]; + let extended_omega = domain.get_extended_omega(); + let isize = size as i32; + let one = C::ScalarExt::ONE; + let l0 = &pk.l0; + let l_last = &pk.l_last; + let l_active_row = &pk.l_active_row; + let p = &pk.vk.cs.permutation; + + // Calculate the advice and instance cosets + let advice: Vec>> = advice_polys + .iter() + .map(|advice_polys| { + advice_polys + .iter() + .map(|poly| domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + let instance: Vec>> = instance_polys + .iter() + .map(|instance_polys| { + instance_polys + .iter() + .map(|poly| domain.coeff_to_extended(poly.clone())) + .collect() + }) + .collect(); + + let mut values = domain.empty_extended(); + + // Core expression evaluations + let num_threads = multicore::current_num_threads(); + for ((((advice, instance), lookups), shuffles), permutation) in advice + .iter() + .zip(instance.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .zip(permutations.iter()) + { + // Custom gates + multicore::scope(|scope| { + let chunk_size = (size + num_threads - 1) / num_threads; + for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { + let start = thread_idx * chunk_size; + scope.spawn(move |_| { + let mut eval_data = self.custom_gates.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + *value = self.custom_gates.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + value, + idx, + rot_scale, + isize, + ); + } + }); + } + }); + + // Permutations + let sets = &permutation.sets; + if !sets.is_empty() { + let blinding_factors = pk.vk.cs.blinding_factors(); + let last_rotation = Rotation(-((blinding_factors + 1) as i32)); + let chunk_len = pk.vk.cs.degree() - 2; + let 
delta_start = beta * &C::Scalar::ZETA; + + let first_set = sets.first().unwrap(); + let last_set = sets.last().unwrap(); + + // Permutation constraints + parallelize(&mut values, |values, start| { + let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); + + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + *value = *value * y + + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); + // Enforce only for the last set. + // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + *value = *value * y + + ((last_set.permutation_product_coset[idx] + * last_set.permutation_product_coset[idx] + - last_set.permutation_product_coset[idx]) + * l_last[idx]); + // Except for the first set, enforce. + // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + for (set_idx, set) in sets.iter().enumerate() { + if set_idx != 0 { + *value = *value * y + + ((set.permutation_product_coset[idx] + - permutation.sets[set_idx - 1].permutation_product_coset + [r_last]) + * l0[idx]); + } + } + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) + // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) + // ) + let mut current_delta = delta_start * beta_term; + for ((set, columns), cosets) in sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(pk.permutation.cosets.chunks(chunk_len)) + { + let mut left = set.permutation_product_coset[r_next]; + for (values, permutation) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => &fixed[column.index()], + Any::Instance => &instance[column.index()], + }) + .zip(cosets.iter()) + { + left *= values[idx] + beta * permutation[idx] + gamma; + } + + let mut right = 
set.permutation_product_coset[idx]; + for values in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => &advice[column.index()], + Any::Fixed => &fixed[column.index()], + Any::Instance => &instance[column.index()], + }) { + right *= values[idx] + current_delta + gamma; + current_delta *= &C::Scalar::DELTA; + } + + *value = *value * y + ((left - right) * l_active_row[idx]); + } + beta_term *= &extended_omega; + } + }); + } + + // Lookups + for (n, lookup) in lookups.iter().enumerate() { + // Polynomials required for this lookup. + // Calculated here so these only have to be kept in memory for the short time + // they are actually needed. + let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); + let permuted_input_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_input_poly.clone()); + let permuted_table_coset = pk + .vk + .domain + .coeff_to_extended(lookup.permuted_table_poly.clone()); + + // Lookup constraints + parallelize(&mut values, |values, start| { + let lookup_evaluator = &self.lookups[n]; + let mut eval_data = lookup_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let table_value = lookup_evaluator.evaluate( + &mut eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); + + let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * (z(X)^2 - z(X)) = 0 + *value = *value * y + + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... 
+ a_{m-1}(X) + \beta) + // (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + // ) = 0 + *value = *value * y + + ((product_coset[r_next] + * (permuted_input_coset[idx] + beta) + * (permuted_table_coset[idx] + gamma) + - product_coset[idx] * table_value) + * l_active_row[idx]); + // Check that the first values in the permuted input expression and permuted + // fixed expression are the same. + // l_0(X) * (a'(X) - s'(X)) = 0 + *value = *value * y + (a_minus_s * l0[idx]); + // Check that each value in the permuted lookup input expression is either + // equal to the value above it, or the value at the same index in the + // permuted table expression. + // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + *value = *value * y + + (a_minus_s + * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) + * l_active_row[idx]); + } + }); + } + + // Shuffle constraints + for (n, shuffle) in shuffles.iter().enumerate() { + let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); + + // Shuffle constraints + parallelize(&mut values, |values, start| { + let input_evaluator = &self.shuffles[2 * n]; + let shuffle_evaluator = &self.shuffles[2 * n + 1]; + let mut eval_data_input = shuffle_evaluator.instance(); + let mut eval_data_shuffle = shuffle_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + + let input_value = input_evaluator.evaluate( + &mut eval_data_input, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let shuffle_value = shuffle_evaluator.evaluate( + &mut eval_data_shuffle, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ); + + let r_next = get_rotation_idx(idx, 1, rot_scale, isize); + + // l_0(X) * (1 - z(X)) = 0 + *value = *value * y + ((one - product_coset[idx]) * l0[idx]); + // l_last(X) * 
(z(X)^2 - z(X)) = 0 + *value = *value * y + + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) + * l_last[idx]); + // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 + *value = *value * y + + l_active_row[idx] + * (product_coset[r_next] * shuffle_value + - product_coset[idx] * input_value) + } + }); + } + } + values + } +} + +impl Default for GraphEvaluator { + fn default() -> Self { + Self { + // Fixed positions to allow easy access + constants: vec![ + C::ScalarExt::ZERO, + C::ScalarExt::ONE, + C::ScalarExt::from(2u64), + ], + rotations: Vec::new(), + calculations: Vec::new(), + num_intermediates: 0, + } + } +} + +impl GraphEvaluator { + /// Adds a rotation + fn add_rotation(&mut self, rotation: &Rotation) -> usize { + let position = self.rotations.iter().position(|&c| c == rotation.0); + match position { + Some(pos) => pos, + None => { + self.rotations.push(rotation.0); + self.rotations.len() - 1 + } + } + } + + /// Adds a constant + fn add_constant(&mut self, constant: &C::ScalarExt) -> ValueSource { + let position = self.constants.iter().position(|&c| c == *constant); + ValueSource::Constant(match position { + Some(pos) => pos, + None => { + self.constants.push(*constant); + self.constants.len() - 1 + } + }) + } + + /// Adds a calculation. + /// Currently does the simplest thing possible: just stores the + /// resulting value so the result can be reused when that calculation + /// is done multiple times. 
+ fn add_calculation(&mut self, calculation: Calculation) -> ValueSource { + let existing_calculation = self + .calculations + .iter() + .find(|c| c.calculation == calculation); + match existing_calculation { + Some(existing_calculation) => ValueSource::Intermediate(existing_calculation.target), + None => { + let target = self.num_intermediates; + self.calculations.push(CalculationInfo { + calculation, + target, + }); + self.num_intermediates += 1; + ValueSource::Intermediate(target) + } + } + } + + /// Generates an optimized evaluation for the expression + fn add_expression(&mut self, expr: &Expression) -> ValueSource { + match expr { + Expression::Constant(scalar) => self.add_constant(scalar), + Expression::Selector(_selector) => unreachable!(), + Expression::Fixed(query) => { + let rot_idx = self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Fixed( + query.column_index, + rot_idx, + ))) + } + Expression::Advice(query) => { + let rot_idx = self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Advice( + query.column_index, + rot_idx, + ))) + } + Expression::Instance(query) => { + let rot_idx = self.add_rotation(&query.rotation); + self.add_calculation(Calculation::Store(ValueSource::Instance( + query.column_index, + rot_idx, + ))) + } + Expression::Challenge(challenge) => self.add_calculation(Calculation::Store( + ValueSource::Challenge(challenge.index()), + )), + Expression::Negated(a) => match **a { + Expression::Constant(scalar) => self.add_constant(&-scalar), + _ => { + let result_a = self.add_expression(a); + match result_a { + ValueSource::Constant(0) => result_a, + _ => self.add_calculation(Calculation::Negate(result_a)), + } + } + }, + Expression::Sum(a, b) => { + // Undo subtraction stored as a + (-b) in expressions + match &**b { + Expression::Negated(b_int) => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b_int); + if result_a == 
ValueSource::Constant(0) { + self.add_calculation(Calculation::Negate(result_b)) + } else if result_b == ValueSource::Constant(0) { + result_a + } else { + self.add_calculation(Calculation::Sub(result_a, result_b)) + } + } + _ => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b); + if result_a == ValueSource::Constant(0) { + result_b + } else if result_b == ValueSource::Constant(0) { + result_a + } else if result_a <= result_b { + self.add_calculation(Calculation::Add(result_a, result_b)) + } else { + self.add_calculation(Calculation::Add(result_b, result_a)) + } + } + } + } + Expression::Product(a, b) => { + let result_a = self.add_expression(a); + let result_b = self.add_expression(b); + if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) { + ValueSource::Constant(0) + } else if result_a == ValueSource::Constant(1) { + result_b + } else if result_b == ValueSource::Constant(1) { + result_a + } else if result_a == ValueSource::Constant(2) { + self.add_calculation(Calculation::Double(result_b)) + } else if result_b == ValueSource::Constant(2) { + self.add_calculation(Calculation::Double(result_a)) + } else if result_a == result_b { + self.add_calculation(Calculation::Square(result_a)) + } else if result_a <= result_b { + self.add_calculation(Calculation::Mul(result_a, result_b)) + } else { + self.add_calculation(Calculation::Mul(result_b, result_a)) + } + } + Expression::Scaled(a, f) => { + if *f == C::ScalarExt::ZERO { + ValueSource::Constant(0) + } else if *f == C::ScalarExt::ONE { + self.add_expression(a) + } else { + let cst = self.add_constant(f); + let result_a = self.add_expression(a); + self.add_calculation(Calculation::Mul(result_a, cst)) + } + } + } + } + + /// Creates a new evaluation structure + pub fn instance(&self) -> EvaluationData { + EvaluationData { + intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates], + rotations: vec![0usize; self.rotations.len()], + } + } + + 
#[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + data: &mut EvaluationData, + fixed: &[Polynomial], + advice: &[Polynomial], + instance: &[Polynomial], + challenges: &[C::ScalarExt], + beta: &C::ScalarExt, + gamma: &C::ScalarExt, + theta: &C::ScalarExt, + y: &C::ScalarExt, + previous_value: &C::ScalarExt, + idx: usize, + rot_scale: i32, + isize: i32, + ) -> C::ScalarExt { + // All rotation index values + for (rot_idx, rot) in self.rotations.iter().enumerate() { + data.rotations[rot_idx] = get_rotation_idx(idx, *rot, rot_scale, isize); + } + + // All calculations, with cached intermediate results + for calc in self.calculations.iter() { + data.intermediates[calc.target] = calc.calculation.evaluate( + &data.rotations, + &self.constants, + &data.intermediates, + fixed, + advice, + instance, + challenges, + beta, + gamma, + theta, + y, + previous_value, + ); + } + + // Return the result of the last calculation (if any) + if let Some(calc) = self.calculations.last() { + data.intermediates[calc.target] + } else { + C::ScalarExt::ZERO + } + } +} + +/// Simple evaluation of an expression +pub fn evaluate( + expression: &Expression, + size: usize, + rot_scale: i32, + fixed: &[Polynomial], + advice: &[Polynomial], + instance: &[Polynomial], + challenges: &[F], +) -> Vec { + let mut values = vec![F::ZERO; size]; + let isize = size as i32; + parallelize(&mut values, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = start + i; + *value = expression.evaluate( + &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), + &|query| { + fixed[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|query| { + advice[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|query| { + instance[query.column_index] + [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + }, + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, 
b| a + &b, + &|a, b| a * b, + &|a, scalar| a * scalar, + ); + } + }); + values +} diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs new file mode 100644 index 0000000000..2dea5d26b1 --- /dev/null +++ b/backend/src/plonk/keygen.rs @@ -0,0 +1,394 @@ +#![allow(clippy::int_plus_one)] + +use std::ops::Range; + +use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes}; + +use super::{ + circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, + evaluation::Evaluator, + permutation, Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, +}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + circuit::Value, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; +use halo2_middleware::circuit::{ + Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, +}; + +pub(crate) fn create_domain( + k: u32, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, +) -> ( + EvaluationDomain, + ConstraintSystem, + ConcreteCircuit::Config, +) +where + C: CurveAffine, + ConcreteCircuit: Circuit, +{ + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, params); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + + let degree = cs.degree(); + + let domain = EvaluationDomain::new(degree as u32, k); + + (domain, cs, config) +} + +/// Assembly to be used in circuit synthesis. +#[derive(Debug)] +pub(crate) struct Assembly { + pub(crate) k: u32, + pub(crate) fixed: Vec, LagrangeCoeff>>, + pub(crate) permutation: permutation::keygen::AssemblyFront, + pub(crate) selectors: Vec>, + // A range of available rows for assignment and copies. 
+ pub(crate) usable_rows: Range, + pub(crate) _marker: std::marker::PhantomData, +} + +impl Assignment for Assembly { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.selectors[selector.0][row] = true; + + Ok(()) + } + + fn query_instance(&self, _: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + // There is no instance in this context. + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about fixed columns here + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .fixed + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.permutation + .copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + column: Column, + from_row: usize, + to: Value>, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&from_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + let col = self + .fixed + .get_mut(column.index()) + .ok_or(Error::BoundsFailure)?; + + let filler = to.assign()?; + for row in self.usable_rows.clone().skip(from_row) { + col[row] = filler; + } + + Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
+pub fn keygen_vk_v2<'params, C, P>( + params: &P, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + C::Scalar: FromUniformBytes<64>, +{ + let cs2 = &circuit.cs; + let cs: ConstraintSystem = cs2.clone().into(); + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs2.permutation, + &circuit.preprocessing.permutation, + )? + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + params + .commit_lagrange( + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + .to_affine() + }) + .collect(); + + Ok(VerifyingKey::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs, + Vec::new(), + false, + )) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// By default, selector compression is turned **on**. +pub fn keygen_vk<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + keygen_vk_custom(params, circuit, true) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// +/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
+pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + let mut vk = keygen_vk_v2(params, &compiled_circuit)?; + vk.compress_selectors = compress_selectors; + Ok(vk) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. +pub fn keygen_pk_v2<'params, C, P>( + params: &P, + vk: VerifyingKey, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < vk.cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) + .collect(); + + let fixed_cosets = fixed_polys + .iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_pk(params, &vk.domain, &cs.permutation.clone().into()); + + // Compute l_0(X) + // TODO: this can be done more efficiently + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new(&vk.cs); + + Ok(ProvingKey { + vk, + l0, + l_last, + l_active_row, + fixed_values: circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(), + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
+pub fn keygen_pk<'params, C, P, ConcreteCircuit>( + params: &P, + vk: VerifyingKey, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; + keygen_pk_v2(params, vk, &compiled_circuit) +} diff --git a/backend/src/plonk/lookup.rs b/backend/src/plonk/lookup.rs new file mode 100644 index 0000000000..96e9e18468 --- /dev/null +++ b/backend/src/plonk/lookup.rs @@ -0,0 +1,2 @@ +pub mod prover; +pub mod verifier; diff --git a/backend/src/plonk/lookup/prover.rs b/backend/src/plonk/lookup/prover.rs new file mode 100644 index 0000000000..ed8cdabc77 --- /dev/null +++ b/backend/src/plonk/lookup/prover.rs @@ -0,0 +1,476 @@ +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, + ProvingKey, +}; +use super::Argument; +use crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use group::{ + ff::{BatchInvert, Field}, + Curve, +}; +use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::poly::Rotation; +use rand_core::RngCore; +use std::{ + collections::BTreeMap, + iter, + ops::{Mul, MulAssign}, +}; + +#[derive(Debug)] +pub(in crate::plonk) struct Permuted { + compressed_input_expression: Polynomial, + permuted_input_expression: Polynomial, + permuted_input_poly: Polynomial, + permuted_input_blind: Blind, + compressed_table_expression: Polynomial, + permuted_table_expression: Polynomial, + permuted_table_poly: Polynomial, + permuted_table_blind: Blind, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) permuted_input_poly: Polynomial, + permuted_input_blind: Blind, + pub(in crate::plonk) 
permuted_table_poly: Polynomial, + permuted_table_blind: Blind, + pub(in crate::plonk) product_poly: Polynomial, + product_blind: Blind, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +impl> Argument { + /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, + /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, + /// obtaining A' and S', and + /// - constructs Permuted struct using permuted_input_value = A', and + /// permuted_table_expression = S'. + /// The Permuted struct is used to update the Lookup, and is then returned. + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_permuted< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let 
compressed_input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&self.table_expressions); + + // Permute compressed (InputExpression, TableExpression) pair + let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( + pk, + params, + domain, + &mut rng, + &compressed_input_expression, + &compressed_table_expression, + )?; + + // Closure to construct commitment to vector of values + let mut commit_values = |values: &Polynomial| { + let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); + let blind = Blind(C::Scalar::random(&mut rng)); + let commitment = params.commit_lagrange(values, blind).to_affine(); + (poly, blind, commitment) + }; + + // Commit to permuted input expression + let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = + commit_values(&permuted_input_expression); + + // Commit to permuted table expression + let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = + commit_values(&permuted_table_expression); + + // Hash permuted input commitment + transcript.write_point(permuted_input_commitment)?; + + // Hash permuted table commitment + transcript.write_point(permuted_table_commitment)?; + + Ok(Permuted { + compressed_input_expression, + permuted_input_expression, + permuted_input_poly, + permuted_input_blind, + compressed_table_expression, + permuted_table_expression, + permuted_table_poly, + permuted_table_blind, + }) + } +} + +impl Permuted { + /// Given a Lookup with input expressions, table expressions, and the permuted + /// input expression and permuted table expression, this method constructs the + /// grand product polynomial over the lookup. The grand product polynomial + /// is used to populate the Product struct. The Product struct is + /// added to the Lookup and finally returned by the method. 
+ pub(in crate::plonk) fn commit_product< + 'params, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + pk: &ProvingKey, + params: &P, + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + // Goal is to compute the products of fractions + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where a_j(X) is the jth input expression in this lookup, + // where a'(X) is the compression of the permuted input expressions, + // s_j(X) is the jth table expression in this lookup, + // s'(X) is the compression of the permuted table expressions, + // and i is the ith row of the expression. + let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; + // Denominator uses the permuted input expression and permuted table expression + parallelize(&mut lookup_product, |lookup_product, start| { + for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product + .iter_mut() + .zip(self.permuted_input_expression[start..].iter()) + .zip(self.permuted_table_expression[start..].iter()) + { + *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); + } + }); + + // Batch invert to obtain the denominators for the lookup product + // polynomials + lookup_product.iter_mut().batch_invert(); + + // Finish the computation of the entire fraction by computing the numerators + // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + parallelize(&mut lookup_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + + *product *= &(self.compressed_input_expression[i] + &*beta); + *product *= &(self.compressed_table_expression[i] + &*gamma); + } + }); + + // The product vector is a vector of products of fractions of the form + // + // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) + // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) + // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) + // + // where there are m input expressions and m table expressions, + // a_j(\omega^i) is the jth input expression in this lookup, + // a'j(\omega^i) is the permuted input expression, + // s_j(\omega^i) is the jth table expression in this lookup, + // s'(\omega^i) is the permuted table expression, + // and i is the ith row of the expression. + + // Compute the evaluations of the lookup product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(lookup_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. + .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + // This test works only with intermediate representations in this method. + // It can be used for debugging purposes. 
+ { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + + // l_0(X) * (1 - z(X)) = 0 + assert_eq!(z[0], C::Scalar::ONE); + + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + for i in 0..u { + let mut left = z[i + 1]; + let permuted_input_value = &self.permuted_input_expression[i]; + + let permuted_table_value = &self.permuted_table_expression[i]; + + left *= &(*beta + permuted_input_value); + left *= &(*gamma + permuted_table_value); + + let mut right = z[i]; + let mut input_term = self.compressed_input_expression[i]; + let mut table_term = self.compressed_table_expression[i]; + + input_term += &(*beta); + table_term += &(*gamma); + right *= &(input_term * &table_term); + + assert_eq!(left, right); + } + + // l_last(X) * (z(X)^2 - z(X)) = 0 + // Assertion will fail only when soundness is broken, in which + // case this z[u] value will be zero. (bad!) 
+ assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + permuted_input_poly: self.permuted_input_poly, + permuted_input_blind: self.permuted_input_blind, + permuted_table_poly: self.permuted_table_poly, + permuted_table_blind: self.permuted_table_blind, + product_poly: z, + product_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_inv = domain.rotate_omega(*x, Rotation::prev()); + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); + let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); + let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + .chain(Some(permuted_input_eval)) + .chain(Some(permuted_input_inv_eval)) + .chain(Some(permuted_table_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: 
self.constructed.product_blind, + })) + // Open lookup input commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup table commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.permuted_table_poly, + blind: self.constructed.permuted_table_blind, + })) + // Open lookup input commitments at x_inv + .chain(Some(ProverQuery { + point: x_inv, + poly: &self.constructed.permuted_input_poly, + blind: self.constructed.permuted_input_blind, + })) + // Open lookup product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } +} + +type ExpressionPair = (Polynomial, Polynomial); + +/// Given a vector of input values A and a vector of table values S, +/// this method permutes A and S to produce A' and S', such that: +/// - like values in A' are vertically adjacent to each other; and +/// - the first row in a sequence of like values in A' is the row +/// that has the corresponding value in S'. +/// This method returns (A', S') if no errors are encountered. 
+fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + mut rng: R, + input_expression: &Polynomial, + table_expression: &Polynomial, +) -> Result, Error> { + let blinding_factors = pk.vk.cs.blinding_factors(); + let usable_rows = params.n() as usize - (blinding_factors + 1); + + let mut permuted_input_expression: Vec = input_expression.to_vec(); + permuted_input_expression.truncate(usable_rows); + + // Sort input lookup expression values + permuted_input_expression.sort(); + + // A BTreeMap of each unique element in the table expression and its count + let mut leftover_table_map: BTreeMap = table_expression + .iter() + .take(usable_rows) + .fold(BTreeMap::new(), |mut acc, coeff| { + *acc.entry(*coeff).or_insert(0) += 1; + acc + }); + let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; + + let mut repeated_input_rows = permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter_mut()) + .enumerate() + .filter_map(|(row, (input_value, table_value))| { + // If this is the first occurrence of `input_value` in the input expression + if row == 0 || *input_value != permuted_input_expression[row - 1] { + *table_value = *input_value; + // Remove one instance of input_value from leftover_table_map + if let Some(count) = leftover_table_map.get_mut(input_value) { + assert!(*count > 0); + *count -= 1; + None + } else { + // Return error if input_value not found + Some(Err(Error::ConstraintSystemFailure)) + } + // If input value is repeated + } else { + Some(Ok(row)) + } + }) + .collect::, _>>()?; + + // Populate permuted table at unfilled rows with leftover table elements + for (coeff, count) in leftover_table_map.iter() { + for _ in 0..*count { + permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; + } + } + assert!(repeated_input_rows.is_empty()); + + permuted_input_expression + .extend((0..(blinding_factors + 1)).map(|_| 
C::Scalar::random(&mut rng))); + permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); + assert_eq!(permuted_input_expression.len(), params.n() as usize); + assert_eq!(permuted_table_coeffs.len(), params.n() as usize); + + #[cfg(feature = "sanity-checks")] + { + let mut last = None; + for (a, b) in permuted_input_expression + .iter() + .zip(permuted_table_coeffs.iter()) + .take(usable_rows) + { + if *a != *b { + assert_eq!(*a, last.unwrap()); + } + last = Some(*a); + } + } + + Ok(( + domain.lagrange_from_vec(permuted_input_expression), + domain.lagrange_from_vec(permuted_table_coeffs), + )) +} diff --git a/backend/src/plonk/lookup/verifier.rs b/backend/src/plonk/lookup/verifier.rs new file mode 100644 index 0000000000..11e780148d --- /dev/null +++ b/backend/src/plonk/lookup/verifier.rs @@ -0,0 +1,212 @@ +use std::iter; + +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, +}; +use super::Argument; +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{commitment::MSM, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; + +pub struct PermutationCommitments { + permuted_input_commitment: C, + permuted_table_commitment: C, +} + +pub struct Committed { + permuted: PermutationCommitments, + product_commitment: C, +} + +pub struct Evaluated { + committed: Committed, + product_eval: C::Scalar, + product_next_eval: C::Scalar, + permuted_input_eval: C::Scalar, + permuted_input_inv_eval: C::Scalar, + permuted_table_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_permuted_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + transcript: &mut T, + ) -> Result, Error> { + let permuted_input_commitment = transcript.read_point()?; + let permuted_table_commitment = transcript.read_point()?; + + 
Ok(PermutationCommitments { + permuted_input_commitment, + permuted_table_commitment, + }) + } +} + +impl PermutationCommitments { + pub(in crate::plonk) fn read_product_commitment< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_commitment = transcript.read_point()?; + + Ok(Committed { + permuted: self, + product_commitment, + }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_eval = transcript.read_scalar()?; + let product_next_eval = transcript.read_scalar()?; + let permuted_input_eval = transcript.read_scalar()?; + let permuted_input_inv_eval = transcript.read_scalar()?; + let permuted_table_eval = transcript.read_scalar()?; + + Ok(Evaluated { + committed: self, + product_eval, + product_next_eval, + permuted_input_eval, + permuted_input_inv_eval, + permuted_table_eval, + }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + argument: &'a Argument, + theta: ChallengeTheta, + beta: ChallengeBeta, + gamma: ChallengeGamma, + advice_evals: &[C::Scalar], + fixed_evals: &[C::Scalar], + instance_evals: &[C::Scalar], + challenges: &[C::Scalar], + ) -> impl Iterator + 'a { + let active_rows = C::Scalar::ONE - (l_last + l_blind); + + let product_expression = || { + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + let left = self.product_next_eval + * &(self.permuted_input_eval + &*beta) + * &(self.permuted_table_eval + &*gamma); + + let compress_expressions = |expressions: &[Expression]| { + expressions + .iter() + .map(|expression| { + expression.evaluate( + &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + }; + let right = self.product_eval + * &(compress_expressions(&argument.input_expressions) + &*beta) + * &(compress_expressions(&argument.table_expressions) + &*gamma); + + (left - &right) * &active_rows + }; + + std::iter::empty() + .chain( + // l_0(X) * (1 - z(X)) = 0 + Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + ) + .chain( + // l_last(X) * (z(X)^2 - z(X)) = 0 + Some(l_last * &(self.product_eval.square() - &self.product_eval)), + ) + .chain( + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) + // ) = 0 + Some(product_expression()), + ) + .chain(Some( + // l_0(X) * (a'(X) - s'(X)) = 0 + l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), + )) + .chain(Some( + // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + (self.permuted_input_eval - &self.permuted_table_eval) + * &(self.permuted_input_eval - &self.permuted_input_inv_eval) + * &active_rows, + )) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open lookup product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open lookup input commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + *x, + self.permuted_input_eval, + ))) + // Open lookup table commitments at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_table_commitment, + *x, + self.permuted_table_eval, + ))) + // Open lookup input commitments at \omega^{-1} x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.permuted.permuted_input_commitment, + x_inv, + self.permuted_input_inv_eval, + ))) + // Open lookup product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } +} diff --git a/backend/src/plonk/permutation.rs b/backend/src/plonk/permutation.rs new file mode 100644 index 0000000000..e1261f8f10 --- /dev/null +++ b/backend/src/plonk/permutation.rs @@ -0,0 +1,109 @@ +//! Implementation of permutation argument. 
+ +use crate::{ + arithmetic::CurveAffine, + helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, + SerdeCurveAffine, SerdePrimeField, + }, + plonk::Error, + poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, + SerdeFormat, +}; +use halo2_middleware::circuit::{Any, Column}; +use halo2_middleware::permutation::{ArgumentV2, Cell}; + +use std::io; + +pub mod prover; +pub mod verifier; + +/// The verifying key for a single permutation argument. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + commitments: Vec, +} + +impl VerifyingKey { + /// Returns commitments of sigma polynomials + pub fn commitments(&self) -> &Vec { + &self.commitments + } + + pub(crate) fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> + where + C: SerdeCurveAffine, + { + for commitment in &self.commitments { + commitment.write(writer, format)?; + } + Ok(()) + } + + pub(crate) fn read( + reader: &mut R, + argument: &Argument, + format: SerdeFormat, + ) -> io::Result + where + C: SerdeCurveAffine, + { + let commitments = (0..argument.columns.len()) + .map(|_| C::read(reader, format)) + .collect::, _>>()?; + Ok(VerifyingKey { commitments }) + } + + pub(crate) fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + self.commitments.len() * C::byte_length(format) + } +} + +/// The proving key for a single permutation argument. +#[derive(Clone, Debug)] +pub struct ProvingKey { + permutations: Vec>, + polys: Vec>, + pub(super) cosets: Vec>, +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField, +{ + /// Reads proving key for a single permutation argument from buffer using `Polynomial::read`. 
+ pub(super) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + let permutations = read_polynomial_vec(reader, format)?; + let polys = read_polynomial_vec(reader, format)?; + let cosets = read_polynomial_vec(reader, format)?; + Ok(ProvingKey { + permutations, + polys, + cosets, + }) + } + + /// Writes proving key for a single permutation argument to buffer using `Polynomial::write`. + pub(super) fn write( + &self, + writer: &mut W, + format: SerdeFormat, + ) -> io::Result<()> { + write_polynomial_slice(&self.permutations, writer, format)?; + write_polynomial_slice(&self.polys, writer, format)?; + write_polynomial_slice(&self.cosets, writer, format)?; + Ok(()) + } +} + +impl ProvingKey { + /// Gets the total number of bytes in the serialization of `self` + pub(super) fn bytes_length(&self) -> usize { + polynomial_slice_byte_length(&self.permutations) + + polynomial_slice_byte_length(&self.polys) + + polynomial_slice_byte_length(&self.cosets) + } +} diff --git a/common/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs similarity index 100% rename from common/src/plonk/permutation/keygen.rs rename to backend/src/plonk/permutation/keygen.rs diff --git a/backend/src/plonk/permutation/prover.rs b/backend/src/plonk/permutation/prover.rs new file mode 100644 index 0000000000..daf6f49e23 --- /dev/null +++ b/backend/src/plonk/permutation/prover.rs @@ -0,0 +1,332 @@ +use group::{ + ff::{BatchInvert, Field}, + Curve, +}; +use halo2_middleware::ff::PrimeField; +use rand_core::RngCore; +use std::iter::{self, ExactSizeIterator}; + +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::Argument; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + plonk::{self, Error}, + poly::{ + commitment::{Blind, Params}, + Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use halo2_common::plonk::permutation::ProvingKey; +use 
halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; + +pub(crate) struct CommittedSet { + pub(crate) permutation_product_poly: Polynomial, + pub(crate) permutation_product_coset: Polynomial, + permutation_product_blind: Blind, +} + +pub(crate) struct Committed { + pub(crate) sets: Vec>, +} + +pub struct ConstructedSet { + permutation_product_poly: Polynomial, + permutation_product_blind: Blind, +} + +pub(crate) struct Constructed { + sets: Vec>, +} + +pub(crate) struct Evaluated { + constructed: Constructed, +} + +impl Argument { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit< + 'params, + C: CurveAffine, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + params: &P, + pk: &plonk::ProvingKey, + pkey: &ProvingKey, + advice: &[Polynomial], + fixed: &[Polynomial], + instance: &[Polynomial], + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + + // How many columns can be included in a single permutation polynomial? + // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This + // will never underflow because of the requirement of at least a degree + // 3 circuit for the permutation argument. + assert!(pk.vk.cs_degree >= 3); + let chunk_len = pk.vk.cs_degree - 2; + let blinding_factors = pk.vk.cs.blinding_factors(); + + // Each column gets its own delta power. 
+ let mut deltaomega = C::Scalar::ONE; + + // Track the "last" value from the previous column set + let mut last_z = C::Scalar::ONE; + + let mut sets = vec![]; + + for (columns, permutations) in self + .columns + .chunks(chunk_len) + .zip(pkey.permutations.chunks(chunk_len)) + { + // Goal is to compute the products of fractions + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where p_j(X) is the jth column in this permutation, + // and i is the ith row of the column. + + let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; + + // Iterate over each column of the permutation + for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + for ((modified_values, value), permuted_value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + .zip(permuted_column_values[start..].iter()) + { + *modified_values *= &(*beta * permuted_value + &*gamma + value); + } + }); + } + + // Invert to obtain the denominator for the permutation product polynomial + modified_values.batch_invert(); + + // Iterate over each column again, this time finishing the computation + // of the entire fraction by computing the numerators + for &column in columns.iter() { + let omega = domain.get_omega(); + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); + for (modified_values, value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + { + // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta + *modified_values *= 
&(deltaomega * &*beta + &*gamma + value); + deltaomega *= ω + } + }); + deltaomega *= &::DELTA; + } + + // The modified_values vector is a vector of products of fractions + // of the form + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where i is the index into modified_values, for the jth column in + // the permutation + + // Compute the evaluations of the permutation product polynomial + // over our domain, starting with z[0] = 1 + let mut z = vec![last_z]; + for row in 1..(params.n() as usize) { + let mut tmp = z[row - 1]; + + tmp *= &modified_values[row - 1]; + z.push(tmp); + } + let mut z = domain.lagrange_from_vec(z); + // Set blinding factors + for z in &mut z[params.n() as usize - blinding_factors..] { + *z = C::Scalar::random(&mut rng); + } + // Set new last_z + last_z = z[params.n() as usize - (blinding_factors + 1)]; + + let blind = Blind(C::Scalar::random(&mut rng)); + + let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); + let permutation_product_blind = blind; + let z = domain.lagrange_to_coeff(z); + let permutation_product_poly = z.clone(); + + let permutation_product_coset = domain.coeff_to_extended(z.clone()); + + let permutation_product_commitment = + permutation_product_commitment_projective.to_affine(); + + // Hash the permutation product commitment + transcript.write_point(permutation_product_commitment)?; + + sets.push(CommittedSet { + permutation_product_poly, + permutation_product_coset, + permutation_product_blind, + }); + } + + Ok(Committed { sets }) + } +} + +impl Committed { + pub(in crate::plonk) fn construct(self) -> Constructed { + Constructed { + sets: self + .sets + .iter() + .map(|set| ConstructedSet { + permutation_product_poly: set.permutation_product_poly.clone(), + permutation_product_blind: set.permutation_product_blind, + }) + .collect(), + } + } +} + +impl super::ProvingKey { + pub(in crate::plonk) fn open( + &self, + 
x: ChallengeX, + ) -> impl Iterator> + Clone { + self.polys.iter().map(move |poly| ProverQuery { + point: *x, + poly, + blind: Blind::default(), + }) + } + + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + &self, + x: ChallengeX, + transcript: &mut T, + ) -> Result<(), Error> { + // Hash permutation evals + for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { + transcript.write_scalar(eval)?; + } + + Ok(()) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &plonk::ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let blinding_factors = pk.vk.cs.blinding_factors(); + + { + let mut sets = self.sets.iter(); + + while let Some(set) = sets.next() { + let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); + + let permutation_product_next_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation::next()), + ); + + // Hash permutation product evals + for eval in iter::empty() + .chain(Some(&permutation_product_eval)) + .chain(Some(&permutation_product_next_eval)) + { + transcript.write_scalar(*eval)?; + } + + // If we have any remaining sets to process, evaluate this set at omega^u + // so we can constrain the last value of its running product to equal the + // first value of the next set's running product, chaining them together. 
+ if sets.len() > 0 { + let permutation_product_last_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), + ); + + transcript.write_scalar(permutation_product_last_eval)?; + } + } + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a plonk::ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = pk.vk.cs.blinding_factors(); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = pk + .vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.constructed.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega x + .chain(Some(ProverQuery { + point: *x, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + .chain(Some(ProverQuery { + point: x_next, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + })) + // Open it at \omega^{last} x for all but the last set. This rotation is only + // sensical for the first row, but we only use this rotation in a constraint + // that is gated on l_0. 
+ .chain( + self.constructed + .sets + .iter() + .rev() + .skip(1) + .flat_map(move |set| { + Some(ProverQuery { + point: x_last, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + }) + }), + ) + } +} diff --git a/backend/src/plonk/permutation/verifier.rs b/backend/src/plonk/permutation/verifier.rs new file mode 100644 index 0000000000..195a771999 --- /dev/null +++ b/backend/src/plonk/permutation/verifier.rs @@ -0,0 +1,256 @@ +use halo2_middleware::ff::{Field, PrimeField}; +use std::iter; + +use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; +use super::{Argument, VerifyingKey}; +use crate::{ + arithmetic::CurveAffine, + plonk::{self, Error}, + poly::{commitment::MSM, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; + +pub struct Committed { + permutation_product_commitments: Vec, +} + +pub struct EvaluatedSet { + permutation_product_commitment: C, + permutation_product_eval: C::Scalar, + permutation_product_next_eval: C::Scalar, + permutation_product_last_eval: Option, +} + +pub struct CommonEvaluated { + permutation_evals: Vec, +} + +pub struct Evaluated { + sets: Vec>, +} + +impl Argument { + pub(crate) fn read_product_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + vk: &plonk::VerifyingKey, + transcript: &mut T, + ) -> Result, Error> { + let chunk_len = vk.cs_degree - 2; + + let permutation_product_commitments = self + .columns + .chunks(chunk_len) + .map(|_| transcript.read_point()) + .collect::, _>>()?; + + Ok(Committed { + permutation_product_commitments, + }) + } +} + +impl VerifyingKey { + pub(in crate::plonk) fn evaluate, T: TranscriptRead>( + &self, + transcript: &mut T, + ) -> Result, Error> { + let permutation_evals = self + .commitments + .iter() + .map(|_| transcript.read_scalar()) + .collect::, _>>()?; + + Ok(CommonEvaluated { permutation_evals }) + } +} + +impl 
Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let mut sets = vec![]; + + let mut iter = self.permutation_product_commitments.into_iter(); + + while let Some(permutation_product_commitment) = iter.next() { + let permutation_product_eval = transcript.read_scalar()?; + let permutation_product_next_eval = transcript.read_scalar()?; + let permutation_product_last_eval = if iter.len() > 0 { + Some(transcript.read_scalar()?) + } else { + None + }; + + sets.push(EvaluatedSet { + permutation_product_commitment, + permutation_product_eval, + permutation_product_next_eval, + permutation_product_last_eval, + }); + } + + Ok(Evaluated { sets }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + vk: &'a plonk::VerifyingKey, + p: &'a Argument, + common: &'a CommonEvaluated, + advice_evals: &'a [C::Scalar], + fixed_evals: &'a [C::Scalar], + instance_evals: &'a [C::Scalar], + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + beta: ChallengeBeta, + gamma: ChallengeGamma, + x: ChallengeX, + ) -> impl Iterator + 'a { + let chunk_len = vk.cs_degree - 2; + iter::empty() + // Enforce only for the first set. + // l_0(X) * (1 - z_0(X)) = 0 + .chain( + self.sets + .first() + .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), + ) + // Enforce only for the last set. + // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 + .chain(self.sets.last().map(|last_set| { + (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) + * &l_last + })) + // Except for the first set, enforce. 
+ // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 + .chain( + self.sets + .iter() + .skip(1) + .zip(self.sets.iter()) + .map(|(set, last_set)| { + ( + set.permutation_product_eval, + last_set.permutation_product_last_eval.unwrap(), + ) + }) + .map(move |(set, prev_last)| (set - &prev_last) * &l_0), + ) + // And for all the sets we enforce: + // (1 - (l_last(X) + l_blind(X))) * ( + // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + .chain( + self.sets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(common.permutation_evals.chunks(chunk_len)) + .enumerate() + .map(move |(chunk_index, ((set, columns), permutation_evals))| { + let mut left = set.permutation_product_next_eval; + for (eval, permutation_eval) in columns + .iter() + .map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals + [vk.cs.get_any_query_index(column, Rotation::cur())] + } + }) + .zip(permutation_evals.iter()) + { + left *= &(eval + &(*beta * permutation_eval) + &*gamma); + } + + let mut right = set.permutation_product_eval; + let mut current_delta = (*beta * &*x) + * &(::DELTA + .pow_vartime([(chunk_index * chunk_len) as u64])); + for eval in columns.iter().map(|&column| match column.column_type() { + Any::Advice(_) => { + advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Fixed => { + fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + Any::Instance => { + instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] + } + }) { + right *= &(eval + ¤t_delta + &*gamma); + current_delta *= &C::Scalar::DELTA; + } + + (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) + }), + ) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r 
plonk::VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = vk.cs.blinding_factors(); + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega^{-1} x + // Open permutation product commitments at x and \omega x + .chain(Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + *x, + set.permutation_product_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + x_next, + set.permutation_product_next_eval, + ))) + })) + // Open it at \omega^{last} x for all but the last set + .chain(self.sets.iter().rev().skip(1).flat_map(move |set| { + Some(VerifierQuery::new_commitment( + &set.permutation_product_commitment, + x_last, + set.permutation_product_last_eval.unwrap(), + )) + })) + } +} + +impl CommonEvaluated { + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vkey: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + // Open permutation commitments for each permutation argument at x + vkey.commitments + .iter() + .zip(self.permutation_evals.iter()) + .map(move |(commitment, &eval)| VerifierQuery::new_commitment(commitment, *x, eval)) + } +} diff --git a/backend/src/plonk/shuffle.rs b/backend/src/plonk/shuffle.rs new file mode 100644 index 0000000000..96e9e18468 --- /dev/null +++ b/backend/src/plonk/shuffle.rs @@ -0,0 +1,2 @@ +pub mod prover; +pub mod verifier; diff --git a/backend/src/plonk/shuffle/prover.rs b/backend/src/plonk/shuffle/prover.rs new file mode 100644 index 0000000000..b70184fc8c --- /dev/null +++ b/backend/src/plonk/shuffle/prover.rs @@ -0,0 +1,251 @@ +use super::super::{ + circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, +}; +use super::Argument; +use 
crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use group::{ff::BatchInvert, Curve}; +use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::poly::Rotation; +use rand_core::RngCore; +use std::{ + iter, + ops::{Mul, MulAssign}, +}; + +#[derive(Debug)] +struct Compressed { + input_expression: Polynomial, + shuffle_expression: Polynomial, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) product_poly: Polynomial, + product_blind: Blind, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +impl> Argument { + /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions + /// [S_0, S_1, ..., S_{m-1}], this method + /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} + /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... 
+ \theta S_{m-2} + S_{m-1}, + #[allow(clippy::too_many_arguments)] + fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + ) -> Compressed + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the shuffle and compress them + let input_expression = compress_expressions(&self.input_expressions); + + // Get values of table expressions involved in the shuffle and compress them + let shuffle_expression = compress_expressions(&self.shuffle_expressions); + + Compressed { + input_expression, + shuffle_expression, + } + } + + /// Given a Shuffle with input expressions and table expressions this method + /// constructs the grand product polynomial over the shuffle. + /// The grand product polynomial is used to populate the Product struct. + /// The Product struct is added to the Shuffle and finally returned by the method. 
+ #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn commit_product< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + let compressed = self.compress( + pk, + params, + domain, + theta, + advice_values, + fixed_values, + instance_values, + challenges, + ); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut shuffle_product, |shuffle_product, start| { + for (shuffle_product, shuffle_value) in shuffle_product + .iter_mut() + .zip(compressed.shuffle_expression[start..].iter()) + { + *shuffle_product = *gamma + shuffle_value; + } + }); + + shuffle_product.iter_mut().batch_invert(); + + parallelize(&mut shuffle_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + *product *= &(*gamma + compressed.input_expression[i]); + } + }); + + // Compute the evaluations of the shuffle product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(shuffle_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. 
+ .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + assert_eq!(z[0], C::Scalar::ONE); + for i in 0..u { + let mut left = z[i + 1]; + let input_value = &compressed.input_expression[i]; + let shuffle_value = &compressed.shuffle_expression[i]; + left *= &(*gamma + shuffle_value); + let mut right = z[i]; + right *= &(*gamma + input_value); + assert_eq!(left, right); + } + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + product_poly: z, + product_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: 
self.constructed.product_blind, + })) + // Open shuffle product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } +} diff --git a/backend/src/plonk/shuffle/verifier.rs b/backend/src/plonk/shuffle/verifier.rs new file mode 100644 index 0000000000..46a7823c9c --- /dev/null +++ b/backend/src/plonk/shuffle/verifier.rs @@ -0,0 +1,139 @@ +use std::iter; + +use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; +use super::Argument; +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{commitment::MSM, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; + +pub struct Committed { + product_commitment: C, +} + +pub struct Evaluated { + committed: Committed, + product_eval: C::Scalar, + product_next_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_product_commitment< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + transcript: &mut T, + ) -> Result, Error> { + let product_commitment = transcript.read_point()?; + + Ok(Committed { product_commitment }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let product_eval = transcript.read_scalar()?; + let product_next_eval = transcript.read_scalar()?; + + Ok(Evaluated { + committed: self, + product_eval, + product_next_eval, + }) + } +} + +impl Evaluated { + #[allow(clippy::too_many_arguments)] + pub(in crate::plonk) fn expressions<'a>( + &'a self, + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + argument: &'a Argument, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_evals: &[C::Scalar], + fixed_evals: &[C::Scalar], + instance_evals: &[C::Scalar], + challenges: &[C::Scalar], + ) -> impl Iterator + 'a { + let active_rows = 
C::Scalar::ONE - (l_last + l_blind); + + let product_expression = || { + // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma) + let compress_expressions = |expressions: &[Expression]| { + expressions + .iter() + .map(|expression| { + expression.evaluate( + &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + }; + // z(\omega X) (s(X) + \gamma) + let left = self.product_next_eval + * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); + // z(X) (a(X) + \gamma) + let right = + self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); + + (left - &right) * &active_rows + }; + + std::iter::empty() + .chain( + // l_0(X) * (1 - z'(X)) = 0 + Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + ) + .chain( + // l_last(X) * (z(X)^2 - z(X)) = 0 + Some(l_last * &(self.product_eval.square() - &self.product_eval)), + ) + .chain( + // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) + Some(product_expression()), + ) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitment at x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + *x, + self.product_eval, + ))) + // Open shuffle product commitment at \omega x + .chain(Some(VerifierQuery::new_commitment( + &self.committed.product_commitment, + x_next, + self.product_next_eval, + ))) + } +} diff --git a/backend/src/plonk/vanishing.rs 
b/backend/src/plonk/vanishing.rs new file mode 100644 index 0000000000..81f86b02e2 --- /dev/null +++ b/backend/src/plonk/vanishing.rs @@ -0,0 +1,11 @@ +use std::marker::PhantomData; + +use crate::arithmetic::CurveAffine; + +mod prover; +mod verifier; + +/// A vanishing argument. +pub(crate) struct Argument { + _marker: PhantomData, +} diff --git a/backend/src/plonk/vanishing/prover.rs b/backend/src/plonk/vanishing/prover.rs new file mode 100644 index 0000000000..d30d9dc4af --- /dev/null +++ b/backend/src/plonk/vanishing/prover.rs @@ -0,0 +1,199 @@ +use std::{collections::HashMap, iter}; + +use group::Curve; +use halo2_middleware::ff::Field; +use rand_chacha::ChaCha20Rng; +use rand_core::{RngCore, SeedableRng}; + +use super::Argument; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + multicore::current_num_threads, + plonk::{ChallengeX, Error}, + poly::{ + commitment::{Blind, ParamsProver}, + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; + +pub(in crate::plonk) struct Committed { + random_poly: Polynomial, + random_blind: Blind, +} + +pub(in crate::plonk) struct Constructed { + h_pieces: Vec>, + h_blinds: Vec>, + committed: Committed, +} + +pub(in crate::plonk) struct Evaluated { + h_poly: Polynomial, + h_blind: Blind, + committed: Committed, +} + +impl Argument { + pub(in crate::plonk) fn commit< + 'params, + P: ParamsProver<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + params: &P, + domain: &EvaluationDomain, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + // Sample a random polynomial of degree n - 1 + let n = 1usize << domain.k() as usize; + let mut rand_vec = vec![C::Scalar::ZERO; n]; + + let num_threads = current_num_threads(); + let chunk_size = n / num_threads; + let thread_seeds = (0..) 
+ .step_by(chunk_size + 1) + .take(n % num_threads) + .chain( + (chunk_size != 0) + .then(|| ((n % num_threads) * (chunk_size + 1)..).step_by(chunk_size)) + .into_iter() + .flatten(), + ) + .take(num_threads) + .zip(iter::repeat_with(|| { + let mut seed = [0u8; 32]; + rng.fill_bytes(&mut seed); + ChaCha20Rng::from_seed(seed) + })) + .collect::>(); + + parallelize(&mut rand_vec, |chunk, offset| { + let mut rng = thread_seeds[&offset].clone(); + chunk + .iter_mut() + .for_each(|v| *v = C::Scalar::random(&mut rng)); + }); + + let random_poly: Polynomial = domain.coeff_from_vec(rand_vec); + + // Sample a random blinding factor + let random_blind = Blind(C::Scalar::random(rng)); + + // Commit + let c = params.commit(&random_poly, random_blind).to_affine(); + transcript.write_point(c)?; + + Ok(Committed { + random_poly, + random_blind, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn construct< + 'params, + P: ParamsProver<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + params: &P, + domain: &EvaluationDomain, + h_poly: Polynomial, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + // Divide by t(X) = X^{params.n} - 1. 
+ let h_poly = domain.divide_by_vanishing_poly(h_poly); + + // Obtain final h(X) polynomial + let h_poly = domain.extended_to_coeff(h_poly); + + // Split h(X) up into pieces + let h_pieces = h_poly + .chunks_exact(params.n() as usize) + .map(|v| domain.coeff_from_vec(v.to_vec())) + .collect::>(); + drop(h_poly); + let h_blinds: Vec<_> = h_pieces + .iter() + .map(|_| Blind(C::Scalar::random(&mut rng))) + .collect(); + + // Compute commitments to each h(X) piece + let h_commitments_projective: Vec<_> = h_pieces + .iter() + .zip(h_blinds.iter()) + .map(|(h_piece, blind)| params.commit(h_piece, *blind)) + .collect(); + let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; + C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); + let h_commitments = h_commitments; + + // Hash each h(X) piece + for c in h_commitments.iter() { + transcript.write_point(*c)?; + } + + Ok(Constructed { + h_pieces, + h_blinds, + committed: self, + }) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + x: ChallengeX, + xn: C::Scalar, + domain: &EvaluationDomain, + transcript: &mut T, + ) -> Result, Error> { + let h_poly = self + .h_pieces + .iter() + .rev() + .fold(domain.empty_coeff(), |acc, eval| acc * xn + eval); + + let h_blind = self + .h_blinds + .iter() + .rev() + .fold(Blind(C::Scalar::ZERO), |acc, eval| acc * Blind(xn) + *eval); + + let random_eval = eval_polynomial(&self.committed.random_poly, *x); + transcript.write_scalar(random_eval)?; + + Ok(Evaluated { + h_poly, + h_blind, + committed: self.committed, + }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + iter::empty() + .chain(Some(ProverQuery { + point: *x, + poly: &self.h_poly, + blind: self.h_blind, + })) + .chain(Some(ProverQuery { + point: *x, + poly: &self.committed.random_poly, + blind: self.committed.random_blind, + })) + } +} diff --git 
a/backend/src/plonk/vanishing/verifier.rs b/backend/src/plonk/vanishing/verifier.rs new file mode 100644 index 0000000000..05ccb02a5b --- /dev/null +++ b/backend/src/plonk/vanishing/verifier.rs @@ -0,0 +1,138 @@ +use std::iter; + +use halo2_middleware::ff::Field; + +use crate::{ + arithmetic::CurveAffine, + plonk::{Error, VerifyingKey}, + poly::{ + commitment::{Params, MSM}, + VerifierQuery, + }, + transcript::{read_n_points, EncodedChallenge, TranscriptRead}, +}; + +use super::super::{ChallengeX, ChallengeY}; +use super::Argument; + +pub struct Committed { + random_poly_commitment: C, +} + +pub struct Constructed { + h_commitments: Vec, + random_poly_commitment: C, +} + +pub struct PartiallyEvaluated { + h_commitments: Vec, + random_poly_commitment: C, + random_eval: C::Scalar, +} + +pub struct Evaluated> { + h_commitment: M, + random_poly_commitment: C, + expected_h_eval: C::Scalar, + random_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_commitments_before_y< + E: EncodedChallenge, + T: TranscriptRead, + >( + transcript: &mut T, + ) -> Result, Error> { + let random_poly_commitment = transcript.read_point()?; + + Ok(Committed { + random_poly_commitment, + }) + } +} + +impl Committed { + pub(in crate::plonk) fn read_commitments_after_y< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + vk: &VerifyingKey, + transcript: &mut T, + ) -> Result, Error> { + // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 + let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; + + Ok(Constructed { + h_commitments, + random_poly_commitment: self.random_poly_commitment, + }) + } +} + +impl Constructed { + pub(in crate::plonk) fn evaluate_after_x, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let random_eval = transcript.read_scalar()?; + + Ok(PartiallyEvaluated { + h_commitments: self.h_commitments, + random_poly_commitment: self.random_poly_commitment, + random_eval, 
+ }) + } +} + +impl PartiallyEvaluated { + pub(in crate::plonk) fn verify<'params, P: Params<'params, C>>( + self, + params: &'params P, + expressions: impl Iterator, + y: ChallengeY, + xn: C::Scalar, + ) -> Evaluated { + let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); + let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); + + let h_commitment = + self.h_commitments + .iter() + .rev() + .fold(params.empty_msm(), |mut acc, commitment| { + acc.scale(xn); + let commitment: C::CurveExt = (*commitment).into(); + acc.append_term(C::Scalar::ONE, commitment); + + acc + }); + + Evaluated { + expected_h_eval, + h_commitment, + random_poly_commitment: self.random_poly_commitment, + random_eval: self.random_eval, + } + } +} + +impl> Evaluated { + pub(in crate::plonk) fn queries( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + iter::empty() + .chain(Some(VerifierQuery::new_msm( + &self.h_commitment, + *x, + self.expected_h_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &self.random_poly_commitment, + *x, + self.random_eval, + ))) + } +} diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs new file mode 100644 index 0000000000..e60f19374f --- /dev/null +++ b/backend/src/plonk/verifier.rs @@ -0,0 +1,462 @@ +use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use std::iter; + +use super::{ + vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, + VerifyingKey, +}; +use crate::arithmetic::compute_inner_product; +use crate::poly::commitment::{CommitmentScheme, Verifier}; +use crate::poly::VerificationStrategy; +use crate::poly::{ + commitment::{Blind, Params}, + VerifierQuery, +}; +use crate::transcript::{read_n_scalars, EncodedChallenge, TranscriptRead}; + +#[cfg(feature = "batch")] +mod batch; +#[cfg(feature = "batch")] +pub use batch::BatchVerifier; + +/// Returns a boolean indicating 
whether or not the proof is valid. Verifies a single proof (not +/// batched). +pub fn verify_proof_single< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instance: &[&[Scheme::Scalar]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + verify_proof(params, vk, strategy, &[instance], transcript) +} + +/// Returns a boolean indicating whether or not the proof is valid +pub fn verify_proof< + 'params, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instances: &[&[&[Scheme::Scalar]]], + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + // Check that instances matches the expected number of instance columns + for instances in instances.iter() { + if instances.len() != vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + let instance_commitments = if V::QUERY_INSTANCE { + instances + .iter() + .map(|instance| { + instance + .iter() + .map(|instance| { + if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { + return Err(Error::InstanceTooLarge); + } + let mut poly = instance.to_vec(); + poly.resize(params.n() as usize, Scheme::Scalar::ZERO); + let poly = vk.domain.lagrange_from_vec(poly); + + Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) + }) + .collect::, _>>() + }) + .collect::, _>>()? 
+ } else { + vec![vec![]; instances.len()] + }; + + let num_proofs = instance_commitments.len(); + + // Hash verification key into transcript + vk.hash_into(transcript)?; + + if V::QUERY_INSTANCE { + for instance_commitments in instance_commitments.iter() { + // Hash the instance (external) commitments into the transcript + for commitment in instance_commitments { + transcript.common_point(*commitment)? + } + } + } else { + for instance in instances.iter() { + for instance in instance.iter() { + for value in instance.iter() { + transcript.common_scalar(*value)?; + } + } + } + } + + // Hash the prover's advice commitments into the transcript and squeeze challenges + let (advice_commitments, challenges) = { + let mut advice_commitments = + vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; + let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; + + for current_phase in vk.cs.phases() { + for advice_commitments in advice_commitments.iter_mut() { + for (phase, commitment) in vk + .cs + .advice_column_phase + .iter() + .zip(advice_commitments.iter_mut()) + { + if current_phase == *phase { + *commitment = transcript.read_point()?; + } + } + } + for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { + if current_phase == *phase { + *challenge = *transcript.squeeze_challenge_scalar::<()>(); + } + } + } + + (advice_commitments, challenges) + }; + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + + let lookups_permuted = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each lookup permuted commitment + vk.cs + .lookups + .iter() + .map(|argument| argument.read_permuted_commitments(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = 
transcript.squeeze_challenge_scalar(); + + let permutations_committed = (0..num_proofs) + .map(|_| { + // Hash each permutation product commitment + vk.cs.permutation.read_product_commitments(vk, transcript) + }) + .collect::, _>>()?; + + let lookups_committed = lookups_permuted + .into_iter() + .map(|lookups| { + // Hash each lookup product commitment + lookups + .into_iter() + .map(|lookup| lookup.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_committed = (0..num_proofs) + .map(|_| -> Result, _> { + // Hash each shuffle product commitment + vk.cs + .shuffles + .iter() + .map(|argument| argument.read_product_commitment(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; + + // Sample y challenge, which keeps the gates linearly independent. + let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + + let vanishing = vanishing.read_commitments_after_y(vk, transcript)?; + + // Sample x challenge, which is used to ensure the circuit is + // satisfied with high probability. + let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + let instance_evals = if V::QUERY_INSTANCE { + (0..num_proofs) + .map(|_| -> Result, _> { + read_n_scalars(transcript, vk.cs.instance_queries.len()) + }) + .collect::, _>>()? 
+ } else { + let xn = x.pow([params.n()]); + let (min_rotation, max_rotation) = + vk.cs + .instance_queries + .iter() + .fold((0, 0), |(min, max), (_, rotation)| { + if rotation.0 < min { + (rotation.0, max) + } else if rotation.0 > max { + (min, rotation.0) + } else { + (min, max) + } + }); + let max_instance_len = instances + .iter() + .flat_map(|instance| instance.iter().map(|instance| instance.len())) + .max_by(Ord::cmp) + .unwrap_or_default(); + let l_i_s = &vk.domain.l_i_range( + *x, + xn, + -max_rotation..max_instance_len as i32 + min_rotation.abs(), + ); + instances + .iter() + .map(|instances| { + vk.cs + .instance_queries + .iter() + .map(|(column, rotation)| { + let instances = instances[column.index()]; + let offset = (max_rotation - rotation.0) as usize; + compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) + }) + .collect::>() + }) + .collect::>() + }; + + let advice_evals = (0..num_proofs) + .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) + .collect::, _>>()?; + + let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; + + let vanishing = vanishing.evaluate_after_x(transcript)?; + + let permutations_common = vk.permutation.evaluate(transcript)?; + + let permutations_evaluated = permutations_committed + .into_iter() + .map(|permutation| permutation.evaluate(transcript)) + .collect::, _>>()?; + + let lookups_evaluated = lookups_committed + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|lookup| lookup.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles_evaluated = shuffles_committed + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|shuffle| shuffle.evaluate(transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // This check ensures the circuit is satisfied so long as the polynomial + // commitments open to the correct values. 
+ let vanishing = { + // x^n + let xn = x.pow([params.n()]); + + let blinding_factors = vk.cs.blinding_factors(); + let l_evals = vk + .domain + .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); + assert_eq!(l_evals.len(), 2 + blinding_factors); + let l_last = l_evals[0]; + let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] + .iter() + .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); + let l_0 = l_evals[1 + blinding_factors]; + + // Compute the expected value of h(x) + let expressions = advice_evals + .iter() + .zip(instance_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { + let challenges = &challenges; + let fixed_evals = &fixed_evals; + std::iter::empty() + // Evaluate the circuit using the custom gates provided + .chain(vk.cs.gates.iter().flat_map(move |gate| { + gate.polynomials().iter().map(move |poly| { + poly.evaluate( + &|scalar| scalar, + &|_| { + panic!("virtual selectors are removed during optimization") + }, + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + })) + .chain(permutation.expressions( + vk, + &vk.cs.permutation, + &permutations_common, + advice_evals, + fixed_evals, + instance_evals, + l_0, + l_last, + l_blind, + beta, + gamma, + x, + )) + .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + beta, + gamma, + advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( + move |(p, argument)| { + p.expressions( + l_0, + l_last, + l_blind, + argument, + theta, + gamma, + 
advice_evals, + fixed_evals, + instance_evals, + challenges, + ) + }, + )) + }, + ); + + vanishing.verify(params, expressions, y, xn) + }; + + let queries = instance_commitments + .iter() + .zip(instance_evals.iter()) + .zip(advice_commitments.iter()) + .zip(advice_evals.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map( + |( + ( + ( + ( + ((instance_commitments, instance_evals), advice_commitments), + advice_evals, + ), + permutation, + ), + lookups, + ), + shuffles, + )| { + iter::empty() + .chain( + V::QUERY_INSTANCE + .then_some(vk.cs.instance_queries.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &instance_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + instance_evals[query_index], + ) + }, + )) + .into_iter() + .flatten(), + ) + .chain(vk.cs.advice_queries.iter().enumerate().map( + move |(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &advice_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + advice_evals[query_index], + ) + }, + )) + .chain(permutation.queries(vk, x)) + .chain(lookups.iter().flat_map(move |p| p.queries(vk, x))) + .chain(shuffles.iter().flat_map(move |p| p.queries(vk, x))) + }, + ) + .chain( + vk.cs + .fixed_queries + .iter() + .enumerate() + .map(|(query_index, &(column, at))| { + VerifierQuery::new_commitment( + &vk.fixed_commitments[column.index()], + vk.domain.rotate_omega(*x, at), + fixed_evals[query_index], + ) + }), + ) + .chain(permutations_common.queries(&vk.permutation, x)) + .chain(vanishing.queries(x)); + + // We are now convinced the circuit is satisfied so long as the + // polynomial commitments open to the correct values. 
+ + let verifier = V::new(params); + strategy.process(|msm| { + verifier + .verify_proof(transcript, queries, msm) + .map_err(|_| Error::Opening) + }) +} diff --git a/backend/src/plonk/verifier/batch.rs b/backend/src/plonk/verifier/batch.rs new file mode 100644 index 0000000000..d869d87559 --- /dev/null +++ b/backend/src/plonk/verifier/batch.rs @@ -0,0 +1,135 @@ +use group::ff::Field; +use halo2_middleware::ff::FromUniformBytes; +use halo2curves::CurveAffine; +use rand_core::OsRng; + +use super::{verify_proof, VerificationStrategy}; +use crate::{ + multicore::{ + IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, + }, + plonk::{Error, VerifyingKey}, + poly::{ + commitment::{Params, MSM}, + ipa::{ + commitment::{IPACommitmentScheme, ParamsVerifierIPA}, + msm::MSMIPA, + multiopen::VerifierIPA, + strategy::GuardIPA, + }, + }, + transcript::{Blake2bRead, TranscriptReadBuffer}, +}; + +/// A proof verification strategy that returns the proof's MSM. +/// +/// `BatchVerifier` handles the accumulation of the MSMs for the batched proofs. +#[derive(Debug)] +struct BatchStrategy<'params, C: CurveAffine> { + msm: MSMIPA<'params, C>, +} + +impl<'params, C: CurveAffine> + VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> + for BatchStrategy<'params, C> +{ + type Output = MSMIPA<'params, C>; + + fn new(params: &'params ParamsVerifierIPA) -> Self { + BatchStrategy { + msm: MSMIPA::new(params), + } + } + + fn process( + self, + f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, + ) -> Result { + let guard = f(self.msm)?; + Ok(guard.use_challenges()) + } + + fn finalize(self) -> bool { + unreachable!() + } +} + +#[derive(Debug)] +struct BatchItem { + instances: Vec>>, + proof: Vec, +} + +/// A verifier that checks multiple proofs in a batch. 
**This requires the +/// `batch` crate feature to be enabled.** +#[derive(Debug, Default)] +pub struct BatchVerifier { + items: Vec>, +} + +impl BatchVerifier +where + C::Scalar: FromUniformBytes<64>, +{ + /// Constructs a new batch verifier. + pub fn new() -> Self { + Self { items: vec![] } + } + + /// Adds a proof to the batch. + pub fn add_proof(&mut self, instances: Vec>>, proof: Vec) { + self.items.push(BatchItem { instances, proof }) + } + + /// Finalizes the batch and checks its validity. + /// + /// Returns `false` if *some* proof was invalid. If the caller needs to identify + /// specific failing proofs, it must re-process the proofs separately. + /// + /// This uses [`OsRng`] internally instead of taking an `R: RngCore` argument, because + /// the internal parallelization requires access to a RNG that is guaranteed to not + /// clone its internal state when shared between threads. + pub fn finalize(self, params: &ParamsVerifierIPA, vk: &VerifyingKey) -> bool { + fn accumulate_msm<'params, C: CurveAffine>( + mut acc: MSMIPA<'params, C>, + msm: MSMIPA<'params, C>, + ) -> MSMIPA<'params, C> { + // Scale the MSM by a random factor to ensure that if the existing MSM has + // `is_zero() == false` then this argument won't be able to interfere with it + // to make it true, with high probability. 
+ acc.scale(C::Scalar::random(OsRng)); + + acc.add_msm(&msm); + acc + } + + let final_msm = self + .items + .into_par_iter() + .enumerate() + .map(|(i, item)| { + let instances: Vec> = item + .instances + .iter() + .map(|i| i.iter().map(|c| &c[..]).collect()) + .collect(); + let instances: Vec<_> = instances.iter().map(|i| &i[..]).collect(); + + let strategy = BatchStrategy::new(params); + let mut transcript = Blake2bRead::init(&item.proof[..]); + verify_proof(params, vk, strategy, &instances, &mut transcript).map_err(|e| { + tracing::debug!("Batch item {} failed verification: {}", i, e); + e + }) + }) + .try_fold_and_reduce( + || params.empty_msm(), + |acc, res| res.map(|proof_msm| accumulate_msm(acc, proof_msm)), + ); + + match final_msm { + Ok(msm) => msm.check(), + Err(_) => false, + } + } +} diff --git a/common/src/lib.rs b/common/src/lib.rs index 832d3ee8ad..f173d61bba 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -18,6 +18,8 @@ pub mod plonk; pub mod poly; pub mod transcript; -pub mod dev; +// TODO: Move to backend for now. The end goal is to have this in the frontend, but it requires +// many changes because the MockProver heavliy uses backend types. +// pub mod dev; mod helpers; pub use helpers::SerdeFormat; diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 967b4fa663..e53d2870b2 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -15,8 +15,6 @@ use halo2_middleware::permutation::{ArgumentV2, Cell}; use std::io; -pub mod keygen; - /// A permutation argument. #[derive(Debug, Clone)] pub struct Argument { @@ -87,96 +85,6 @@ impl Argument { } } -/// The verifying key for a single permutation argument. 
-#[derive(Clone, Debug)] -pub struct VerifyingKey { - commitments: Vec, -} - -impl VerifyingKey { - /// Returns commitments of sigma polynomials - pub fn commitments(&self) -> &Vec { - &self.commitments - } - - pub(crate) fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> - where - C: SerdeCurveAffine, - { - for commitment in &self.commitments { - commitment.write(writer, format)?; - } - Ok(()) - } - - pub(crate) fn read( - reader: &mut R, - argument: &Argument, - format: SerdeFormat, - ) -> io::Result - where - C: SerdeCurveAffine, - { - let commitments = (0..argument.columns.len()) - .map(|_| C::read(reader, format)) - .collect::, _>>()?; - Ok(VerifyingKey { commitments }) - } - - pub(crate) fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - self.commitments.len() * C::byte_length(format) - } -} - -/// The proving key for a single permutation argument. -#[derive(Clone, Debug)] -pub(crate) struct ProvingKey { - permutations: Vec>, - polys: Vec>, - pub(super) cosets: Vec>, -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField, -{ - /// Reads proving key for a single permutation argument from buffer using `Polynomial::read`. - pub(super) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - let permutations = read_polynomial_vec(reader, format)?; - let polys = read_polynomial_vec(reader, format)?; - let cosets = read_polynomial_vec(reader, format)?; - Ok(ProvingKey { - permutations, - polys, - cosets, - }) - } - - /// Writes proving key for a single permutation argument to buffer using `Polynomial::write`. 
- pub(super) fn write( - &self, - writer: &mut W, - format: SerdeFormat, - ) -> io::Result<()> { - write_polynomial_slice(&self.permutations, writer, format)?; - write_polynomial_slice(&self.polys, writer, format)?; - write_polynomial_slice(&self.cosets, writer, format)?; - Ok(()) - } -} - -impl ProvingKey { - /// Gets the total number of bytes in the serialization of `self` - pub(super) fn bytes_length(&self) -> usize { - polynomial_slice_byte_length(&self.permutations) - + polynomial_slice_byte_length(&self.polys) - + polynomial_slice_byte_length(&self.cosets) - } -} - // TODO: Move to frontend #[derive(Clone, Debug)] pub struct AssemblyFront { From 15027e6232c4acb7cde2bdb829c1bfaaa2eb6e00 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 22 Jan 2024 17:06:26 +0000 Subject: [PATCH 44/79] Checkpoint --- backend/src/plonk.rs | 33 ++++++++++++++++---------------- backend/src/plonk/keygen.rs | 2 +- backend/src/plonk/permutation.rs | 2 ++ backend/src/plonk/prover.rs | 4 ++-- common/src/helpers.rs | 6 +++--- common/src/lib.rs | 3 ++- 6 files changed, 26 insertions(+), 24 deletions(-) diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index 4a5267ebd5..7b116da304 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -1,19 +1,19 @@ use blake2b_simd::Params as Blake2bParams; use group::ff::{Field, FromUniformBytes, PrimeField}; -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, - SerdePrimeField, -}; use crate::poly::{ Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, Polynomial, }; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; use evaluation::Evaluator; +use halo2_common::arithmetic::CurveAffine; +use halo2_common::helpers::{ + self, polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, + SerdeCurveAffine, SerdePrimeField, +}; use 
halo2_common::plonk::{Circuit, ConstraintSystem, PinnedConstraintSystem}; +use halo2_common::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use halo2_common::SerdeFormat; use halo2_middleware::circuit::{ Advice, AdviceQueryMid, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, PreprocessingV2, @@ -36,7 +36,7 @@ mod vanishing; pub struct VerifyingKey { domain: EvaluationDomain, fixed_commitments: Vec, - permutation: halo2_common::plonk::permutation::VerifyingKey, + permutation: permutation::VerifyingKey, cs: ConstraintSystem, /// Cached maximum degree of `cs` (which doesn't change after construction). cs_degree: usize, @@ -84,7 +84,7 @@ where for selector in &self.selectors { // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; + writer.write_all(&[helpers::pack(bits)])?; } } Ok(()) @@ -149,8 +149,7 @@ where .map(|_| C::read(reader, format)) .collect::>()?; - let permutation = - halo2_common::plonk::permutation::VerifyingKey::read(reader, &cs.permutation, format)?; + let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; let (cs, selectors) = if compress_selectors { // read selectors @@ -160,7 +159,7 @@ where let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; reader.read_exact(&mut selector_bytes)?; for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); + helpers::unpack(byte, bits); } Ok(selector) }) @@ -224,7 +223,7 @@ impl VerifyingKey { fn from_parts( domain: EvaluationDomain, fixed_commitments: Vec, - permutation: halo2_common::plonk::permutation::VerifyingKey, + permutation: permutation::VerifyingKey, cs: ConstraintSystem, selectors: Vec>, compress_selectors: bool, @@ -292,7 +291,7 @@ impl VerifyingKey { } /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> 
&halo2_common::plonk::permutation::VerifyingKey { + pub fn permutation(&self) -> &permutation::VerifyingKey { &self.permutation } @@ -317,7 +316,7 @@ pub struct PinnedVerificationKey<'a, C: CurveAffine> { domain: PinnedEvaluationDomain<'a, C::Scalar>, cs: PinnedConstraintSystem<'a, C::Scalar>, fixed_commitments: &'a Vec, - permutation: &'a halo2_common::plonk::permutation::VerifyingKey, + permutation: &'a permutation::VerifyingKey, } /// This is a proving key which allows for the creation of proofs for a @@ -331,7 +330,7 @@ pub struct ProvingKey { fixed_values: Vec>, fixed_polys: Vec>, fixed_cosets: Vec>, - permutation: halo2_common::plonk::permutation::ProvingKey, + permutation: permutation::ProvingKey, ev: Evaluator, } @@ -414,7 +413,7 @@ where let fixed_values = read_polynomial_vec(reader, format)?; let fixed_polys = read_polynomial_vec(reader, format)?; let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = halo2_common::plonk::permutation::ProvingKey::read(reader, format)?; + let permutation = permutation::ProvingKey::read(reader, format)?; let ev = Evaluator::new(vk.cs()); Ok(Self { vk, diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 2dea5d26b1..2a73ce3b2b 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -52,7 +52,7 @@ where pub(crate) struct Assembly { pub(crate) k: u32, pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::keygen::AssemblyFront, + pub(crate) permutation: halo2_common::plonk::permutation::AssemblyFront, pub(crate) selectors: Vec>, // A range of available rows for assignment and copies. 
pub(crate) usable_rows: Range, diff --git a/backend/src/plonk/permutation.rs b/backend/src/plonk/permutation.rs index e1261f8f10..172c54da29 100644 --- a/backend/src/plonk/permutation.rs +++ b/backend/src/plonk/permutation.rs @@ -10,11 +10,13 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; +use halo2_common::plonk::permutation::Argument; use halo2_middleware::circuit::{Any, Column}; use halo2_middleware::permutation::{ArgumentV2, Cell}; use std::io; +pub mod keygen; pub mod prover; pub mod verifier; diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index 3ae9b1f64d..4b5e31b040 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -4,10 +4,10 @@ use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; use std::{collections::HashMap, iter}; +use crate::plonk::{lookup, permutation, shuffle, vanishing}; use halo2_common::plonk::{ circuit::{sealed, Assignment, Circuit, Selector}, - lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; diff --git a/common/src/helpers.rs b/common/src/helpers.rs index 3b1e5769f8..3ff80c845c 100644 --- a/common/src/helpers.rs +++ b/common/src/helpers.rs @@ -121,7 +121,7 @@ pub fn unpack(byte: u8, bits: &mut [bool]) { } /// Reads a vector of polynomials from buffer -pub(crate) fn read_polynomial_vec( +pub fn read_polynomial_vec( reader: &mut R, format: SerdeFormat, ) -> io::Result>> { @@ -135,7 +135,7 @@ pub(crate) fn read_polynomial_vec( } /// Writes a slice of polynomials to buffer -pub(crate) fn write_polynomial_slice( +pub fn write_polynomial_slice( slice: &[Polynomial], writer: &mut W, format: SerdeFormat, @@ -148,7 +148,7 @@ pub(crate) fn write_polynomial_slice( } /// Gets the total 
number of bytes of a slice of polynomials, assuming all polynomials are the same length -pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { +pub fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { let field_len = F::default().to_repr().as_ref().len(); 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) } diff --git a/common/src/lib.rs b/common/src/lib.rs index f173d61bba..85ce36142a 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -15,11 +15,12 @@ pub mod circuit; pub use halo2curves; mod multicore; pub mod plonk; +// TODO: Try to move this to backend and use a lightweight Polynomial type in the frontend pub mod poly; pub mod transcript; // TODO: Move to backend for now. The end goal is to have this in the frontend, but it requires // many changes because the MockProver heavliy uses backend types. // pub mod dev; -mod helpers; +pub mod helpers; pub use helpers::SerdeFormat; From 0dcdf7d05beb0ee5e6b901f81b2b38bd539ac550 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 23 Jan 2024 15:25:57 +0000 Subject: [PATCH 45/79] Checkpoint, backend compiles --- backend/src/lib.rs | 9 + backend/src/plonk.rs | 12 + backend/src/plonk/keygen.rs | 3 +- backend/src/plonk/lookup.rs | 2 + backend/src/plonk/lookup/prover.rs | 211 +++--- backend/src/plonk/lookup/verifier.rs | 31 +- backend/src/plonk/permutation.rs | 2 +- backend/src/plonk/permutation/prover.rs | 280 ++++---- backend/src/plonk/permutation/verifier.rs | 45 +- backend/src/plonk/prover.rs | 18 +- backend/src/plonk/shuffle.rs | 2 + backend/src/plonk/shuffle/prover.rs | 315 +++++---- backend/src/plonk/shuffle/verifier.rs | 22 +- backend/src/plonk/vanishing/verifier.rs | 3 +- backend/src/plonk/verifier.rs | 9 +- common/src/arithmetic.rs | 4 +- common/src/circuit.rs | 5 +- .../src/circuit/floor_planner/single_pass.rs | 3 +- common/src/circuit/floor_planner/v1.rs | 3 +- .../src/circuit/floor_planner/v1/strategy.rs | 8 +- 
common/src/circuit/layouter.rs | 3 +- common/src/circuit/table_layouter.rs | 5 +- common/src/circuit/value.rs | 7 +- common/src/dev/cost.rs | 30 +- common/src/dev/metadata.rs | 2 +- common/src/dev/util.rs | 2 +- common/src/helpers.rs | 2 +- common/src/lib.rs | 5 +- common/src/plonk/assigned.rs | 665 +----------------- common/src/plonk/circuit.rs | 107 +-- common/src/plonk/error.rs | 2 +- common/src/plonk/keygen.rs | 17 +- common/src/plonk/lookup.rs | 8 +- common/src/plonk/permutation.rs | 14 +- common/src/plonk/shuffle.rs | 8 +- common/src/poly.rs | 20 +- common/src/poly/domain.rs | 6 +- common/src/poly/ipa/commitment.rs | 12 +- common/src/poly/ipa/msm.rs | 2 +- common/src/poly/ipa/multiopen.rs | 8 +- common/src/poly/ipa/multiopen/prover.rs | 2 +- common/src/poly/ipa/strategy.rs | 8 +- common/src/poly/kzg/commitment.rs | 12 +- common/src/poly/kzg/msm.rs | 18 +- common/src/poly/kzg/strategy.rs | 8 +- common/src/poly/query.rs | 16 +- common/src/transcript.rs | 4 +- 47 files changed, 673 insertions(+), 1307 deletions(-) diff --git a/backend/src/lib.rs b/backend/src/lib.rs index 904a19c0ed..6058e4980d 100644 --- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -2,3 +2,12 @@ #![allow(unused_imports)] // TODO: Remove pub mod plonk; + +// Internal re-exports +pub use halo2_common::arithmetic; +pub use halo2_common::circuit; +pub use halo2_common::helpers; +pub use halo2_common::multicore; +pub use halo2_common::poly; +pub use halo2_common::transcript; +pub use halo2_common::SerdeFormat; diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index 7b116da304..edf4bc5a84 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -22,6 +22,18 @@ use halo2_middleware::poly::Rotation; use std::io; +// TODO: Import propertly instead of reimporting from inside +pub use halo2_common::plonk::{ + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, +}; +pub mod circuit { + pub use halo2_common::plonk::circuit::{ + compile_circuit, Assignment, 
Circuit, ConstraintSystem, Selector, + }; + pub use halo2_common::plonk::Expression; +} +pub use halo2_common::plonk::Expression; + mod evaluation; mod keygen; mod lookup; diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 2a73ce3b2b..ad144956ee 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -8,7 +8,7 @@ use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, evaluation::Evaluator, - permutation, Assigned, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, + permutation, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -21,6 +21,7 @@ use crate::{ use halo2_middleware::circuit::{ Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, }; +use halo2_middleware::plonk::Assigned; pub(crate) fn create_domain( k: u32, diff --git a/backend/src/plonk/lookup.rs b/backend/src/plonk/lookup.rs index 96e9e18468..f0691166ec 100644 --- a/backend/src/plonk/lookup.rs +++ b/backend/src/plonk/lookup.rs @@ -1,2 +1,4 @@ pub mod prover; pub mod verifier; + +pub use halo2_common::plonk::lookup::Argument; diff --git a/backend/src/plonk/lookup/prover.rs b/backend/src/plonk/lookup/prover.rs index ed8cdabc77..b0124ca494 100644 --- a/backend/src/plonk/lookup/prover.rs +++ b/backend/src/plonk/lookup/prover.rs @@ -51,112 +51,111 @@ pub(in crate::plonk) struct Evaluated { constructed: Committed, } -impl> Argument { - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... 
+ \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); - - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values 
- let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - // Commit to permuted input expression - let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } +/// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions +/// [S_0, S_1, ..., S_{m-1}], this method +/// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} +/// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, +/// - permutes A_compressed and S_compressed using permute_expression_pair() helper, +/// obtaining A' and S', and +/// - constructs Permuted struct using permuted_input_value = A', and +/// permuted_table_expression = S'. +/// The Permuted struct is used to update the Lookup, and is then returned. 
+#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn lookup_commit_permuted< + 'a, + 'params: 'a, + F: WithSmallOrderMulGroup<3>, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>( + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, +) -> Result, Error> +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let compressed_input_expression = compress_expressions(&arg.input_expressions); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&arg.table_expressions); + + // Permute compressed (InputExpression, TableExpression) pair + let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( + pk, + params, + domain, + &mut rng, + &compressed_input_expression, + &compressed_table_expression, + )?; + + // Closure to construct commitment to vector of values + let mut commit_values = |values: &Polynomial| { + let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); + let blind = Blind(C::Scalar::random(&mut rng)); + let commitment = params.commit_lagrange(values, blind).to_affine(); + (poly, blind, commitment) + }; + + // Commit to permuted 
input expression + let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = + commit_values(&permuted_input_expression); + + // Commit to permuted table expression + let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = + commit_values(&permuted_table_expression); + + // Hash permuted input commitment + transcript.write_point(permuted_input_commitment)?; + + // Hash permuted table commitment + transcript.write_point(permuted_table_commitment)?; + + Ok(Permuted { + compressed_input_expression, + permuted_input_expression, + permuted_input_poly, + permuted_input_blind, + compressed_table_expression, + permuted_table_expression, + permuted_table_poly, + permuted_table_blind, + }) } impl Permuted { diff --git a/backend/src/plonk/lookup/verifier.rs b/backend/src/plonk/lookup/verifier.rs index 11e780148d..8639077a07 100644 --- a/backend/src/plonk/lookup/verifier.rs +++ b/backend/src/plonk/lookup/verifier.rs @@ -32,23 +32,20 @@ pub struct Evaluated { permuted_table_eval: C::Scalar, } -impl Argument { - pub(in crate::plonk) fn read_permuted_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permuted_input_commitment = transcript.read_point()?; - let permuted_table_commitment = transcript.read_point()?; - - Ok(PermutationCommitments { - permuted_input_commitment, - permuted_table_commitment, - }) - } +pub(in crate::plonk) fn lookup_read_permuted_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + transcript: &mut T, +) -> Result, Error> { + let permuted_input_commitment = transcript.read_point()?; + let permuted_table_commitment = transcript.read_point()?; + + Ok(PermutationCommitments { + permuted_input_commitment, + permuted_table_commitment, + }) } impl PermutationCommitments { diff --git a/backend/src/plonk/permutation.rs b/backend/src/plonk/permutation.rs index 172c54da29..12f6291398 100644 --- 
a/backend/src/plonk/permutation.rs +++ b/backend/src/plonk/permutation.rs @@ -10,7 +10,7 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; -use halo2_common::plonk::permutation::Argument; +pub use halo2_common::plonk::permutation::Argument; use halo2_middleware::circuit::{Any, Column}; use halo2_middleware::permutation::{ArgumentV2, Cell}; diff --git a/backend/src/plonk/permutation/prover.rs b/backend/src/plonk/permutation/prover.rs index daf6f49e23..317286b6cd 100644 --- a/backend/src/plonk/permutation/prover.rs +++ b/backend/src/plonk/permutation/prover.rs @@ -10,14 +10,13 @@ use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::Argument; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, - plonk::{self, Error}, + plonk::{self, permutation::ProvingKey, Error}, poly::{ commitment::{Blind, Params}, Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use halo2_common::plonk::permutation::ProvingKey; use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; @@ -44,155 +43,152 @@ pub(crate) struct Evaluated { constructed: Constructed, } -impl Argument { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit< - 'params, - C: CurveAffine, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - params: &P, - pk: &plonk::ProvingKey, - pkey: &ProvingKey, - advice: &[Polynomial], - fixed: &[Polynomial], - instance: &[Polynomial], - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - - // How many columns can be included in a single permutation polynomial? - // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This - // will never underflow because of the requirement of at least a degree - // 3 circuit for the permutation argument. 
- assert!(pk.vk.cs_degree >= 3); - let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); - - // Each column gets its own delta power. - let mut deltaomega = C::Scalar::ONE; - - // Track the "last" value from the previous column set - let mut last_z = C::Scalar::ONE; - - let mut sets = vec![]; - - for (columns, permutations) in self - .columns - .chunks(chunk_len) - .zip(pkey.permutations.chunks(chunk_len)) - { - // Goal is to compute the products of fractions - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where p_j(X) is the jth column in this permutation, - // and i is the ith row of the column. - - let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; - - // Iterate over each column of the permutation - for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - for ((modified_values, value), permuted_value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - .zip(permuted_column_values[start..].iter()) - { - *modified_values *= &(*beta * permuted_value + &*gamma + value); - } - }); - } - - // Invert to obtain the denominator for the permutation product polynomial - modified_values.batch_invert(); - - // Iterate over each column again, this time finishing the computation - // of the entire fraction by computing the numerators - for &column in columns.iter() { - let omega = domain.get_omega(); - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); - for (modified_values, value) in 
modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - { - // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); - deltaomega *= ω - } - }); - deltaomega *= &::DELTA; - } +#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn permutation_commit< + 'params, + C: CurveAffine, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>( + arg: &Argument, + params: &P, + pk: &plonk::ProvingKey, + pkey: &ProvingKey, + advice: &[Polynomial], + fixed: &[Polynomial], + instance: &[Polynomial], + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, +) -> Result, Error> { + let domain = &pk.vk.domain; + + // How many columns can be included in a single permutation polynomial? + // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This + // will never underflow because of the requirement of at least a degree + // 3 circuit for the permutation argument. + assert!(pk.vk.cs_degree >= 3); + let chunk_len = pk.vk.cs_degree - 2; + let blinding_factors = pk.vk.cs.blinding_factors(); + + // Each column gets its own delta power. + let mut deltaomega = C::Scalar::ONE; + + // Track the "last" value from the previous column set + let mut last_z = C::Scalar::ONE; + + let mut sets = vec![]; + + for (columns, permutations) in arg + .columns + .chunks(chunk_len) + .zip(pkey.permutations.chunks(chunk_len)) + { + // Goal is to compute the products of fractions + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where p_j(X) is the jth column in this permutation, + // and i is the ith row of the column. 
+ + let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; + + // Iterate over each column of the permutation + for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + for ((modified_values, value), permuted_value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + .zip(permuted_column_values[start..].iter()) + { + *modified_values *= &(*beta * permuted_value + &*gamma + value); + } + }); + } - // The modified_values vector is a vector of products of fractions - // of the form - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where i is the index into modified_values, for the jth column in - // the permutation - - // Compute the evaluations of the permutation product polynomial - // over our domain, starting with z[0] = 1 - let mut z = vec![last_z]; - for row in 1..(params.n() as usize) { - let mut tmp = z[row - 1]; - - tmp *= &modified_values[row - 1]; - z.push(tmp); - } - let mut z = domain.lagrange_from_vec(z); - // Set blinding factors - for z in &mut z[params.n() as usize - blinding_factors..] 
{ - *z = C::Scalar::random(&mut rng); - } - // Set new last_z - last_z = z[params.n() as usize - (blinding_factors + 1)]; + // Invert to obtain the denominator for the permutation product polynomial + modified_values.batch_invert(); + + // Iterate over each column again, this time finishing the computation + // of the entire fraction by computing the numerators + for &column in columns.iter() { + let omega = domain.get_omega(); + let values = match column.column_type() { + Any::Advice(_) => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); + for (modified_values, value) in modified_values + .iter_mut() + .zip(values[column.index()][start..].iter()) + { + // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta + *modified_values *= &(deltaomega * &*beta + &*gamma + value); + deltaomega *= ω + } + }); + deltaomega *= &::DELTA; + } - let blind = Blind(C::Scalar::random(&mut rng)); + // The modified_values vector is a vector of products of fractions + // of the form + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where i is the index into modified_values, for the jth column in + // the permutation + + // Compute the evaluations of the permutation product polynomial + // over our domain, starting with z[0] = 1 + let mut z = vec![last_z]; + for row in 1..(params.n() as usize) { + let mut tmp = z[row - 1]; + + tmp *= &modified_values[row - 1]; + z.push(tmp); + } + let mut z = domain.lagrange_from_vec(z); + // Set blinding factors + for z in &mut z[params.n() as usize - blinding_factors..] 
{ + *z = C::Scalar::random(&mut rng); + } + // Set new last_z + last_z = z[params.n() as usize - (blinding_factors + 1)]; - let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); - let permutation_product_blind = blind; - let z = domain.lagrange_to_coeff(z); - let permutation_product_poly = z.clone(); + let blind = Blind(C::Scalar::random(&mut rng)); - let permutation_product_coset = domain.coeff_to_extended(z.clone()); + let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); + let permutation_product_blind = blind; + let z = domain.lagrange_to_coeff(z); + let permutation_product_poly = z.clone(); - let permutation_product_commitment = - permutation_product_commitment_projective.to_affine(); + let permutation_product_coset = domain.coeff_to_extended(z.clone()); - // Hash the permutation product commitment - transcript.write_point(permutation_product_commitment)?; + let permutation_product_commitment = permutation_product_commitment_projective.to_affine(); - sets.push(CommittedSet { - permutation_product_poly, - permutation_product_coset, - permutation_product_blind, - }); - } + // Hash the permutation product commitment + transcript.write_point(permutation_product_commitment)?; - Ok(Committed { sets }) + sets.push(CommittedSet { + permutation_product_poly, + permutation_product_coset, + permutation_product_blind, + }); } + + Ok(Committed { sets }) } impl Committed { diff --git a/backend/src/plonk/permutation/verifier.rs b/backend/src/plonk/permutation/verifier.rs index 195a771999..9fa98ddc2e 100644 --- a/backend/src/plonk/permutation/verifier.rs +++ b/backend/src/plonk/permutation/verifier.rs @@ -5,10 +5,11 @@ use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use crate::{ arithmetic::CurveAffine, - plonk::{self, Error}, + plonk::{self}, poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_common::plonk::Error; 
use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; @@ -31,28 +32,26 @@ pub struct Evaluated { sets: Vec>, } -impl Argument { - pub(crate) fn read_product_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - vk: &plonk::VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - let chunk_len = vk.cs_degree - 2; - - let permutation_product_commitments = self - .columns - .chunks(chunk_len) - .map(|_| transcript.read_point()) - .collect::, _>>()?; - - Ok(Committed { - permutation_product_commitments, - }) - } +pub(crate) fn permutation_read_product_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + arg: &Argument, + vk: &plonk::VerifyingKey, + transcript: &mut T, +) -> Result, Error> { + let chunk_len = vk.cs_degree - 2; + + let permutation_product_commitments = arg + .columns + .chunks(chunk_len) + .map(|_| transcript.read_point()) + .collect::, _>>()?; + + Ok(Committed { + permutation_product_commitments, + }) } impl VerifyingKey { diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index 4b5e31b040..0ff79f680e 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -4,10 +4,13 @@ use rand_core::RngCore; use std::collections::{BTreeSet, HashSet}; use std::{collections::HashMap, iter}; -use crate::plonk::{lookup, permutation, shuffle, vanishing}; +use crate::plonk::lookup::prover::lookup_commit_permuted; +use crate::plonk::permutation::prover::permutation_commit; +use crate::plonk::shuffle::prover::shuffle_commit_product; +use crate::plonk::{lookup, permutation, shuffle, vanishing, ProvingKey}; use halo2_common::plonk::{ circuit::{sealed, Assignment, Circuit, Selector}, - ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; @@ -15,7 
+18,6 @@ use group::prime::PrimeCurveAffine; use halo2_common::{ arithmetic::{eval_polynomial, CurveAffine}, circuit::Value, - plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, @@ -25,6 +27,7 @@ use halo2_common::{ poly::batch_invert_assigned, transcript::{EncodedChallenge, TranscriptWrite}, }; +use halo2_middleware::plonk::Assigned; /// Collection of instance data used during proving for a single circuit proof. #[derive(Debug)] @@ -452,7 +455,8 @@ impl< meta.lookups .iter() .map(|lookup| { - lookup.commit_permuted( + lookup_commit_permuted( + &lookup, pk, params, domain, @@ -487,7 +491,8 @@ impl< .iter() .zip(advice.iter()) .map(|(instance, advice)| { - meta.permutation.commit( + permutation_commit( + &meta.permutation, params, pk, &pk.permutation, @@ -523,7 +528,8 @@ impl< meta.shuffles .iter() .map(|shuffle| { - shuffle.commit_product( + shuffle_commit_product( + &shuffle, pk, params, domain, diff --git a/backend/src/plonk/shuffle.rs b/backend/src/plonk/shuffle.rs index 96e9e18468..4c48e5a7b6 100644 --- a/backend/src/plonk/shuffle.rs +++ b/backend/src/plonk/shuffle.rs @@ -1,2 +1,4 @@ pub mod prover; pub mod verifier; + +pub use halo2_common::plonk::shuffle::Argument; diff --git a/backend/src/plonk/shuffle/prover.rs b/backend/src/plonk/shuffle/prover.rs index b70184fc8c..3903962d81 100644 --- a/backend/src/plonk/shuffle/prover.rs +++ b/backend/src/plonk/shuffle/prover.rs @@ -1,6 +1,4 @@ -use super::super::{ - circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, -}; +use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, ProvingKey}; use super::Argument; use crate::plonk::evaluation::evaluate; use crate::{ @@ -12,6 +10,7 @@ use crate::{ transcript::{EncodedChallenge, TranscriptWrite}, }; use group::{ff::BatchInvert, Curve}; +use halo2_common::plonk::Error; use halo2_middleware::ff::WithSmallOrderMulGroup; use 
halo2_middleware::poly::Rotation; use rand_core::RngCore; @@ -36,169 +35,169 @@ pub(in crate::plonk) struct Evaluated { constructed: Committed, } -impl> Argument { - /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - #[allow(clippy::too_many_arguments)] - fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - ) -> Compressed - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the shuffle and compress them - let input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the shuffle and compress them - let shuffle_expression = compress_expressions(&self.shuffle_expressions); - - Compressed { - input_expression, - shuffle_expression, - } +/// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions +/// [S_0, S_1, ..., S_{m-1}], this method +/// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... 
+ \theta A_{m-2} + A_{m-1} +/// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, +#[allow(clippy::too_many_arguments)] +fn shuffle_compress<'a, 'params: 'a, F: WithSmallOrderMulGroup<3>, C, P: Params<'params, C>>( + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], +) -> Compressed +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the shuffle and compress them + let input_expression = compress_expressions(&arg.input_expressions); + + // Get values of table expressions involved in the shuffle and compress them + let shuffle_expression = compress_expressions(&arg.shuffle_expressions); + + Compressed { + input_expression, + shuffle_expression, } +} - /// Given a Shuffle with input expressions and table expressions this method - /// constructs the grand product polynomial over the shuffle. - /// The grand product polynomial is used to populate the Product struct. - /// The Product struct is added to the Shuffle and finally returned by the method. 
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_product< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - let compressed = self.compress( - pk, - params, - domain, - theta, - advice_values, - fixed_values, - instance_values, - challenges, - ); - - let blinding_factors = pk.vk.cs.blinding_factors(); - - let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; - parallelize(&mut shuffle_product, |shuffle_product, start| { - for (shuffle_product, shuffle_value) in shuffle_product - .iter_mut() - .zip(compressed.shuffle_expression[start..].iter()) - { - *shuffle_product = *gamma + shuffle_value; - } - }); - - shuffle_product.iter_mut().batch_invert(); - - parallelize(&mut shuffle_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - *product *= &(*gamma + compressed.input_expression[i]); - } - }); - - // Compute the evaluations of the shuffle product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(shuffle_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] +/// Given a Shuffle with input expressions and table expressions this method +/// constructs the grand product polynomial over the shuffle. +/// The grand product polynomial is used to populate the Product struct. +/// The Product struct is added to the Shuffle and finally returned by the method. +#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn shuffle_commit_product< + 'a, + 'params: 'a, + F: WithSmallOrderMulGroup<3>, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>( + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, +) -> Result, Error> +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + let compressed = shuffle_compress( + arg, + pk, + params, + domain, + theta, + advice_values, + fixed_values, + instance_values, + challenges, + ); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut shuffle_product, |shuffle_product, start| { + for (shuffle_product, shuffle_value) in shuffle_product + .iter_mut() + .zip(compressed.shuffle_expression[start..].iter()) { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - assert_eq!(z[0], C::Scalar::ONE); - for i in 0..u { - let mut left = z[i + 1]; - let input_value = &compressed.input_expression[i]; - let shuffle_value = &compressed.shuffle_expression[i]; - left *= &(*gamma + shuffle_value); - let mut right = z[i]; - right *= 
&(*gamma + input_value); - assert_eq!(left, right); - } - assert_eq!(z[u], C::Scalar::ONE); + *shuffle_product = *gamma + shuffle_value; } + }); - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); + shuffle_product.iter_mut().batch_invert(); - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - product_poly: z, - product_blind, + parallelize(&mut shuffle_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + *product *= &(*gamma + compressed.input_expression[i]); + } + }); + + // Compute the evaluations of the shuffle product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(shuffle_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. 
+ .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + assert_eq!(z[0], C::Scalar::ONE); + for i in 0..u { + let mut left = z[i + 1]; + let input_value = &compressed.input_expression[i]; + let shuffle_value = &compressed.shuffle_expression[i]; + left *= &(*gamma + shuffle_value); + let mut right = z[i]; + right *= &(*gamma + input_value); + assert_eq!(left, right); + } + assert_eq!(z[u], C::Scalar::ONE); } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + product_poly: z, + product_blind, + }) } impl Committed { diff --git a/backend/src/plonk/shuffle/verifier.rs b/backend/src/plonk/shuffle/verifier.rs index 46a7823c9c..9bbb122d9e 100644 --- a/backend/src/plonk/shuffle/verifier.rs +++ b/backend/src/plonk/shuffle/verifier.rs @@ -21,19 +21,17 @@ pub struct Evaluated { product_next_eval: C::Scalar, } -impl Argument { - pub(in crate::plonk) fn read_product_commitment< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; +pub(in crate::plonk) fn shuffle_read_product_commitment< + F: Field, + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + transcript: &mut T, +) -> Result, Error> { + let product_commitment = transcript.read_point()?; - Ok(Committed { product_commitment }) - } + Ok(Committed { product_commitment }) } impl Committed { diff --git a/backend/src/plonk/vanishing/verifier.rs 
b/backend/src/plonk/vanishing/verifier.rs index 05ccb02a5b..d570a93b59 100644 --- a/backend/src/plonk/vanishing/verifier.rs +++ b/backend/src/plonk/vanishing/verifier.rs @@ -1,10 +1,11 @@ use std::iter; +use halo2_common::plonk::Error; use halo2_middleware::ff::Field; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::VerifyingKey, poly::{ commitment::{Params, MSM}, VerifierQuery, diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs index e60f19374f..aab7e192b1 100644 --- a/backend/src/plonk/verifier.rs +++ b/backend/src/plonk/verifier.rs @@ -7,6 +7,9 @@ use super::{ VerifyingKey, }; use crate::arithmetic::compute_inner_product; +use crate::plonk::lookup::verifier::lookup_read_permuted_commitments; +use crate::plonk::permutation::verifier::permutation_read_product_commitments; +use crate::plonk::shuffle::verifier::shuffle_read_product_commitments; use crate::poly::commitment::{CommitmentScheme, Verifier}; use crate::poly::VerificationStrategy; use crate::poly::{ @@ -150,7 +153,7 @@ where vk.cs .lookups .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) + .map(|argument| lookup_read_permuted_commitments(transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -164,7 +167,7 @@ where let permutations_committed = (0..num_proofs) .map(|_| { // Hash each permutation product commitment - vk.cs.permutation.read_product_commitments(vk, transcript) + permutation_read_product_commitments(&vk.cs.permutation, vk, transcript) }) .collect::, _>>()?; @@ -185,7 +188,7 @@ where vk.cs .shuffles .iter() - .map(|argument| argument.read_product_commitment(transcript)) + .map(|argument| shuffle_read_product_commitment(transcript)) .collect::, _>>() }) .collect::, _>>()?; diff --git a/common/src/arithmetic.rs b/common/src/arithmetic.rs index 5c85dec61d..d6b546e852 100644 --- a/common/src/arithmetic.rs +++ b/common/src/arithmetic.rs @@ -503,7 +503,7 @@ pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { } 
} -pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { +pub fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { fn evaluate(roots: &[F], z: F) -> F { roots.iter().fold(F::ONE, |acc, point| (z - point) * acc) } @@ -523,7 +523,7 @@ pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { } } -pub(crate) fn powers(base: F) -> impl Iterator { +pub fn powers(base: F) -> impl Iterator { std::iter::successors(Some(F::ONE), move |power| Some(base * power)) } diff --git a/common/src/circuit.rs b/common/src/circuit.rs index 7f68562594..85a42b430c 100644 --- a/common/src/circuit.rs +++ b/common/src/circuit.rs @@ -4,8 +4,9 @@ use std::{fmt, marker::PhantomData}; use halo2_middleware::ff::Field; -use crate::plonk::{Assigned, Error, Selector, TableColumn}; +use crate::plonk::{Error, Selector, TableColumn}; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; mod value; pub use value::Value; @@ -191,7 +192,7 @@ impl<'r, F: Field> From<&'r mut dyn layouter::RegionLayouter> for Region<'r, impl<'r, F: Field> Region<'r, F> { /// Enables a selector at the given offset. 
- pub(crate) fn enable_selector( + pub fn enable_selector( &mut self, annotation: A, selector: &Selector, diff --git a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index 448fd89c33..dca8fa5b4c 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -11,9 +11,10 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assigned, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, + plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; /// A simple [`FloorPlanner`] that performs minimal optimizations. /// diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index ed738c4b4e..fe93680d45 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -8,9 +8,10 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assigned, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, + plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; mod strategy; diff --git a/common/src/circuit/floor_planner/v1/strategy.rs b/common/src/circuit/floor_planner/v1/strategy.rs index 86db63124d..bb73c48e4c 100644 --- a/common/src/circuit/floor_planner/v1/strategy.rs +++ b/common/src/circuit/floor_planner/v1/strategy.rs @@ -30,7 +30,7 @@ impl PartialOrd for AllocatedRegion { } /// An area of empty space within a column. 
-pub(crate) struct EmptySpace { +pub struct EmptySpace { // The starting position (inclusive) of the empty space. start: usize, // The ending position (exclusive) of the empty space, or `None` if unbounded. @@ -38,7 +38,7 @@ pub(crate) struct EmptySpace { } impl EmptySpace { - pub(crate) fn range(&self) -> Option> { + pub fn range(&self) -> Option> { self.end.map(|end| self.start..end) } } @@ -51,7 +51,7 @@ pub struct Allocations(BTreeSet); impl Allocations { /// Returns the row that forms the unbounded unallocated interval [row, None). - pub(crate) fn unbounded_interval_start(&self) -> usize { + pub fn unbounded_interval_start(&self) -> usize { self.0 .iter() .last() @@ -62,7 +62,7 @@ impl Allocations { /// Return all the *unallocated* nonempty intervals intersecting [start, end). /// /// `end = None` represents an unbounded end. - pub(crate) fn free_intervals( + pub fn free_intervals( &self, start: usize, end: Option, diff --git a/common/src/circuit/layouter.rs b/common/src/circuit/layouter.rs index 455a5e4418..578825734c 100644 --- a/common/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -8,8 +8,9 @@ use halo2_middleware::ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; -use crate::plonk::{Assigned, Error, Selector}; +use crate::plonk::{Error, Selector}; use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. 
#[cfg(feature = "thread-safe-region")] diff --git a/common/src/circuit/table_layouter.rs b/common/src/circuit/table_layouter.rs index d6b1d6fc5b..b3455675c7 100644 --- a/common/src/circuit/table_layouter.rs +++ b/common/src/circuit/table_layouter.rs @@ -7,7 +7,8 @@ use std::{ use halo2_middleware::ff::Field; -use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; +use crate::plonk::{Assignment, Error, TableColumn, TableError}; +use halo2_middleware::plonk::Assigned; use super::Value; @@ -117,7 +118,7 @@ impl<'r, 'a, F: Field, CS: Assignment + 'a> TableLayouter } } -pub(crate) fn compute_table_lengths( +pub fn compute_table_lengths( default_and_assigned: &HashMap, Vec)>, ) -> Result { let column_lengths: Result, Error> = default_and_assigned diff --git a/common/src/circuit/value.rs b/common/src/circuit/value.rs index f3ea6a39ea..5ec5942c94 100644 --- a/common/src/circuit/value.rs +++ b/common/src/circuit/value.rs @@ -3,7 +3,8 @@ use std::ops::{Add, Mul, Neg, Sub}; use group::ff::Field; -use crate::plonk::{Assigned, Error}; +use crate::plonk::Error; +use halo2_middleware::plonk::Assigned; /// A value that might exist within a circuit. /// @@ -45,7 +46,7 @@ impl Value { /// Obtains the inner value for assigning into the circuit. /// /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. - pub(crate) fn assign(self) -> Result { + pub fn assign(self) -> Result { self.inner.ok_or(Error::Synthesis) } @@ -64,7 +65,7 @@ impl Value { } /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! - pub(crate) fn into_option(self) -> Option { + pub fn into_option(self) -> Option { self.inner } diff --git a/common/src/dev/cost.rs b/common/src/dev/cost.rs index 7fbeb8a6fa..2f03dff9df 100644 --- a/common/src/dev/cost.rs +++ b/common/src/dev/cost.rs @@ -55,39 +55,39 @@ pub struct CircuitCost> { /// Region implementation used by Layout #[allow(dead_code)] #[derive(Debug)] -pub(crate) struct LayoutRegion { +pub struct LayoutRegion { /// The name of the region. 
Not required to be unique. - pub(crate) name: String, + pub name: String, /// The columns used by this region. - pub(crate) columns: HashSet, + pub columns: HashSet, /// The row that this region starts on, if known. - pub(crate) offset: Option, + pub offset: Option, /// The number of rows that this region takes up. - pub(crate) rows: usize, + pub rows: usize, /// The cells assigned in this region. - pub(crate) cells: Vec<(RegionColumn, usize)>, + pub cells: Vec<(RegionColumn, usize)>, } /// Cost and graphing layouter #[derive(Default, Debug)] -pub(crate) struct Layout { +pub struct Layout { /// k = 1 << n - pub(crate) k: u32, + pub k: u32, /// Regions of the layout - pub(crate) regions: Vec, + pub regions: Vec, current_region: Option, /// Total row count - pub(crate) total_rows: usize, + pub total_rows: usize, /// Total advice rows - pub(crate) total_advice_rows: usize, + pub total_advice_rows: usize, /// Total fixed rows - pub(crate) total_fixed_rows: usize, + pub total_fixed_rows: usize, /// Any cells assigned outside of a region. - pub(crate) loose_cells: Vec<(RegionColumn, usize)>, + pub loose_cells: Vec<(RegionColumn, usize)>, /// Pairs of cells between which we have equality constraints. - pub(crate) equality: Vec<(Column, usize, Column, usize)>, + pub equality: Vec<(Column, usize, Column, usize)>, /// Selector assignments used for optimization pass - pub(crate) selectors: Vec>, + pub selectors: Vec>, } impl Layout { diff --git a/common/src/dev/metadata.rs b/common/src/dev/metadata.rs index 5ca99410b0..690c432628 100644 --- a/common/src/dev/metadata.rs +++ b/common/src/dev/metadata.rs @@ -207,7 +207,7 @@ impl Region { /// This function will return `None` if: /// - There's no annotation map generated for this `Region`. /// - There's no entry on the annotation map corresponding to the metadata provided. 
- pub(crate) fn get_column_annotation(&self, metadata: ColumnMetadata) -> Option { + pub fn get_column_annotation(&self, metadata: ColumnMetadata) -> Option { self.column_annotations .as_ref() .and_then(|map| map.get(&metadata).cloned()) diff --git a/common/src/dev/util.rs b/common/src/dev/util.rs index df91f29b16..52c2de2460 100644 --- a/common/src/dev/util.rs +++ b/common/src/dev/util.rs @@ -6,7 +6,7 @@ use crate::plonk::{AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, Vir use halo2_middleware::circuit::{Advice, Any, Column, ColumnType}; use halo2_middleware::poly::Rotation; -pub(crate) struct AnyQuery { +pub struct AnyQuery { /// Query index pub index: Option, /// Column type diff --git a/common/src/helpers.rs b/common/src/helpers.rs index 3ff80c845c..aaae481ccc 100644 --- a/common/src/helpers.rs +++ b/common/src/helpers.rs @@ -20,7 +20,7 @@ pub enum SerdeFormat { } // Keep this trait for compatibility with IPA serialization -pub(crate) trait CurveRead: CurveAffine { +pub trait CurveRead: CurveAffine { /// Reads a compressed element from the buffer and attempts to parse it /// using `from_bytes`. fn read(reader: &mut R) -> io::Result { diff --git a/common/src/lib.rs b/common/src/lib.rs index 85ce36142a..48cc4b9884 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -13,7 +13,7 @@ pub mod arithmetic; pub mod circuit; pub use halo2curves; -mod multicore; +pub mod multicore; pub mod plonk; // TODO: Try to move this to backend and use a lightweight Polynomial type in the frontend pub mod poly; @@ -24,3 +24,6 @@ pub mod transcript; // pub mod dev; pub mod helpers; pub use helpers::SerdeFormat; + +// TODO: Everything that is moved from this crate to frontend or backend should recover the +// pub status whenever possible. 
diff --git a/common/src/plonk/assigned.rs b/common/src/plonk/assigned.rs index 07de325678..b1f7c7f73b 100644 --- a/common/src/plonk/assigned.rs +++ b/common/src/plonk/assigned.rs @@ -1,665 +1,2 @@ -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; - use group::ff::Field; - -/// A value assigned to a cell within a circuit. -/// -/// Stored as a fraction, so the backend can use batch inversion. -/// -/// A denominator of zero maps to an assigned value of zero. -#[derive(Clone, Copy, Debug)] -pub enum Assigned { - /// The field element zero. - Zero, - /// A value that does not require inversion to evaluate. - Trivial(F), - /// A value stored as a fraction to enable batch inversion. - Rational(F, F), -} - -impl From<&Assigned> for Assigned { - fn from(val: &Assigned) -> Self { - *val - } -} - -impl From<&F> for Assigned { - fn from(numerator: &F) -> Self { - Assigned::Trivial(*numerator) - } -} - -impl From for Assigned { - fn from(numerator: F) -> Self { - Assigned::Trivial(numerator) - } -} - -impl From<(F, F)> for Assigned { - fn from((numerator, denominator): (F, F)) -> Self { - Assigned::Rational(numerator, denominator) - } -} - -impl PartialEq for Assigned { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - // At least one side is directly zero. - (Self::Zero, Self::Zero) => true, - (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - x.is_zero_vartime() - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, - (Self::Trivial(x), Self::Rational(numerator, denominator)) - | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { - &(*x * denominator) == numerator - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, - } - } -} - -impl Eq for Assigned {} - -impl Neg for Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - match self { - Self::Zero => Self::Zero, - Self::Trivial(numerator) => Self::Trivial(-numerator), - Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), - } - } -} - -impl Neg for &Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - -*self - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - // One side is directly zero. - (Self::Zero, _) => rhs, - (_, Self::Zero) => self, - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - other - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator + denominator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - self + Self::Trivial(rhs) - } -} - -impl Add for &Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for Assigned { - type Output = Assigned; - fn add(self, rhs: &Self) -> Assigned { - self + *rhs - } -} - -impl Add> for &Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for &Assigned { - type Output = Assigned; - fn add(self, rhs: &Assigned) -> Assigned { - *self + *rhs - } -} - -impl AddAssign for Assigned { - fn add_assign(&mut self, rhs: Self) { - *self = *self + rhs; - } -} - -impl AddAssign<&Assigned> for Assigned { - fn add_assign(&mut self, rhs: &Self) { - *self = *self + rhs; - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - self + (-rhs) - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - self + (-rhs) - } -} - -impl Sub for &Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for Assigned { - type Output = Assigned; - fn sub(self, rhs: &Self) -> Assigned { - self - *rhs - } -} - -impl Sub> for &Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for &Assigned { - type 
Output = Assigned; - fn sub(self, rhs: &Assigned) -> Assigned { - *self - *rhs - } -} - -impl SubAssign for Assigned { - fn sub_assign(&mut self, rhs: Self) { - *self = *self - rhs; - } -} - -impl SubAssign<&Assigned> for Assigned { - fn sub_assign(&mut self, rhs: &Self) { - *self = *self - rhs; - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - (Self::Zero, _) | (_, Self::Zero) => Self::Zero, - (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - self * Self::Trivial(rhs) - } -} - -impl Mul for &Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - *self * rhs - } -} - -impl Mul<&Assigned> for Assigned { - type Output = Assigned; - fn mul(self, rhs: &Assigned) -> Assigned { - self * *rhs - } -} - -impl MulAssign for Assigned { - fn mul_assign(&mut self, rhs: Self) { - *self = *self * rhs; - } -} - -impl MulAssign<&Assigned> for Assigned { - fn mul_assign(&mut self, rhs: &Self) { - *self = *self * rhs; - } -} - -impl Assigned { - /// Returns the numerator. - pub fn numerator(&self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => *x, - Self::Rational(numerator, _) => *numerator, - } - } - - /// Returns the denominator, if non-trivial. - pub fn denominator(&self) -> Option { - match self { - Self::Zero => None, - Self::Trivial(_) => None, - Self::Rational(_, denominator) => Some(*denominator), - } - } - - /// Returns true iff this element is zero. 
- pub fn is_zero_vartime(&self) -> bool { - match self { - Self::Zero => true, - Self::Trivial(x) => x.is_zero_vartime(), - // Assigned maps x/0 -> 0. - Self::Rational(numerator, denominator) => { - numerator.is_zero_vartime() || denominator.is_zero_vartime() - } - } - } - - /// Doubles this element. - #[must_use] - pub fn double(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.double()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.double(), *denominator) - } - } - } - - /// Squares this element. - #[must_use] - pub fn square(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.square()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.square(), denominator.square()) - } - } - } - - /// Cubes this element. - #[must_use] - pub fn cube(&self) -> Self { - self.square() * self - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Rational(F::ONE, *x), - Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), - } - } - - /// Evaluates this assigned value directly, performing an unbatched inversion if - /// necessary. - /// - /// If the denominator is zero, this returns zero. - pub fn evaluate(self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => x, - Self::Rational(numerator, denominator) => { - if denominator == F::ONE { - numerator - } else { - numerator * denominator.invert().unwrap_or(F::ZERO) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::Assigned; - // We use (numerator, denominator) in the comments below to denote a rational. 
- #[test] - fn add_trivial_to_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // 2 + (1,0) = 2 + 0 = 2 - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn add_rational_to_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) + (1,0) = (1,2) + 0 = (1,2) - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn sub_trivial_from_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - 2 = 0 - 2 = -2 - // This fails if subtraction is implemented using normal rules for rationals. - assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // 2 - (1,0) = 2 - 0 = 2 - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn sub_rational_from_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - (1,2) = 0 - (1,2) = -(1,2) - // This fails if subtraction is implemented using normal rules for rationals. 
- assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // (1,2) - (1,0) = (1,2) - 0 = (1,2) - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn mul_rational_by_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) * (1,0) = (1,2) * 0 = 0 - assert_eq!((a * b).evaluate(), Fp::zero()); - - // (1,0) * (1,2) = 0 * (1,2) = 0 - assert_eq!((b * a).evaluate(), Fp::zero()); - } -} - -#[cfg(test)] -mod proptests { - use std::{ - cmp, - ops::{Add, Mul, Neg, Sub}, - }; - - use group::ff::Field; - use halo2curves::pasta::Fp; - use proptest::{collection::vec, prelude::*, sample::select}; - - use super::Assigned; - - trait UnaryOperand: Neg { - fn double(&self) -> Self; - fn square(&self) -> Self; - fn cube(&self) -> Self; - fn inv0(&self) -> Self; - } - - impl UnaryOperand for F { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert().unwrap_or(F::ZERO) - } - } - - impl UnaryOperand for Assigned { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert() - } - } - - #[derive(Clone, Debug)] - enum UnaryOperator { - Neg, - Double, - Square, - Cube, - Inv0, - } - - const UNARY_OPERATORS: &[UnaryOperator] = &[ - UnaryOperator::Neg, - UnaryOperator::Double, - UnaryOperator::Square, - UnaryOperator::Cube, - UnaryOperator::Inv0, - ]; - - impl UnaryOperator { - fn apply(&self, a: F) -> F { - match self { - Self::Neg => -a, - Self::Double => a.double(), - Self::Square => a.square(), - Self::Cube => a.cube(), - Self::Inv0 => a.inv0(), - } - } - } - - trait BinaryOperand: Sized + Add + Sub + Mul {} - impl BinaryOperand for F {} - impl BinaryOperand for Assigned {} - - #[derive(Clone, Debug)] - 
enum BinaryOperator { - Add, - Sub, - Mul, - } - - const BINARY_OPERATORS: &[BinaryOperator] = &[ - BinaryOperator::Add, - BinaryOperator::Sub, - BinaryOperator::Mul, - ]; - - impl BinaryOperator { - fn apply(&self, a: F, b: F) -> F { - match self { - Self::Add => a + b, - Self::Sub => a - b, - Self::Mul => a * b, - } - } - } - - #[derive(Clone, Debug)] - enum Operator { - Unary(UnaryOperator), - Binary(BinaryOperator), - } - - prop_compose! { - /// Use narrow that can be easily reduced. - fn arb_element()(val in any::()) -> Fp { - Fp::from(val) - } - } - - prop_compose! { - fn arb_trivial()(element in arb_element()) -> Assigned { - Assigned::Trivial(element) - } - } - - prop_compose! { - /// Generates half of the denominators as zero to represent a deferred inversion. - fn arb_rational()( - numerator in arb_element(), - denominator in prop_oneof![ - 1 => Just(Fp::zero()), - 2 => arb_element(), - ], - ) -> Assigned { - Assigned::Rational(numerator, denominator) - } - } - - prop_compose! { - fn arb_operators(num_unary: usize, num_binary: usize)( - unary in vec(select(UNARY_OPERATORS), num_unary), - binary in vec(select(BINARY_OPERATORS), num_binary), - ) -> Vec { - unary.into_iter() - .map(Operator::Unary) - .chain(binary.into_iter().map(Operator::Binary)) - .collect() - } - } - - prop_compose! { - fn arb_testcase()( - num_unary in 0usize..5, - num_binary in 0usize..5, - )( - values in vec( - prop_oneof![ - 1 => Just(Assigned::Zero), - 2 => arb_trivial(), - 2 => arb_rational(), - ], - // Ensure that: - // - we have at least one value to apply unary operators to. - // - we can apply every binary operator pairwise sequentially. - cmp::max(usize::from(num_unary > 0), num_binary + 1)), - operations in arb_operators(num_unary, num_binary).prop_shuffle(), - ) -> (Vec>, Vec) { - (values, operations) - } - } - - proptest! { - #[test] - fn operation_commutativity((values, operations) in arb_testcase()) { - // Evaluate the values at the start. 
- let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); - - // Apply the operations to both the deferred and evaluated values. - fn evaluate( - items: Vec, - operators: &[Operator], - ) -> F { - let mut ops = operators.iter(); - - // Process all binary operators. We are guaranteed to have exactly as many - // binary operators as we need calls to the reduction closure. - let mut res = items.into_iter().reduce(|mut a, b| loop { - match ops.next() { - Some(Operator::Unary(op)) => a = op.apply(a), - Some(Operator::Binary(op)) => break op.apply(a, b), - None => unreachable!(), - } - }).unwrap(); - - // Process any unary operators that weren't handled in the reduce() call - // above (either if we only had one item, or there were unary operators - // after the last binary operator). We are guaranteed to have no binary - // operators remaining at this point. - loop { - match ops.next() { - Some(Operator::Unary(op)) => res = op.apply(res), - Some(Operator::Binary(_)) => unreachable!(), - None => break res, - } - } - } - let deferred_result = evaluate(values, &operations); - let evaluated_result = evaluate(elements, &operations); - - // The two should be equal, i.e. deferred inversion should commute with the - // list of operations. 
- assert_eq!(deferred_result.evaluate(), evaluated_result); - } - } -} +use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 1ee8ffefc6..e80ea5f81d 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1,4 +1,4 @@ -use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; +use super::{lookup, permutation, shuffle, Error, Queries}; use crate::circuit::layouter::SyncDeps; use crate::{ circuit::{Layouter, Region, Value}, @@ -13,6 +13,7 @@ use halo2_middleware::circuit::{ }; use halo2_middleware::ff::Field; use halo2_middleware::metadata; +use halo2_middleware::plonk::Assigned; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::BTreeSet; @@ -31,7 +32,7 @@ mod compress_selectors; pub mod sealed { /// Phase of advice column #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(crate) u8); + pub struct Phase(pub u8); impl Phase { pub fn prev(&self) -> Option { @@ -138,7 +139,7 @@ impl SealedPhase for super::ThirdPhase { /// } /// ``` #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Selector(pub(crate) usize, bool); +pub struct Selector(pub usize, bool); impl Selector { /// Enable this selector at the given offset within the given region. 
@@ -167,11 +168,11 @@ impl Selector { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct FixedQuery { /// Query index - pub(crate) index: Option, + pub index: Option, /// Column index - pub(crate) column_index: usize, + pub column_index: usize, /// Rotation of this query - pub(crate) rotation: Rotation, + pub rotation: Rotation, } impl FixedQuery { @@ -190,13 +191,13 @@ impl FixedQuery { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct AdviceQuery { /// Query index - pub(crate) index: Option, + pub index: Option, /// Column index - pub(crate) column_index: usize, + pub column_index: usize, /// Rotation of this query - pub(crate) rotation: Rotation, + pub rotation: Rotation, /// Phase of this advice column - pub(crate) phase: sealed::Phase, + pub phase: sealed::Phase, } impl AdviceQuery { @@ -220,11 +221,11 @@ impl AdviceQuery { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct InstanceQuery { /// Query index - pub(crate) index: Option, + pub index: Option, /// Column index - pub(crate) column_index: usize, + pub column_index: usize, /// Rotation of this query - pub(crate) rotation: Rotation, + pub rotation: Rotation, } impl InstanceQuery { @@ -1077,14 +1078,14 @@ impl Product for Expression { /// Represents an index into a vector where each entry corresponds to a distinct /// point that polynomials are queried at. #[derive(Copy, Clone, Debug)] -pub(crate) struct PointIndex(pub usize); +pub struct PointIndex(pub usize); /// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset /// within a custom gate. 
#[derive(Clone, Debug)] pub struct VirtualCell { - pub(crate) column: Column, - pub(crate) rotation: Rotation, + pub column: Column, + pub rotation: Rotation, } impl>> From<(Col, Rotation)> for VirtualCell { @@ -1243,11 +1244,11 @@ impl Gate { &self.polys } - pub(crate) fn queried_selectors(&self) -> &[Selector] { + pub fn queried_selectors(&self) -> &[Selector] { &self.queried_selectors } - pub(crate) fn queried_cells(&self) -> &[VirtualCell] { + pub fn queried_cells(&self) -> &[VirtualCell] { &self.queried_cells } } @@ -1479,15 +1480,15 @@ impl Into> for ConstraintSystem { */ // TODO: Move to frontend -pub(crate) struct WitnessCollection<'a, F: Field> { - pub(crate) k: u32, - pub(crate) current_phase: sealed::Phase, - pub(crate) advice: Vec>>, - // pub(crate) unblinded_advice: HashSet, - pub(crate) challenges: &'a HashMap, - pub(crate) instances: &'a [&'a [F]], - pub(crate) usable_rows: RangeTo, - pub(crate) _marker: std::marker::PhantomData, +pub struct WitnessCollection<'a, F: Field> { + pub k: u32, + pub current_phase: sealed::Phase, + pub advice: Vec>>, + // pub unblinded_advice: HashSet, + pub challenges: &'a HashMap, + pub instances: &'a [&'a [F]], + pub usable_rows: RangeTo, + pub _marker: std::marker::PhantomData, } impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { @@ -1869,7 +1870,7 @@ fn cs2_collect_queries_shuffles( /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the /// expressions of gates, lookups and shuffles into equivalent ones with indexed query /// references. -pub(crate) fn collect_queries( +pub fn collect_queries( cs2: &ConstraintSystemV2Backend, ) -> ( Queries, @@ -1921,53 +1922,53 @@ pub(crate) fn collect_queries( /// permutation arrangements. 
#[derive(Debug, Clone)] pub struct ConstraintSystem { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, + pub num_fixed_columns: usize, + pub num_advice_columns: usize, + pub num_instance_columns: usize, + pub num_selectors: usize, + pub num_challenges: usize, /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, + pub unblinded_advice_columns: Vec, /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, + pub advice_column_phase: Vec, /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, + pub challenge_phase: Vec, /// This is a cached vector that maps virtual selectors to the concrete /// fixed column that they were compressed into. This is just used by dev /// tooling right now. - pub(crate) selector_map: Vec>, + pub selector_map: Vec>, - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, + pub gates: Vec>, + pub advice_queries: Vec<(Column, Rotation)>, // Contains an integer for each advice column // identifying how many distinct queries it has // so far; should be same length as num_advice_columns. - pub(crate) num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, + pub num_advice_queries: Vec, + pub instance_queries: Vec<(Column, Rotation)>, + pub fixed_queries: Vec<(Column, Rotation)>, // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, + pub permutation: permutation::Argument, // Vector of lookup arguments, where each corresponds to a sequence of // input expressions and a sequence of table expressions involved in the lookup. 
- pub(crate) lookups: Vec>, + pub lookups: Vec>, // Vector of shuffle arguments, where each corresponds to a sequence of // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, + pub shuffles: Vec>, // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, + pub general_column_annotations: HashMap, // Vector of fixed columns, which can be used to store constant values // that are copied into advice columns. - pub(crate) constants: Vec>, + pub constants: Vec>, - pub(crate) minimum_degree: Option, + pub minimum_degree: Option, } impl From> for ConstraintSystem { @@ -2236,7 +2237,7 @@ impl ConstraintSystem { index } - pub(crate) fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { + pub fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { // Return existing query, if it exists for (index, advice_query) in self.advice_queries.iter().enumerate() { if advice_query == &(column, at) { @@ -2279,7 +2280,7 @@ impl ConstraintSystem { } } - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + pub fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { for (index, advice_query) in self.advice_queries.iter().enumerate() { if advice_query == &(column, at) { return index; @@ -2289,7 +2290,7 @@ impl ConstraintSystem { panic!("get_advice_query_index called for non-existent query"); } - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + pub fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { for (index, fixed_query) in self.fixed_queries.iter().enumerate() { if fixed_query == &(column, at) { return index; @@ -2299,7 +2300,7 @@ impl ConstraintSystem { panic!("get_fixed_query_index called for non-existent query"); } - pub(crate) fn get_instance_query_index(&self, column: Column, at: 
Rotation) -> usize { + pub fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { for (index, instance_query) in self.instance_queries.iter().enumerate() { if instance_query == &(column, at) { return index; @@ -2309,7 +2310,7 @@ impl ConstraintSystem { panic!("get_instance_query_index called for non-existent query"); } - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + pub fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { match column.column_type() { Any::Advice(_) => { self.get_advice_query_index(Column::::try_from(column).unwrap(), at) diff --git a/common/src/plonk/error.rs b/common/src/plonk/error.rs index 84ead5aa7c..a1b4ac30eb 100644 --- a/common/src/plonk/error.rs +++ b/common/src/plonk/error.rs @@ -52,7 +52,7 @@ impl From for Error { impl Error { /// Constructs an `Error::NotEnoughRowsAvailable`. - pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { + pub fn not_enough_rows_available(current_k: u32) -> Self { Error::NotEnoughRowsAvailable { current_k } } } diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 635eeef827..96b8b20661 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -5,7 +5,7 @@ use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, - permutation, Assigned, Error, LagrangeCoeff, Polynomial, + permutation, Error, LagrangeCoeff, Polynomial, }; use crate::{ arithmetic::{parallelize, CurveAffine}, @@ -18,17 +18,18 @@ use crate::{ use halo2_middleware::circuit::{ Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, }; +use halo2_middleware::plonk::Assigned; /// Assembly to be used in circuit synthesis. 
#[derive(Debug)] -pub(crate) struct Assembly { - pub(crate) k: u32, - pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::AssemblyFront, - pub(crate) selectors: Vec>, +pub struct Assembly { + pub k: u32, + pub fixed: Vec, LagrangeCoeff>>, + pub permutation: permutation::AssemblyFront, + pub selectors: Vec>, // A range of available rows for assignment and copies. - pub(crate) usable_rows: Range, - pub(crate) _marker: std::marker::PhantomData, + pub usable_rows: Range, + pub _marker: std::marker::PhantomData, } impl Assignment for Assembly { diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index e7d24f77fb..80ef82624e 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -6,9 +6,9 @@ use std::fmt::{self, Debug}; /// Expressions involved in a lookup argument, with a name as metadata. #[derive(Clone)] pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, + pub name: String, + pub input_expressions: Vec>, + pub table_expressions: Vec>, } impl Debug for Argument { @@ -33,7 +33,7 @@ impl Argument { } } - pub(crate) fn required_degree(&self) -> usize { + pub fn required_degree(&self) -> usize { assert_eq!(self.input_expressions.len(), self.table_expressions.len()); // The first value in the permutation poly should be one. diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index e53d2870b2..b49761231e 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -19,7 +19,7 @@ use std::io; #[derive(Debug, Clone)] pub struct Argument { /// A sequence of columns involved in the argument. - pub(super) columns: Vec>, + pub columns: Vec>, } impl From for Argument { @@ -31,14 +31,14 @@ impl From for Argument { } impl Argument { - pub(crate) fn new() -> Self { + pub fn new() -> Self { Argument { columns: vec![] } } /// Returns the minimum circuit degree required by the permutation argument. 
/// The argument may use larger degree gates depending on the actual /// circuit's degree and how many columns are involved in the permutation. - pub(crate) fn required_degree(&self) -> usize { + pub fn required_degree(&self) -> usize { // degree 2: // l_0(X) * (1 - z(X)) = 0 // @@ -73,7 +73,7 @@ impl Argument { 3 } - pub(crate) fn add_column(&mut self, column: Column) { + pub fn add_column(&mut self, column: Column) { if !self.columns.contains(&column) { self.columns.push(column); } @@ -90,11 +90,11 @@ impl Argument { pub struct AssemblyFront { n: usize, columns: Vec>, - pub(crate) copies: Vec<(Cell, Cell)>, + pub copies: Vec<(Cell, Cell)>, } impl AssemblyFront { - pub(crate) fn new(n: usize, p: &Argument) -> Self { + pub fn new(n: usize, p: &Argument) -> Self { Self { n, columns: p.columns.clone(), @@ -102,7 +102,7 @@ impl AssemblyFront { } } - pub(crate) fn copy( + pub fn copy( &mut self, left_column: Column, left_row: usize, diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index c865ada983..68fc375cac 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -6,9 +6,9 @@ use std::fmt::{self, Debug}; /// Expressions involved in a shuffle argument, with a name as metadata. 
#[derive(Clone)] pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, + pub name: String, + pub input_expressions: Vec>, + pub shuffle_expressions: Vec>, } impl Debug for Argument { @@ -33,7 +33,7 @@ impl Argument { } } - pub(crate) fn required_degree(&self) -> usize { + pub fn required_degree(&self) -> usize { assert_eq!(self.input_expressions.len(), self.shuffle_expressions.len()); let mut input_degree = 1; diff --git a/common/src/poly.rs b/common/src/poly.rs index 9fe12b5da5..fd126dae62 100644 --- a/common/src/poly.rs +++ b/common/src/poly.rs @@ -4,10 +4,10 @@ use crate::arithmetic::parallelize; use crate::helpers::SerdePrimeField; -use crate::plonk::Assigned; use crate::SerdeFormat; use group::ff::{BatchInvert, Field}; +use halo2_middleware::plonk::Assigned; use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; @@ -66,12 +66,12 @@ impl Basis for ExtendedLagrangeCoeff {} /// basis. #[derive(Clone, Debug)] pub struct Polynomial { - pub(crate) values: Vec, - pub(crate) _marker: PhantomData, + pub values: Vec, + pub _marker: PhantomData, } impl Polynomial { - pub(crate) fn new_empty(size: usize, zero: F) -> Self { + pub fn new_empty(size: usize, zero: F) -> Self { Polynomial { values: vec![zero; size], _marker: PhantomData, @@ -83,7 +83,7 @@ impl Polynomial { /// Obtains a polynomial in Lagrange form when given a vector of Lagrange /// coefficients of size `n`; panics if the provided vector is the wrong /// length. - pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { + pub fn new_lagrange_from_vec(values: Vec) -> Polynomial { Polynomial { values, _marker: PhantomData, @@ -169,7 +169,7 @@ impl Polynomial { impl Polynomial { /// Reads polynomial from buffer using `SerdePrimeField::read`. 
- pub(crate) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { + pub fn read(reader: &mut R, format: SerdeFormat) -> io::Result { let mut poly_len = [0u8; 4]; reader.read_exact(&mut poly_len)?; let poly_len = u32::from_be_bytes(poly_len); @@ -184,11 +184,7 @@ impl Polynomial { } /// Writes polynomial to buffer using `SerdePrimeField::write`. - pub(crate) fn write( - &self, - writer: &mut W, - format: SerdeFormat, - ) -> io::Result<()> { + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { writer.write_all(&(self.values.len() as u32).to_be_bytes())?; for value in self.values.iter() { value.write(writer, format)?; @@ -227,7 +223,7 @@ pub fn batch_invert_assigned( } impl Polynomial, LagrangeCoeff> { - pub(crate) fn invert( + pub fn invert( &self, inv_denoms: impl Iterator + ExactSizeIterator, ) -> Polynomial { diff --git a/common/src/poly/domain.rs b/common/src/poly/domain.rs index edeaefb8b2..e2f6b91f8d 100644 --- a/common/src/poly/domain.rs +++ b/common/src/poly/domain.rs @@ -1,14 +1,12 @@ //! Contains utilities for performing polynomial arithmetic over an evaluation //! domain that is of a suitable size for the application. 
-use crate::{ - arithmetic::{best_fft, parallelize}, - plonk::Assigned, -}; +use crate::arithmetic::{best_fft, parallelize}; use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; use group::ff::{BatchInvert, Field}; use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::plonk::Assigned; use halo2_middleware::poly::Rotation; use std::marker::PhantomData; diff --git a/common/src/poly/ipa/commitment.rs b/common/src/poly/ipa/commitment.rs index 7be053c49c..0058262601 100644 --- a/common/src/poly/ipa/commitment.rs +++ b/common/src/poly/ipa/commitment.rs @@ -23,12 +23,12 @@ use std::io; /// Public parameters for IPA commitment scheme #[derive(Debug, Clone)] pub struct ParamsIPA { - pub(crate) k: u32, - pub(crate) n: u64, - pub(crate) g: Vec, - pub(crate) g_lagrange: Vec, - pub(crate) w: C, - pub(crate) u: C, + pub k: u32, + pub n: u64, + pub g: Vec, + pub g_lagrange: Vec, + pub w: C, + pub u: C, } /// Concrete IPA commitment scheme diff --git a/common/src/poly/ipa/msm.rs b/common/src/poly/ipa/msm.rs index 59c99d1f29..b481bc19a4 100644 --- a/common/src/poly/ipa/msm.rs +++ b/common/src/poly/ipa/msm.rs @@ -7,7 +7,7 @@ use std::collections::BTreeMap; /// A multiscalar multiplication in the polynomial commitment scheme #[derive(Debug, Clone)] pub struct MSMIPA<'params, C: CurveAffine> { - pub(crate) params: &'params ParamsVerifierIPA, + pub params: &'params ParamsVerifierIPA, g_scalars: Option>, w_scalar: Option, u_scalar: Option, diff --git a/common/src/poly/ipa/multiopen.rs b/common/src/poly/ipa/multiopen.rs index 1df7f41daa..045e9ac3f1 100644 --- a/common/src/poly/ipa/multiopen.rs +++ b/common/src/poly/ipa/multiopen.rs @@ -37,10 +37,10 @@ type ChallengeX4 = ChallengeScalar; #[derive(Debug)] struct CommitmentData { - pub(crate) commitment: T, - pub(crate) set_index: usize, - pub(crate) point_indices: Vec, - pub(crate) evals: Vec, + pub commitment: T, + pub set_index: usize, + pub point_indices: Vec, + pub evals: Vec, } impl CommitmentData { 
diff --git a/common/src/poly/ipa/multiopen/prover.rs b/common/src/poly/ipa/multiopen/prover.rs index 923248704f..75494973c5 100644 --- a/common/src/poly/ipa/multiopen/prover.rs +++ b/common/src/poly/ipa/multiopen/prover.rs @@ -16,7 +16,7 @@ use std::marker::PhantomData; /// IPA multi-open prover #[derive(Debug)] pub struct ProverIPA<'params, C: CurveAffine> { - pub(crate) params: &'params ParamsIPA, + pub params: &'params ParamsIPA, } impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for ProverIPA<'params, C> { diff --git a/common/src/poly/ipa/strategy.rs b/common/src/poly/ipa/strategy.rs index 6900981f01..cb8dba6b1c 100644 --- a/common/src/poly/ipa/strategy.rs +++ b/common/src/poly/ipa/strategy.rs @@ -17,10 +17,10 @@ use rand_core::OsRng; /// Wrapper for verification accumulator #[derive(Debug, Clone)] pub struct GuardIPA<'params, C: CurveAffine> { - pub(crate) msm: MSMIPA<'params, C>, - pub(crate) neg_c: C::Scalar, - pub(crate) u: Vec, - pub(crate) u_packed: Vec, + pub msm: MSMIPA<'params, C>, + pub neg_c: C::Scalar, + pub u: Vec, + pub u_packed: Vec, } /// An accumulator instance consisting of an evaluation claim and a proof. diff --git a/common/src/poly/kzg/commitment.rs b/common/src/poly/kzg/commitment.rs index e162694941..07c6eb8519 100644 --- a/common/src/poly/kzg/commitment.rs +++ b/common/src/poly/kzg/commitment.rs @@ -19,12 +19,12 @@ use super::msm::MSMKZG; /// These are the public parameters for the polynomial commitment scheme. 
#[derive(Debug, Clone)] pub struct ParamsKZG { - pub(crate) k: u32, - pub(crate) n: u64, - pub(crate) g: Vec, - pub(crate) g_lagrange: Vec, - pub(crate) g2: E::G2Affine, - pub(crate) s_g2: E::G2Affine, + pub k: u32, + pub n: u64, + pub g: Vec, + pub g_lagrange: Vec, + pub g2: E::G2Affine, + pub s_g2: E::G2Affine, } /// Umbrella commitment scheme construction for all KZG variants diff --git a/common/src/poly/kzg/msm.rs b/common/src/poly/kzg/msm.rs index 6244209965..fa8359f94a 100644 --- a/common/src/poly/kzg/msm.rs +++ b/common/src/poly/kzg/msm.rs @@ -18,8 +18,8 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) scalars: Vec, - pub(crate) bases: Vec, + pub scalars: Vec, + pub bases: Vec, } impl MSMKZG @@ -95,7 +95,7 @@ where /// A projective point collector #[derive(Debug, Clone)] -pub(crate) struct PreMSM +pub struct PreMSM where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, @@ -108,13 +108,13 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) fn new() -> Self { + pub fn new() -> Self { PreMSM { projectives_msms: vec![], } } - pub(crate) fn normalize(self) -> MSMKZG { + pub fn normalize(self) -> MSMKZG { let (scalars, bases) = self .projectives_msms .into_iter() @@ -127,7 +127,7 @@ where } } - pub(crate) fn add_msm(&mut self, other: MSMKZG) { + pub fn add_msm(&mut self, other: MSMKZG) { self.projectives_msms.push(other); } } @@ -149,9 +149,9 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) params: &'a ParamsKZG, - pub(crate) left: MSMKZG, - pub(crate) right: MSMKZG, + pub params: &'a ParamsKZG, + pub left: MSMKZG, + pub right: MSMKZG, } impl<'a, E: MultiMillerLoop + Debug> DualMSM<'a, E> diff --git a/common/src/poly/kzg/strategy.rs b/common/src/poly/kzg/strategy.rs index 78d182fbf6..a114146129 100644 --- a/common/src/poly/kzg/strategy.rs +++ b/common/src/poly/kzg/strategy.rs @@ -25,7 +25,7 @@ where E::G1Affine: CurveAffine::Fr, 
CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm_accumulator: DualMSM<'params, E>, + pub msm_accumulator: DualMSM<'params, E>, } /// Define accumulator type as `DualMSM` @@ -45,7 +45,7 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) fn new(msm_accumulator: DualMSM<'params, E>) -> Self { + pub fn new(msm_accumulator: DualMSM<'params, E>) -> Self { Self { msm_accumulator } } } @@ -57,7 +57,7 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm_accumulator: DualMSM<'params, E>, + pub msm_accumulator: DualMSM<'params, E>, } impl<'params, E: MultiMillerLoop + Debug> AccumulatorStrategy<'params, E> @@ -85,7 +85,7 @@ where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm: DualMSM<'params, E>, + pub msm: DualMSM<'params, E>, } impl<'params, E: MultiMillerLoop + Debug> SingleStrategy<'params, E> diff --git a/common/src/poly/query.rs b/common/src/poly/query.rs index bc7a20c240..4c53bf0336 100644 --- a/common/src/poly/query.rs +++ b/common/src/poly/query.rs @@ -20,11 +20,11 @@ pub trait Query: Sized + Clone + Send + Sync { #[derive(Debug, Clone, Copy)] pub struct ProverQuery<'com, C: CurveAffine> { /// Point at which polynomial is queried - pub(crate) point: C::Scalar, + pub point: C::Scalar, /// Coefficients of polynomial - pub(crate) poly: &'com Polynomial, + pub poly: &'com Polynomial, /// Blinding factor of polynomial - pub(crate) blind: Blind, + pub blind: Blind, } impl<'com, C> ProverQuery<'com, C> @@ -44,8 +44,8 @@ where #[doc(hidden)] #[derive(Copy, Clone)] pub struct PolynomialPointer<'com, C: CurveAffine> { - pub(crate) poly: &'com Polynomial, - pub(crate) blind: Blind, + pub poly: &'com Polynomial, + pub blind: Blind, } impl<'com, C: CurveAffine> PartialEq for PolynomialPointer<'com, C> { @@ -96,11 +96,11 @@ impl<'com, C: CurveAffine, M: MSM> VerifierQuery<'com, C, M> { #[derive(Debug, Clone, Copy)] pub struct VerifierQuery<'com, C: CurveAffine, 
M: MSM> { /// Point at which polynomial is queried - pub(crate) point: C::Scalar, + pub point: C::Scalar, /// Commitment to polynomial - pub(crate) commitment: CommitmentReference<'com, C, M>, + pub commitment: CommitmentReference<'com, C, M>, /// Evaluation of polynomial at query point - pub(crate) eval: C::Scalar, + pub eval: C::Scalar, } impl<'com, C, M> VerifierQuery<'com, C, M> diff --git a/common/src/transcript.rs b/common/src/transcript.rs index 6e4f812bdf..79678f5c97 100644 --- a/common/src/transcript.rs +++ b/common/src/transcript.rs @@ -539,14 +539,14 @@ where } } -pub(crate) fn read_n_points, T: TranscriptRead>( +pub fn read_n_points, T: TranscriptRead>( transcript: &mut T, n: usize, ) -> io::Result> { (0..n).map(|_| transcript.read_point()).collect() } -pub(crate) fn read_n_scalars, T: TranscriptRead>( +pub fn read_n_scalars, T: TranscriptRead>( transcript: &mut T, n: usize, ) -> io::Result> { From b0a17cd7ba646599972d26e8b0dcceb846f8057c Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 23 Jan 2024 15:50:03 +0000 Subject: [PATCH 46/79] Add TODOs --- common/src/poly.rs | 3 +++ frontend/src/circuit.rs | 0 frontend/src/lib.rs | 1 + 3 files changed, 4 insertions(+) create mode 100644 frontend/src/circuit.rs diff --git a/common/src/poly.rs b/common/src/poly.rs index fd126dae62..bfabff6011 100644 --- a/common/src/poly.rs +++ b/common/src/poly.rs @@ -33,6 +33,9 @@ pub use domain::*; pub use query::{ProverQuery, VerifierQuery}; pub use strategy::{Guard, VerificationStrategy}; +// TODO: move everything from the poly module to the backend. This requires that the frontend +// works without Poly (and just Vec). + /// This is an error that could occur during proving or circuit synthesis. 
// TODO: these errors need to be cleaned up #[derive(Debug)] diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index e69de29bb2..db89e61256 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -0,0 +1 @@ +pub mod circuit; From 5d9c09da079935baea5d37b4f859fda1aaa9eb71 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 23 Jan 2024 16:24:34 +0000 Subject: [PATCH 47/79] Add TODOs --- backend/src/plonk.rs | 4 +--- backend/src/plonk/keygen.rs | 8 +++++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index edf4bc5a84..eb00beff44 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -27,9 +27,7 @@ pub use halo2_common::plonk::{ ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, }; pub mod circuit { - pub use halo2_common::plonk::circuit::{ - compile_circuit, Assignment, Circuit, ConstraintSystem, Selector, - }; + pub use halo2_common::plonk::circuit::{Assignment, Circuit, ConstraintSystem, Selector}; pub use halo2_common::plonk::Expression; } pub use halo2_common::plonk::Expression; diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index ad144956ee..9b2c557f72 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -6,7 +6,7 @@ use group::Curve; use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ - circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, + circuit::{Assignment, Circuit, ConstraintSystem, Selector}, evaluation::Evaluator, permutation, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, }; @@ -251,6 +251,8 @@ where )) } +// TODO: Move to halo2_proofs, as a legacy wrapper +/* /// Generate a `VerifyingKey` from an instance of `Circuit`. /// By default, selector compression is turned **off**. 
pub fn keygen_vk<'params, C, P, ConcreteCircuit>( @@ -285,6 +287,7 @@ where vk.compress_selectors = compress_selectors; Ok(vk) } +*/ /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. pub fn keygen_pk_v2<'params, C, P>( @@ -379,6 +382,8 @@ where }) } +// TODO: Move to halo2_proofs, as a legacy wrapper +/* /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. pub fn keygen_pk<'params, C, P, ConcreteCircuit>( params: &P, @@ -393,3 +398,4 @@ where let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; keygen_pk_v2(params, vk, &compiled_circuit) } +*/ From f9703ecd960ebc5e34a7a55627b55aff28aa606d Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 23 Jan 2024 16:31:55 +0000 Subject: [PATCH 48/79] Checkpoint. compile_circuit in frontend --- common/src/plonk/circuit.rs | 74 ------------------------- common/src/plonk/keygen.rs | 2 +- frontend/src/circuit.rs | 105 ++++++++++++++++++++++++++++++++++++ frontend/src/lib.rs | 3 ++ 4 files changed, 109 insertions(+), 75 deletions(-) diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index e80ea5f81d..66eb624fa1 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1727,80 +1727,6 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret } } -/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the -/// circuit into its columns and the column configuration; as well as doing the fixed column and -/// copy constraints assignments. The output of this function can then be used for the key -/// generation, and proof generation. -/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
-pub fn compile_circuit>( - k: u32, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result< - ( - CompiledCircuitV2, - ConcreteCircuit::Config, - ConstraintSystem, - ), - Error, -> { - let n = 2usize.pow(k); - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let cs = cs; - - if n < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(k)); - } - - let mut assembly = crate::plonk::keygen::Assembly { - k, - fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::AssemblyFront::new(n, &cs.permutation), - selectors: vec![vec![false; n]; cs.num_selectors], - usable_rows: 0..n - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config.clone(), - cs.constants.clone(), - )?; - - let fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if compress_selectors { - cs.compress_selectors(assembly.selectors.clone()) - } else { - // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
- let selectors = std::mem::take(&mut assembly.selectors); - cs.directly_convert_selectors_to_fixed(selectors) - }; - let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); - fixed.extend(selector_polys.into_iter()); - - let preprocessing = PreprocessingV2 { - permutation: halo2_middleware::permutation::AssemblyMid { - copies: assembly.permutation.copies, - }, - fixed, - }; - - Ok(( - CompiledCircuitV2 { - cs: cs.clone().into(), - preprocessing, - }, - config, - cs, - )) -} - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed /// query references in the expressions. fn cs2_collect_queries_gates( diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 96b8b20661..0c51e1519d 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -4,7 +4,7 @@ use group::Curve; use halo2_middleware::ff::{Field, FromUniformBytes}; use super::{ - circuit::{compile_circuit, Assignment, Circuit, ConstraintSystem, Selector}, + circuit::{Assignment, Circuit, ConstraintSystem, Selector}, permutation, Error, LagrangeCoeff, Polynomial, }; use crate::{ diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index e69de29bb2..0be1636306 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -0,0 +1,105 @@ +//! Traits and structs for implementing circuit components. 
+ +use core::cmp::max; +use core::ops::{Add, Mul}; +use halo2_common::circuit::layouter::SyncDeps; +use halo2_common::plonk::sealed::SealedPhase; +use halo2_common::plonk::FloorPlanner; +use halo2_common::plonk::{lookup, permutation, shuffle, Error, Queries}; +use halo2_common::plonk::{Circuit, ConstraintSystem}; +use halo2_common::{ + circuit::{Layouter, Region, Value}, + poly::{batch_invert_assigned, Polynomial}, +}; +use halo2_middleware::circuit::{ + Advice, AdviceQueryMid, Any, Challenge, Column, CompiledCircuitV2, ConstraintSystemV2Backend, + ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, + PreprocessingV2, +}; +use halo2_middleware::ff::Field; +use halo2_middleware::metadata; +use halo2_middleware::plonk::Assigned; +use halo2_middleware::poly::Rotation; +use std::collections::BTreeSet; +use std::collections::HashMap; +use std::fmt::Debug; +use std::iter::{Product, Sum}; +use std::ops::RangeTo; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the +/// circuit into its columns and the column configuration; as well as doing the fixed column and +/// copy constraints assignments. The output of this function can then be used for the key +/// generation, and proof generation. +/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
+pub fn compile_circuit>( + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result< + ( + CompiledCircuitV2, + ConcreteCircuit::Config, + ConstraintSystem, + ), + Error, +> { + let n = 2usize.pow(k); + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + if n < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(k)); + } + + let mut assembly = halo2_common::plonk::keygen::Assembly { + k, + fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], + permutation: permutation::AssemblyFront::new(n, &cs.permutation), + selectors: vec![vec![false; n]; cs.num_selectors], + usable_rows: 0..n - (cs.blinding_factors() + 1), + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain URS + ConcreteCircuit::FloorPlanner::synthesize( + &mut assembly, + circuit, + config.clone(), + cs.constants.clone(), + )?; + + let fixed = batch_invert_assigned(assembly.fixed); + let (cs, selector_polys) = if compress_selectors { + cs.compress_selectors(assembly.selectors.clone()) + } else { + // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
+ let selectors = std::mem::take(&mut assembly.selectors); + cs.directly_convert_selectors_to_fixed(selectors) + }; + let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); + fixed.extend(selector_polys.into_iter()); + + let preprocessing = PreprocessingV2 { + permutation: halo2_middleware::permutation::AssemblyMid { + copies: assembly.permutation.copies, + }, + fixed, + }; + + Ok(( + CompiledCircuitV2 { + cs: cs.clone().into(), + preprocessing, + }, + config, + cs, + )) +} diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index db89e61256..3fca57d042 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -1 +1,4 @@ +#![allow(dead_code)] // TODO: Remove +#![allow(unused_imports)] // TODO: Remove + pub mod circuit; From 29b6c7527e8d28069325d0b6c47f6aab7d5184e0 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 23 Jan 2024 16:35:29 +0000 Subject: [PATCH 49/79] Checkpoint. WitnessCalculator in backend --- common/src/plonk/circuit.rs | 248 ----------------------------------- frontend/src/circuit.rs | 250 +++++++++++++++++++++++++++++++++++- 2 files changed, 249 insertions(+), 249 deletions(-) diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 66eb624fa1..b226c70449 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1479,254 +1479,6 @@ impl Into> for ConstraintSystem { } */ -// TODO: Move to frontend -pub struct WitnessCollection<'a, F: Field> { - pub k: u32, - pub current_phase: sealed::Phase, - pub advice: Vec>>, - // pub unblinded_advice: HashSet, - pub challenges: &'a HashMap, - pub instances: &'a [&'a [F]], - pub usable_rows: RangeTo, - pub _marker: std::marker::PhantomData, -} - -impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. 
- } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase.0 != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - -/// Witness calculator. 
Frontend function -#[derive(Debug)] -pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { - k: u32, - n: usize, - unusable_rows_start: usize, - circuit: &'a ConcreteCircuit, - config: &'a ConcreteCircuit::Config, - cs: &'a ConstraintSystem, - instances: &'a [&'a [F]], - next_phase: u8, -} - -impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { - /// Create a new WitnessCalculator - pub fn new( - k: u32, - circuit: &'a ConcreteCircuit, - config: &'a ConcreteCircuit::Config, - cs: &'a ConstraintSystem, - instances: &'a [&'a [F]], - ) -> Self { - let n = 2usize.pow(k); - let unusable_rows_start = n - (cs.blinding_factors() + 1); - Self { - k, - n, - unusable_rows_start, - circuit, - config, - cs, - instances, - next_phase: 0, - } - } - - /// Calculate witness at phase - pub fn calc( - &mut self, - phase: u8, - challenges: &HashMap, - ) -> Result>>>, Error> { - if phase != self.next_phase { - return Err(Error::Other(format!( - "Expected phase {}, got {}", - self.next_phase, phase - ))); - } - let current_phase = match phase { - 0 => FirstPhase.to_sealed(), - 1 => SecondPhase.to_sealed(), - 2 => ThirdPhase.to_sealed(), - _ => unreachable!("only phase [0,2] supported"), - }; - - let mut witness = WitnessCollection { - k: self.k, - current_phase, - advice: vec![vec![Assigned::Zero; self.n]; self.cs.num_advice_columns], - instances: self.instances, - challenges, - // The prover will not be allowed to assign values to advice - // cells that exist within inactive rows, which include some - // number of blinding factors and an extra row for use in the - // permutation argument. - usable_rows: ..self.unusable_rows_start, - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain the witness and other information. 
- ConcreteCircuit::FloorPlanner::synthesize( - &mut witness, - self.circuit, - self.config.clone(), - self.cs.constants.clone(), - ) - .expect("todo"); - - let column_indices = self - .cs - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == *phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - self.next_phase += 1; - Ok(witness - .advice - .into_iter() - .enumerate() - .map(|(column_index, advice)| { - if column_indices.contains(&column_index) { - Some(advice) - } else { - None - } - }) - .collect()) - } -} - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed /// query references in the expressions. fn cs2_collect_queries_gates( diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index 0be1636306..b76062ac50 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -3,9 +3,10 @@ use core::cmp::max; use core::ops::{Add, Mul}; use halo2_common::circuit::layouter::SyncDeps; -use halo2_common::plonk::sealed::SealedPhase; +use halo2_common::plonk::sealed::{self, SealedPhase}; use halo2_common::plonk::FloorPlanner; use halo2_common::plonk::{lookup, permutation, shuffle, Error, Queries}; +use halo2_common::plonk::{Assignment, FirstPhase, SecondPhase, Selector, ThirdPhase}; use halo2_common::plonk::{Circuit, ConstraintSystem}; use halo2_common::{ circuit::{Layouter, Region, Value}, @@ -103,3 +104,250 @@ pub fn compile_circuit>( cs, )) } + +pub struct WitnessCollection<'a, F: Field> { + pub k: u32, + pub current_phase: sealed::Phase, + pub advice: Vec>>, + // pub unblinded_advice: HashSet, + pub challenges: &'a HashMap, + pub instances: &'a [&'a [F]], + pub usable_rows: RangeTo, + pub _marker: std::marker::PhantomData, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. 
+ } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. + if self.current_phase.0 != column.column_type().phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// Witness calculator. 
Frontend function +#[derive(Debug)] +pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { + k: u32, + n: usize, + unusable_rows_start: usize, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + next_phase: u8, +} + +impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { + /// Create a new WitnessCalculator + pub fn new( + k: u32, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [&'a [F]], + ) -> Self { + let n = 2usize.pow(k); + let unusable_rows_start = n - (cs.blinding_factors() + 1); + Self { + k, + n, + unusable_rows_start, + circuit, + config, + cs, + instances, + next_phase: 0, + } + } + + /// Calculate witness at phase + pub fn calc( + &mut self, + phase: u8, + challenges: &HashMap, + ) -> Result>>>, Error> { + if phase != self.next_phase { + return Err(Error::Other(format!( + "Expected phase {}, got {}", + self.next_phase, phase + ))); + } + let current_phase = match phase { + 0 => FirstPhase.to_sealed(), + 1 => SecondPhase.to_sealed(), + 2 => ThirdPhase.to_sealed(), + _ => unreachable!("only phase [0,2] supported"), + }; + + let mut witness = WitnessCollection { + k: self.k, + current_phase, + advice: vec![vec![Assigned::Zero; self.n]; self.cs.num_advice_columns], + instances: self.instances, + challenges, + // The prover will not be allowed to assign values to advice + // cells that exist within inactive rows, which include some + // number of blinding factors and an extra row for use in the + // permutation argument. + usable_rows: ..self.unusable_rows_start, + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain the witness and other information. 
+ ConcreteCircuit::FloorPlanner::synthesize( + &mut witness, + self.circuit, + self.config.clone(), + self.cs.constants.clone(), + ) + .expect("todo"); + + let column_indices = self + .cs + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == *phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + self.next_phase += 1; + Ok(witness + .advice + .into_iter() + .enumerate() + .map(|(column_index, advice)| { + if column_indices.contains(&column_index) { + Some(advice) + } else { + None + } + }) + .collect()) + } +} From 552c567f54b2e87c113dca00f68c33624fc3719f Mon Sep 17 00:00:00 2001 From: Eduard S Date: Wed, 24 Jan 2024 10:27:09 +0000 Subject: [PATCH 50/79] Checkpoint, halo2_proofs legacy --- Cargo.toml | 1 + backend/src/plonk.rs | 9 +- backend/src/plonk/keygen.rs | 56 - backend/src/plonk/prover.rs | 118 +-- backend/src/plonk/verifier.rs | 2 +- halo2_proofs/Cargo.toml | 42 +- halo2_proofs/katex-header.html | 15 - halo2_proofs/src/lib.rs | 28 +- halo2_proofs/src/plonk.rs | 553 +--------- halo2_proofs/src/plonk/keygen.rs | 344 +----- halo2_proofs/src/plonk/prover.rs | 885 +--------------- .../CHANGELOG.md | 0 halo2_proofs_rm/Cargo.toml | 112 ++ {halo2_proofs => halo2_proofs_rm}/README.md | 0 .../benches/arithmetic.rs | 0 .../benches/commit_zk.rs | 0 .../benches/dev_lookup.rs | 0 .../benches/fft.rs | 0 .../benches/hashtocurve.rs | 0 .../benches/plonk.rs | 0 .../examples/circuit-layout.rs | 0 .../examples/proof-size.rs | 0 .../examples/serialization.rs | 0 .../examples/shuffle.rs | 0 .../examples/shuffle_api.rs | 0 .../examples/simple-example.rs | 0 .../examples/two-chip.rs | 0 .../examples/vector-mul.rs | 0 .../examples/vector-ops-unblinded.rs | 0 .../proptest-regressions/plonk/assigned.txt | 0 .../plonk/circuit/compress_selectors.txt | 0 .../src/arithmetic.rs | 0 .../src/circuit.rs | 0 .../src/circuit/floor_planner.rs | 0 .../src/circuit/floor_planner/single_pass.rs | 0 
.../src/circuit/floor_planner/v1.rs | 0 .../src/circuit/floor_planner/v1/strategy.rs | 0 .../src/circuit/layouter.rs | 0 .../src/circuit/table_layouter.rs | 0 .../src/circuit/value.rs | 0 {halo2_proofs => halo2_proofs_rm}/src/dev.rs | 0 .../src/dev/cost.rs | 0 .../src/dev/cost_model.rs | 0 .../src/dev/failure.rs | 0 .../src/dev/failure/emitter.rs | 0 .../src/dev/gates.rs | 0 .../src/dev/graph.rs | 0 .../src/dev/graph/layout.rs | 0 .../src/dev/metadata.rs | 0 .../src/dev/tfp.rs | 0 .../src/dev/util.rs | 0 .../src/helpers.rs | 0 halo2_proofs_rm/src/lib.rs | 21 + .../src/multicore.rs | 0 halo2_proofs_rm/src/plonk.rs | 549 ++++++++++ .../src/plonk/assigned.rs | 0 .../src/plonk/circuit.rs | 0 .../src/plonk/circuit/compress_selectors.rs | 0 .../src/plonk/error.rs | 0 .../src/plonk/evaluation.rs | 0 halo2_proofs_rm/src/plonk/keygen.rs | 394 +++++++ .../src/plonk/lookup.rs | 0 .../src/plonk/lookup/prover.rs | 0 .../src/plonk/lookup/verifier.rs | 0 .../src/plonk/permutation.rs | 0 .../src/plonk/permutation/keygen.rs | 0 .../src/plonk/permutation/prover.rs | 0 .../src/plonk/permutation/verifier.rs | 0 halo2_proofs_rm/src/plonk/prover.rs | 994 ++++++++++++++++++ .../src/plonk/shuffle.rs | 0 .../src/plonk/shuffle/prover.rs | 0 .../src/plonk/shuffle/verifier.rs | 0 .../src/plonk/vanishing.rs | 0 .../src/plonk/vanishing/prover.rs | 0 .../src/plonk/vanishing/verifier.rs | 0 .../src/plonk/verifier.rs | 0 .../src/plonk/verifier/batch.rs | 0 {halo2_proofs => halo2_proofs_rm}/src/poly.rs | 0 .../src/poly/commitment.rs | 0 .../src/poly/domain.rs | 0 .../src/poly/ipa/commitment.rs | 0 .../src/poly/ipa/commitment/prover.rs | 0 .../src/poly/ipa/commitment/verifier.rs | 0 .../src/poly/ipa/mod.rs | 0 .../src/poly/ipa/msm.rs | 0 .../src/poly/ipa/multiopen.rs | 0 .../src/poly/ipa/multiopen/prover.rs | 0 .../src/poly/ipa/multiopen/verifier.rs | 0 .../src/poly/ipa/strategy.rs | 0 .../src/poly/kzg/commitment.rs | 0 .../src/poly/kzg/mod.rs | 0 .../src/poly/kzg/msm.rs | 0 
.../src/poly/kzg/multiopen.rs | 0 .../src/poly/kzg/multiopen/gwc.rs | 0 .../src/poly/kzg/multiopen/gwc/prover.rs | 0 .../src/poly/kzg/multiopen/gwc/verifier.rs | 0 .../src/poly/kzg/multiopen/shplonk.rs | 0 .../src/poly/kzg/multiopen/shplonk/prover.rs | 0 .../poly/kzg/multiopen/shplonk/verifier.rs | 0 .../src/poly/kzg/strategy.rs | 0 .../src/poly/multiopen_test.rs | 0 .../src/poly/query.rs | 0 .../src/poly/strategy.rs | 0 .../src/transcript.rs | 0 .../tests/frontend_backend_split.rs | 0 .../tests/plonk_api.rs | 0 106 files changed, 2126 insertions(+), 1997 deletions(-) delete mode 100644 halo2_proofs/katex-header.html rename {halo2_proofs => halo2_proofs_rm}/CHANGELOG.md (100%) create mode 100644 halo2_proofs_rm/Cargo.toml rename {halo2_proofs => halo2_proofs_rm}/README.md (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/arithmetic.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/commit_zk.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/dev_lookup.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/fft.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/hashtocurve.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/benches/plonk.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/circuit-layout.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/proof-size.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/serialization.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/shuffle.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/shuffle_api.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/simple-example.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/two-chip.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/vector-mul.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/examples/vector-ops-unblinded.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/proptest-regressions/plonk/assigned.txt (100%) rename {halo2_proofs => 
halo2_proofs_rm}/proptest-regressions/plonk/circuit/compress_selectors.txt (100%) rename {halo2_proofs => halo2_proofs_rm}/src/arithmetic.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/floor_planner.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/floor_planner/single_pass.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/floor_planner/v1.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/floor_planner/v1/strategy.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/layouter.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/table_layouter.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/circuit/value.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/cost.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/cost_model.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/failure.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/failure/emitter.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/gates.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/graph.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/graph/layout.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/metadata.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/tfp.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/dev/util.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/helpers.rs (100%) create mode 100644 halo2_proofs_rm/src/lib.rs rename {halo2_proofs => halo2_proofs_rm}/src/multicore.rs (100%) create mode 100644 halo2_proofs_rm/src/plonk.rs rename {halo2_proofs => halo2_proofs_rm}/src/plonk/assigned.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/circuit.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/circuit/compress_selectors.rs (100%) rename {halo2_proofs => 
halo2_proofs_rm}/src/plonk/error.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/evaluation.rs (100%) create mode 100644 halo2_proofs_rm/src/plonk/keygen.rs rename {halo2_proofs => halo2_proofs_rm}/src/plonk/lookup.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/lookup/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/lookup/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/permutation.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/permutation/keygen.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/permutation/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/permutation/verifier.rs (100%) create mode 100644 halo2_proofs_rm/src/plonk/prover.rs rename {halo2_proofs => halo2_proofs_rm}/src/plonk/shuffle.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/shuffle/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/shuffle/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/vanishing.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/vanishing/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/vanishing/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/plonk/verifier/batch.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/commitment.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/domain.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/commitment.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/commitment/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/commitment/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/mod.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/msm.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/multiopen.rs 
(100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/multiopen/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/multiopen/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/ipa/strategy.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/commitment.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/mod.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/msm.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/gwc.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/gwc/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/gwc/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/shplonk.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/shplonk/prover.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/multiopen/shplonk/verifier.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/kzg/strategy.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/multiopen_test.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/query.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/poly/strategy.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/src/transcript.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/tests/frontend_backend_split.rs (100%) rename {halo2_proofs => halo2_proofs_rm}/tests/plonk_api.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index 0b5d9a1ccf..d7e905a8f0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ members = [ "halo2", "halo2_proofs", + "halo2_proofs_rm", # TODO: Remove "frontend", "middleware", "backend", diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index eb00beff44..daa4e64aa5 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -33,12 +33,13 @@ pub mod circuit { pub use halo2_common::plonk::Expression; mod 
evaluation; -mod keygen; +pub mod keygen; mod lookup; mod permutation; pub mod prover; mod shuffle; mod vanishing; +pub mod verifier; /// This is a verifying key which allows for the verification of proofs for a /// particular circuit. @@ -53,8 +54,9 @@ pub struct VerifyingKey { /// The representative of this `VerifyingKey` in transcripts. transcript_repr: C::Scalar, selectors: Vec>, + // TODO: Use setter/getter /// Whether selector compression is turned on or not. - compress_selectors: bool, + pub compress_selectors: bool, } // Current version of the VK @@ -333,7 +335,8 @@ pub struct PinnedVerificationKey<'a, C: CurveAffine> { /// particular circuit. #[derive(Clone, Debug)] pub struct ProvingKey { - vk: VerifyingKey, + // TODO: Add getter + pub vk: VerifyingKey, l0: Polynomial, l_last: Polynomial, l_active_row: Polynomial, diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 9b2c557f72..14ac9c3a3b 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -251,44 +251,6 @@ where )) } -// TODO: Move to halo2_proofs, as a legacy wrapper -/* -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// By default, selector compression is turned **off**. -pub fn keygen_vk<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - keygen_vk_custom(params, circuit, true) -} - -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// -/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
-pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; - let mut vk = keygen_vk_v2(params, &compiled_circuit)?; - vk.compress_selectors = compress_selectors; - Ok(vk) -} -*/ - /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. pub fn keygen_pk_v2<'params, C, P>( params: &P, @@ -381,21 +343,3 @@ where ev, }) } - -// TODO: Move to halo2_proofs, as a legacy wrapper -/* -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. -pub fn keygen_pk<'params, C, P, ConcreteCircuit>( - params: &P, - vk: VerifyingKey, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; - keygen_pk_v2(params, vk, &compiled_circuit) -} -*/ diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index 0ff79f680e..0fd17bbd99 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -124,7 +124,8 @@ pub struct ProverV2< // Circuit and setup fields params: &'params Scheme::ParamsProver, pk: &'a ProvingKey, - phases: Vec, + // TODO: Add getter + pub phases: Vec, // State instance: Vec>, advice: Vec>, @@ -742,118 +743,3 @@ impl< Ok(()) } } - -// TODO: Move this to halo2_proofs as a legacy wrapper -/* -/// This creates a proof for the provided `circuit` when given the public -/// parameters `params` and the proving key [`ProvingKey`] that was -/// generated previously for the same circuit. The provided `instances` -/// are zero-padded internally. 
-pub fn create_proof< - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - ConcreteCircuit: Circuit, ->( - params: &'params Scheme::ParamsProver, - pk: &ProvingKey, - circuits: &[ConcreteCircuit], - instances: &[&[&[Scheme::Scalar]]], - rng: R, - transcript: &mut T, -) -> Result<(), Error> -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - if circuits.len() != instances.len() { - return Err(Error::InvalidInstances); - } - let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; - let mut witness_calcs: Vec<_> = circuits - .iter() - .enumerate() - .map(|(i, circuit)| WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i])) - .collect(); - let mut prover = ProverV2::::new(params, pk, instances, rng, transcript)?; - let mut challenges = HashMap::new(); - let phases = prover.phases.clone(); - for phase in &phases { - println!("DBG phase {}", phase.0); - let mut witnesses = Vec::with_capacity(circuits.len()); - for witness_calc in witness_calcs.iter_mut() { - witnesses.push(witness_calc.calc(phase.0, &challenges)?); - } - challenges = prover.commit_phase(phase.0, witnesses).unwrap(); - } - prover.create_proof() -} - -#[test] -fn test_create_proof() { - use crate::{ - circuit::SimpleFloorPlanner, - plonk::{keygen_pk, keygen_vk, ConstraintSystem}, - poly::kzg::{ - commitment::{KZGCommitmentScheme, ParamsKZG}, - multiopen::ProverSHPLONK, - }, - transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, - }; - use halo2curves::bn256::Bn256; - use rand_core::OsRng; - - #[derive(Clone, Copy)] - struct MyCircuit; - - impl Circuit for MyCircuit { - type Config = (); - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - *self - } - - fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} - - fn synthesize( - &self, - 
_config: Self::Config, - _layouter: impl crate::circuit::Layouter, - ) -> Result<(), Error> { - Ok(()) - } - } - - let params: ParamsKZG = ParamsKZG::setup(3, OsRng); - let vk = keygen_vk(¶ms, &MyCircuit).expect("keygen_vk should not fail"); - let pk = keygen_pk(¶ms, vk, &MyCircuit).expect("keygen_pk should not fail"); - let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); - - // Create proof with wrong number of instances - let proof = create_proof::, ProverSHPLONK<_>, _, _, _, _>( - ¶ms, - &pk, - &[MyCircuit, MyCircuit], - &[], - OsRng, - &mut transcript, - ); - assert!(matches!(proof.unwrap_err(), Error::InvalidInstances)); - - // Create proof with correct number of instances - create_proof::, ProverSHPLONK<_>, _, _, _, _>( - ¶ms, - &pk, - &[MyCircuit, MyCircuit], - &[&[], &[]], - OsRng, - &mut transcript, - ) - .expect("proof generation should not fail"); -} -*/ diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs index aab7e192b1..919530fb9c 100644 --- a/backend/src/plonk/verifier.rs +++ b/backend/src/plonk/verifier.rs @@ -9,7 +9,7 @@ use super::{ use crate::arithmetic::compute_inner_product; use crate::plonk::lookup::verifier::lookup_read_permuted_commitments; use crate::plonk::permutation::verifier::permutation_read_product_commitments; -use crate::plonk::shuffle::verifier::shuffle_read_product_commitments; +use crate::plonk::shuffle::verifier::shuffle_read_product_commitment; use crate::poly::commitment::{CommitmentScheme, Verifier}; use crate::poly::VerificationStrategy; use crate::poly::{ diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 6b1cc59a83..40bd92a923 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -Fast PLONK-based zero-knowledge proving system with no trusted setup +TODO """ license = "MIT OR Apache-2.0" -repository = "https://github.com/zcash/halo2" -documentation = 
"https://docs.rs/halo2_proofs" +repository = "TODO" +documentation = "TODO" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -24,30 +24,6 @@ keywords = ["halo", "proofs", "zkp", "zkSNARKs"] all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] -[[bench]] -name = "arithmetic" -harness = false - -[[bench]] -name = "commit_zk" -harness = false - -[[bench]] -name = "hashtocurve" -harness = false - -[[bench]] -name = "plonk" -harness = false - -[[bench]] -name = "dev_lookup" -harness = false - -[[bench]] -name = "fft" -harness = false - [dependencies] backtrace = { version = "0.3", optional = true } ff = "0.13" @@ -61,6 +37,10 @@ rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" +halo2_middleware = { path = "../middleware" } +halo2_common = { path = "../common" } +halo2_backend = { path = "../backend" } +halo2_frontend = { path = "../frontend" } # Developer tooling dependencies plotters = { version = "0.3.0", default-features = false, optional = true } @@ -102,11 +82,3 @@ derive_serde = ["halo2curves/derive_serde"] [lib] bench = false - -[[example]] -name = "circuit-layout" -required-features = ["test-dev-graph"] - -[[example]] -name = "proof-size" -required-features = ["cost-estimator"] diff --git a/halo2_proofs/katex-header.html b/halo2_proofs/katex-header.html deleted file mode 100644 index 98e85904fa..0000000000 --- a/halo2_proofs/katex-header.html +++ /dev/null @@ -1,15 +0,0 @@ - - - - \ No newline at end of file diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index acc26aff15..76b89a91e3 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -1,21 +1,15 @@ -//! # halo2_proofs +//! Legacy halo2 API that wraps the frontend-backend split API. This crate doesn't implement any +//! 
core functionality, it just imports from the other crates and offers the legacy API in the same +//! module structure so that projects depending on halo2 can update their dependency towards it +//! without breaking. -#![cfg_attr(docsrs, feature(doc_cfg))] -// The actual lints we want to disable. -#![allow(clippy::op_ref, clippy::many_single_char_names)] -#![deny(rustdoc::broken_intra_doc_links)] -#![deny(missing_debug_implementations)] -#![deny(missing_docs)] -#![deny(unsafe_code)] +#![allow(dead_code)] // TODO: Remove +#![allow(unused_imports)] // TODO: Remove -pub mod arithmetic; -pub mod circuit; -pub use halo2curves; -mod multicore; pub mod plonk; -pub mod poly; -pub mod transcript; -pub mod dev; -mod helpers; -pub use helpers::SerdeFormat; +pub mod circuit { + pub use halo2_common::circuit::{Layouter, SimpleFloorPlanner}; +} +pub use halo2_common::poly; +pub use halo2_common::transcript; diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index eade0e5a74..837a5032bd 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -1,549 +1,8 @@ -//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] -//! that is designed specifically for the polynomial commitment scheme described -//! in the [Halo][halo] paper. -//! -//! [halo]: https://eprint.iacr.org/2019/1021 -//! 
[plonk]: https://eprint.iacr.org/2019/953 - -use blake2b_simd::Params as Blake2bParams; -use group::ff::{Field, FromUniformBytes, PrimeField}; - -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, - SerdePrimeField, -}; -use crate::poly::{ - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, Rotation, -}; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; - -mod assigned; -mod circuit; -mod error; -mod evaluation; -mod keygen; -mod lookup; -pub mod permutation; -mod shuffle; -mod vanishing; - -mod prover; -mod verifier; - -pub use assigned::*; -pub use circuit::*; -pub use error::*; -pub use keygen::*; -pub use prover::*; -pub use verifier::*; - -use evaluation::Evaluator; -use std::io; - -/// List of queries (columns and rotations) used by a circuit -#[derive(Debug, Clone)] -pub struct Queries { - /// List of unique advice queries - pub advice: Vec<(Column, Rotation)>, - /// List of unique instance queries - pub instance: Vec<(Column, Rotation)>, - /// List of unique fixed queries - pub fixed: Vec<(Column, Rotation)>, - /// Contains an integer for each advice column - /// identifying how many distinct queries it has - /// so far; should be same length as cs.num_advice_columns. - pub num_advice_queries: Vec, -} - -impl Queries { - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) 
- + 1 // for at least one row - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } -} - -/// This is a verifying key which allows for the verification of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct VerifyingKey { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - /// Cached maximum degree of `cs` (which doesn't change after construction). - cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, - selectors: Vec>, - /// Whether selector compression is turned on or not. 
- compress_selectors: bool, -} - -// Current version of the VK -const VERSION: u8 = 0x03; - -impl VerifyingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a verifying key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - // Version byte that will be checked on read. - writer.write_all(&[VERSION])?; - let k = &self.domain.k(); - assert!(*k <= C::Scalar::S); - // k value fits in 1 byte - writer.write_all(&[*k as u8])?; - writer.write_all(&[self.compress_selectors as u8])?; - writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; - for commitment in &self.fixed_commitments { - commitment.write(writer, format)?; - } - self.permutation.write(writer, format)?; - - if !self.compress_selectors { - assert!(self.selectors.is_empty()); - } - // write self.selectors - for selector in &self.selectors { - // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write - for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; - } - } - Ok(()) - } - - /// Reads a verification key from a buffer. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. 
- /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. - /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let mut version_byte = [0u8; 1]; - reader.read_exact(&mut version_byte)?; - if VERSION != version_byte[0] { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected version byte", - )); - } - - let mut k = [0u8; 1]; - reader.read_exact(&mut k)?; - let k = u8::from_le_bytes(k); - if k as u32 > C::Scalar::S { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - format!( - "circuit size value (k): {} exceeds maxium: {}", - k, - C::Scalar::S - ), - )); - } - let mut compress_selectors = [0u8; 1]; - reader.read_exact(&mut compress_selectors)?; - if compress_selectors[0] != 0 && compress_selectors[0] != 1 { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected compress_selectors not boolean", - )); - } - let compress_selectors = compress_selectors[0] == 1; - let (domain, cs, _) = keygen::create_domain::( - k as u32, - #[cfg(feature = "circuit-params")] - params, - ); - let mut num_fixed_columns = [0u8; 4]; - reader.read_exact(&mut num_fixed_columns)?; - let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - - let fixed_commitments: Vec<_> = (0..num_fixed_columns) - .map(|_| C::read(reader, format)) - .collect::>()?; - - let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - - let (cs, selectors) = if compress_selectors { - // read selectors - let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] - .into_iter() - .map(|mut selector| { - let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; - 
reader.read_exact(&mut selector_bytes)?; - for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); - } - Ok(selector) - }) - .collect::>()?; - let (cs, _) = cs.compress_selectors(selectors.clone()); - (cs, selectors) - } else { - // we still need to replace selectors with fixed Expressions in `cs` - let fake_selectors = vec![vec![]; cs.num_selectors]; - let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); - (cs, vec![]) - }; - - Ok(Self::from_parts( - domain, - fixed_commitments, - permutation, - cs, - selectors, - compress_selectors, - )) - } - - /// Writes a verifying key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a verification key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } +pub mod keygen; +pub mod prover; +pub mod verifier { + pub use halo2_backend::plonk::verifier::verify_proof; } -impl VerifyingKey { - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - 10 + (self.fixed_commitments.len() * C::byte_length(format)) - + self.permutation.bytes_length(format) - + self.selectors.len() - * (self - .selectors - .get(0) - .map(|selector| (selector.len() + 7) / 8) - .unwrap_or(0)) - } - - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - selectors: Vec>, - compress_selectors: bool, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. 
- let cs_degree = cs.degree(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - cs_degree, - // Temporary, this is not pinned. - transcript_repr: C::Scalar::ZERO, - selectors, - compress_selectors, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - let s = format!("{:?}", vk.pinned()); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } - - /// Obtains a pinned representation of this verification key that contains - /// the minimal information necessary to reconstruct the verification key. - pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { - PinnedVerificationKey { - base_modulus: C::Base::MODULUS, - scalar_modulus: C::Scalar::MODULUS, - domain: self.domain.pinned(), - fixed_commitments: &self.fixed_commitments, - permutation: &self.permutation, - cs: self.cs.pinned(), - } - } - - /// Returns commitments of fixed polynomials - pub fn fixed_commitments(&self) -> &Vec { - &self.fixed_commitments - } - - /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> &permutation::VerifyingKey { - &self.permutation - } - - /// Returns `ConstraintSystem` - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns representative of this `VerifyingKey` in transcripts - pub fn transcript_repr(&self) -> C::Scalar { - self.transcript_repr - } -} - -/// Minimal representation of a verification key that can be used to identify -/// its active contents. 
-#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedVerificationKey<'a, C: CurveAffine> { - base_modulus: &'static str, - scalar_modulus: &'static str, - domain: PinnedEvaluationDomain<'a, C::Scalar>, - cs: PinnedConstraintSystem<'a, C::Scalar>, - fixed_commitments: &'a Vec, - permutation: &'a permutation::VerifyingKey, -} - -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKey { - vk: VerifyingKey, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -impl ProvingKey -where - C::Scalar: FromUniformBytes<64>, -{ - /// Get the underlying [`VerifyingKey`]. - pub fn get_vk(&self) -> &VerifyingKey { - &self.vk - } - - /// Gets the total number of bytes in the serialization of `self` - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - let scalar_len = C::Scalar::default().to_repr().as_ref().len(); - self.vk.bytes_length(format) - + 12 - + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) - + polynomial_slice_byte_length(&self.fixed_values) - + polynomial_slice_byte_length(&self.fixed_polys) - + polynomial_slice_byte_length(&self.fixed_cosets) - + self.permutation.bytes_length() - } -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a proving key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. 
- /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - self.vk.write(writer, format)?; - self.l0.write(writer, format)?; - self.l_last.write(writer, format)?; - self.l_active_row.write(writer, format)?; - write_polynomial_slice(&self.fixed_values, writer, format)?; - write_polynomial_slice(&self.fixed_polys, writer, format)?; - write_polynomial_slice(&self.fixed_cosets, writer, format)?; - self.permutation.write(writer, format)?; - Ok(()) - } - - /// Reads a proving key from a buffer. - /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let vk = VerifyingKey::::read::( - reader, - format, - #[cfg(feature = "circuit-params")] - params, - )?; - let l0 = Polynomial::read(reader, format)?; - let l_last = Polynomial::read(reader, format)?; - let l_active_row = Polynomial::read(reader, format)?; - let fixed_values = read_polynomial_vec(reader, format)?; - let fixed_polys = read_polynomial_vec(reader, format)?; - let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = permutation::ProvingKey::read(reader, format)?; - let ev = Evaluator::new(vk.cs()); - Ok(Self { - vk, - l0, - l_last, - l_active_row, - fixed_values, - fixed_polys, - fixed_cosets, - permutation, - ev, - }) - } - - /// Writes a proving key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a proving key from a slice of bytes using [`Self::read`]. - pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - /// Get the underlying [`EvaluationDomain`]. 
- pub fn get_domain(&self) -> &EvaluationDomain { - &self.domain - } -} - -#[derive(Clone, Copy, Debug)] -struct Theta; -type ChallengeTheta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Beta; -type ChallengeBeta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Gamma; -type ChallengeGamma = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Y; -type ChallengeY = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X; -type ChallengeX = ChallengeScalar; +pub use halo2_common::plonk::ConstraintSystem; +pub use keygen::{keygen_pk, keygen_vk}; diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 81b890cf65..a00e824650 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -1,19 +1,9 @@ -#![allow(clippy::int_plus_one)] - -use std::ops::Range; - -use ff::{Field, FromUniformBytes}; -use group::Curve; - -use super::{ - circuit::{ - compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, Fixed, Instance, Selector, - }, - evaluation::Evaluator, - permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, +use halo2_backend::plonk::{ + circuit::{Assignment, Circuit, ConstraintSystem, Selector}, + keygen::{keygen_pk_v2, keygen_vk_v2}, + ProvingKey, VerifyingKey, }; -use crate::{ +use halo2_backend::{ arithmetic::{parallelize, CurveAffine}, circuit::Value, poly::{ @@ -21,234 +11,9 @@ use crate::{ EvaluationDomain, }, }; - -pub(crate) fn create_domain( - k: u32, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, -) -> ( - EvaluationDomain, - ConstraintSystem, - ConcreteCircuit::Config, -) -where - C: CurveAffine, - ConcreteCircuit: Circuit, -{ - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, params); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut 
cs); - - let degree = cs.degree(); - - let domain = EvaluationDomain::new(degree as u32, k); - - (domain, cs, config) -} - -/// Assembly to be used in circuit synthesis. -#[derive(Debug)] -pub(crate) struct Assembly { - pub(crate) k: u32, - pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::keygen::AssemblyFront, - pub(crate) selectors: Vec>, - // A range of available rows for assignment and copies. - pub(crate) usable_rows: Range, - pub(crate) _marker: std::marker::PhantomData, -} - -impl Assignment for Assembly { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.selectors[selector.0][row] = true; - - Ok(()) - } - - fn query_instance(&self, _: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - // There is no instance in this context. - Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about fixed columns here - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .fixed - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.permutation - .copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - column: Column, - from_row: usize, - to: Value>, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&from_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - let col = self - .fixed - .get_mut(column.index()) - .ok_or(Error::BoundsFailure)?; - - let filler = to.assign()?; - for row in self.usable_rows.clone().skip(from_row) { - col[row] = filler; - } - - Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - -/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
-pub fn keygen_vk_v2<'params, C, P>( - params: &P, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - C::Scalar: FromUniformBytes<64>, -{ - let cs2 = &circuit.cs; - let cs: ConstraintSystem = cs2.clone().into(); - let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? - .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - params - .commit_lagrange( - &Polynomial::new_lagrange_from_vec(poly.clone()), - Blind::default(), - ) - .to_affine() - }) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - Vec::new(), - false, - )) -} +use halo2_common::plonk::Error; +use halo2_frontend::circuit::compile_circuit; +use halo2_middleware::ff::FromUniformBytes; /// Generate a `VerifyingKey` from an instance of `Circuit`. /// By default, selector compression is turned **off**. @@ -285,99 +50,6 @@ where Ok(vk) } -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. 
-pub fn keygen_pk_v2<'params, C, P>( - params: &P, - vk: VerifyingKey, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, -{ - let cs = &circuit.cs; - - if (params.n() as usize) < vk.cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let fixed_polys: Vec<_> = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - vk.domain - .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) - }) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? - .build_pk(params, &vk.domain, &cs.permutation); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. 
- let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); - - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: circuit - .preprocessing - .fixed - .clone() - .into_iter() - .map(Polynomial::new_lagrange_from_vec) - .collect(), - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) -} - /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
pub fn keygen_pk<'params, C, P, ConcreteCircuit>( params: &P, diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 1168b1d519..014eaf4469 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -1,885 +1,28 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; -use group::Curve; -use rand_core::RngCore; -use std::collections::{BTreeSet, HashSet}; -use std::ops::RangeTo; -use std::{collections::HashMap, iter}; - -use super::{ - circuit::{ - compile_circuit, - sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, Fixed, Instance, Selector, - WitnessCalculator, - }, - lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, +use halo2_backend::plonk::{ + circuit::{Assignment, Circuit, ConstraintSystem, Selector}, + keygen::{keygen_pk_v2, keygen_vk_v2}, + prover::ProverV2, + ProvingKey, VerifyingKey, }; - -use crate::{ +use halo2_backend::{arithmetic::parallelize, poly::EvaluationDomain}; +use halo2_common::plonk::Error; +use halo2_common::{ arithmetic::{eval_polynomial, CurveAffine}, circuit::Value, - plonk::Assigned, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, }, }; -use crate::{ +use halo2_common::{ poly::batch_invert_assigned, transcript::{EncodedChallenge, TranscriptWrite}, }; -use group::prime::PrimeCurveAffine; - -/// Collection of instance data used during proving for a single circuit proof. -#[derive(Debug)] -struct InstanceSingle { - pub instance_values: Vec>, - pub instance_polys: Vec>, -} - -/// Collection of advice data used during proving for a single circuit proof. -#[derive(Debug, Clone)] -struct AdviceSingle { - pub advice_polys: Vec>, - pub advice_blinds: Vec>, -} - -/// The prover object used to create proofs interactively by passing the witnesses to commit at -/// each phase. This works for a single proof. 
This is a wrapper over ProverV2. -#[derive(Debug)] -pub struct ProverV2Single< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, ->(ProverV2<'a, 'params, Scheme, P, E, R, T>); - -impl< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - > ProverV2Single<'a, 'params, Scheme, P, E, R, T> -{ - /// Create a new prover object - pub fn new( - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - // TODO: If this was a vector the usage would be simpler - instance: &[&[Scheme::Scalar]], - rng: R, - transcript: &'a mut T, - ) -> Result - // TODO: Can I move this `where` to the struct definition? - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - Ok(Self(ProverV2::new( - params, - pk, - &[instance], - rng, - transcript, - )?)) - } - - /// Commit the `witness` at `phase` and return the challenges after `phase`. - pub fn commit_phase( - &mut self, - phase: u8, - witness: Vec>>>, - ) -> Result, Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - self.0.commit_phase(phase, vec![witness]) - } - - /// Finalizes the proof creation. - pub fn create_proof(self) -> Result<(), Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - self.0.create_proof() - } -} - -/// The prover object used to create proofs interactively by passing the witnesses to commit at -/// each phase. This supports batch proving. 
-#[derive(Debug)] -pub struct ProverV2< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, -> { - // Circuit and setup fields - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - phases: Vec, - // State - instance: Vec>, - advice: Vec>, - challenges: HashMap, - next_phase_index: usize, - rng: R, - transcript: &'a mut T, - _marker: std::marker::PhantomData<(P, E)>, -} - -impl< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - > ProverV2<'a, 'params, Scheme, P, E, R, T> -{ - /// Create a new prover object - pub fn new( - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - // TODO: If this was a vector the usage would be simpler - instances: &[&[&[Scheme::Scalar]]], - rng: R, - transcript: &'a mut T, - ) -> Result - // TODO: Can I move this `where` to the struct definition? - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - for instance in instances.iter() { - if instance.len() != pk.vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - // Hash verification key into transcript - pk.vk.hash_into(transcript)?; - - let meta = &pk.vk.cs; - let phases = meta.phases().collect(); - - let domain = &pk.vk.domain; - - // TODO: Name this better - let mut instance_fn = - |instance: &[&[Scheme::Scalar]]| -> Result, Error> { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - // dbg!(1, value); - transcript.common_scalar(*value)?; - } - *poly = *value; - } - Ok(poly) - }) - .collect::, _>>()?; - - if P::QUERY_INSTANCE { - let 
instance_commitments_projective: Vec<_> = instance_values - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) - .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - - for commitment in &instance_commitments { - // dbg!(2, commitment); - transcript.common_point(*commitment)?; - } - } - - let instance_polys: Vec<_> = instance_values - .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) - }) - .collect(); - - Ok(InstanceSingle { - instance_values, - instance_polys, - }) - }; - let instance: Vec> = instances - .iter() - .map(|instance| instance_fn(instance)) - .collect::, _>>()?; - - let advice = vec![ - AdviceSingle:: { - // Create vectors with empty polynomials to free space while they are not being used - advice_polys: vec![ - Polynomial::new_empty(0, Scheme::Scalar::ZERO); - meta.num_advice_columns - ], - advice_blinds: vec![Blind::default(); meta.num_advice_columns], - }; - instances.len() - ]; - let challenges = HashMap::::with_capacity(meta.num_challenges); - - Ok(ProverV2 { - params, - pk, - phases, - instance, - rng, - transcript, - advice, - challenges, - next_phase_index: 0, - _marker: std::marker::PhantomData {}, - }) - } - - /// Commit the `witness` at `phase` and return the challenges after `phase`. - pub fn commit_phase( - &mut self, - phase: u8, - witness: Vec>>>>, - ) -> Result, Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - let current_phase = match self.phases.get(self.next_phase_index) { - Some(phase) => phase, - None => { - panic!("TODO: Return Error instead. 
All phases already commited"); - } - }; - if phase != current_phase.0 { - panic!("TODO: Return Error instead. Committing invalid phase"); - } - - let params = self.params; - let meta = &self.pk.vk.cs; - - let mut rng = &mut self.rng; - - let advice = &mut self.advice; - let challenges = &mut self.challenges; - - let column_indices = meta - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - if witness.len() != advice.len() { - return Err(Error::Other("witness.len() != advice.len()".to_string())); - } - for witness_circuit in &witness { - if witness_circuit.len() != meta.num_advice_columns { - return Err(Error::Other(format!( - "unexpected length in witness_circuitk. Got {}, expected {}", - witness_circuit.len(), - meta.num_advice_columns, - ))); - } - // Check that all current_phase advice columns are Some, and their length is correct - for (column_index, advice_column) in witness_circuit.iter().enumerate() { - if column_indices.contains(&column_index) { - match advice_column { - None => { - return Err(Error::Other(format!( - "expected advice column with index {} at phase {}", - column_index, current_phase.0 - ))) - } - Some(advice_column) => { - if advice_column.len() != params.n() as usize { - return Err(Error::Other(format!( - "expected advice column with index {} to have length {}", - column_index, - params.n(), - ))); - } - } - } - } else if advice_column.is_some() { - return Err(Error::Other(format!( - "expected no advice column with index {} at phase {}", - column_index, current_phase.0 - ))); - }; - } - } - - let mut commit_phase_fn = |advice: &mut AdviceSingle, - witness: Vec< - Option, LagrangeCoeff>>, - >| - -> Result<(), Error> { - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); - let mut advice_values = - batch_invert_assigned::(witness.into_iter().flatten().collect()); - let 
unblinded_advice: HashSet = - HashSet::from_iter(meta.unblinded_advice_columns.clone()); - - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] { - assert_eq!(*cell, Scheme::Scalar::ZERO); - } - } - } - - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); - - for commitment in &advice_commitments { - self.transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] = advice_values; - advice.advice_blinds[*column_index] = blind; - } - Ok(()) - }; - - for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { - commit_phase_fn( - advice, - witness - .into_iter() - .map(|v| v.map(Polynomial::new_lagrange_from_vec)) - .collect(), - )?; - } - - for (index, phase) in meta.challenge_phase.iter().enumerate() { - if current_phase == phase { - let existing = - challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); - assert!(existing.is_none()); - } - } - - 
self.next_phase_index += 1; - Ok(challenges.clone()) - } - - /// Finalizes the proof creation. - pub fn create_proof(mut self) -> Result<(), Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - let params = self.params; - let meta = &self.pk.vk.cs; - // let queries = &self.pk.vk.queries; - let pk = self.pk; - let domain = &self.pk.vk.domain; - - let mut rng = self.rng; - - let instance = std::mem::take(&mut self.instance); - let advice = std::mem::take(&mut self.advice); - let mut challenges = self.challenges; - - assert_eq!(challenges.len(), meta.num_challenges); - let challenges = (0..meta.num_challenges) - .map(|index| challenges.remove(&index).unwrap()) - .collect::>(); - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); - - let mut lookups_fn = - |instance: &InstanceSingle, - advice: &AdviceSingle| - -> Result>, Error> { - meta.lookups - .iter() - .map(|lookup| { - lookup.commit_permuted( - pk, - params, - domain, - theta, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>() - }; - let lookups: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, Error> { - // Construct and commit to permuted values for each lookup - lookups_fn(instance, advice) - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); - - // Commit to permutation. 
- let permutations: Vec> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| { - meta.permutation.commit( - params, - pk, - &pk.permutation, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - beta, - gamma, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>()?; - - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - // Construct and commit to products for each lookup - lookups - .into_iter() - .map(|lookup| { - lookup.commit_product(pk, params, beta, gamma, &mut rng, self.transcript) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, _> { - // Compress expressions for each shuffle - meta.shuffles - .iter() - .map(|shuffle| { - shuffle.commit_product( - pk, - params, - domain, - theta, - gamma, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Commit to the vanishing argument's random polynomial for blinding h(x_3) - let vanishing = vanishing::Argument::commit(params, domain, &mut rng, self.transcript)?; - - // Obtain challenge for keeping all separate gates linearly independent - let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); - - // Calculate the advice polys - let advice: Vec> = advice - .into_iter() - .map( - |AdviceSingle { - advice_polys, - advice_blinds, - }| { - AdviceSingle { - advice_polys: advice_polys - .into_iter() - .map(|poly| domain.lagrange_to_coeff(poly)) - .collect::>(), - advice_blinds, - } - }, - ) - .collect(); - - // Evaluate the h(X) polynomial - let h_poly = pk.ev.evaluate_h( - pk, - &advice - .iter() - .map(|a| a.advice_polys.as_slice()) - .collect::>(), - &instance - .iter() - .map(|i| i.instance_polys.as_slice()) - .collect::>(), - &challenges, - *y, - *beta, - *gamma, - *theta, - &lookups, - &shuffles, - 
&permutations, - ); - - // Construct the vanishing argument's h(X) commitments - let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, self.transcript)?; - - let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); - let xn = x.pow([params.n()]); - - if P::QUERY_INSTANCE { - // Compute and hash instance evals for the circuit instance - for instance in instance.iter() { - // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = meta - .instance_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &instance.instance_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each instance column evaluation - for eval in instance_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - } - } - - // Compute and hash advice evals for the circuit instance - for advice in advice.iter() { - // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = meta - .advice_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &advice.advice_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - // dbg!(&advice_evals); - - // Hash each advice column evaluation - for eval in advice_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - } - - // Compute and hash fixed evals - let fixed_evals: Vec<_> = meta - .fixed_queries - .iter() - .map(|&(column, at)| { - eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) - }) - .collect(); - - // Hash each fixed column evaluation - for eval in fixed_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - - let vanishing = vanishing.evaluate(x, xn, domain, self.transcript)?; - - // Evaluate common permutation data - pk.permutation.evaluate(x, self.transcript)?; - - // Evaluate the permutations, if any, at omega^i x. 
- let permutations: Vec> = permutations - .into_iter() - .map(|permutation| -> Result<_, _> { - permutation.construct().evaluate(pk, x, self.transcript) - }) - .collect::, _>>()?; - - // Evaluate the lookups, if any, at omega^i x. - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|p| p.evaluate(pk, x, self.transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Evaluate the shuffles, if any, at omega^i x. - let shuffles: Vec>> = shuffles - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|p| p.evaluate(pk, x, self.transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let instances = instance - .iter() - .zip(advice.iter()) - .zip(permutations.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { - iter::empty() - .chain( - P::QUERY_INSTANCE - .then_some(meta.instance_queries.iter().map(move |&(column, at)| { - ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &instance.instance_polys[column.index()], - blind: Blind::default(), - } - })) - .into_iter() - .flatten(), - ) - .chain( - meta.advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - }), - ) - .chain(permutation.open(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) - }) - .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &pk.fixed_polys[column.index()], - blind: Blind::default(), - })) - .chain(pk.permutation.open(x)) - // We query the h(X) polynomial at x - .chain(vanishing.open(x)); - - let prover = P::new(params); - println!("DBG create_proof"); - prover - .create_proof(rng, self.transcript, instances) - .map_err(|_| 
Error::ConstraintSystemFailure)?; - - Ok(()) - } -} - -pub(crate) struct WitnessCollection<'a, F: Field> { - pub(crate) k: u32, - pub(crate) current_phase: sealed::Phase, - pub(crate) advice: Vec>>, - // pub(crate) unblinded_advice: HashSet, - pub(crate) challenges: &'a HashMap, - pub(crate) instances: &'a [&'a [F]], - pub(crate) usable_rows: RangeTo, - pub(crate) _marker: std::marker::PhantomData, -} - -impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} +use halo2_frontend::circuit::{compile_circuit, WitnessCalculator}; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_middleware::plonk::Assigned; +use rand_core::RngCore; +use std::{collections::HashMap, iter}; /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was diff --git a/halo2_proofs/CHANGELOG.md b/halo2_proofs_rm/CHANGELOG.md similarity index 100% rename from halo2_proofs/CHANGELOG.md rename to halo2_proofs_rm/CHANGELOG.md diff --git a/halo2_proofs_rm/Cargo.toml b/halo2_proofs_rm/Cargo.toml new file mode 100644 index 0000000000..1a746ad414 --- /dev/null +++ b/halo2_proofs_rm/Cargo.toml @@ -0,0 +1,112 @@ +[package] +name = "halo2_proofs_rm" +version = "0.3.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.66.0" +description = """ +Fast 
PLONK-based zero-knowledge proving system with no trusted setup +""" +license = "MIT OR Apache-2.0" +repository = "https://github.com/zcash/halo2" +documentation = "https://docs.rs/halo2_proofs" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[[bench]] +name = "arithmetic" +harness = false + +[[bench]] +name = "commit_zk" +harness = false + +[[bench]] +name = "hashtocurve" +harness = false + +[[bench]] +name = "plonk" +harness = false + +[[bench]] +name = "dev_lookup" +harness = false + +[[bench]] +name = "fft" +harness = false + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", default-features = false } +tracing = "0.1" +blake2b_simd = "1" # MSRV 1.66.0 +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +# Legacy circuit compatibility +halo2_legacy_pdqsort = { version = "0.1.0", optional = true } + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] 
+bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] +thread-safe-region = [] +sanity-checks = [] +batch = ["rand_core/getrandom"] +circuit-params = [] +heap-profiling = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false + +[[example]] +name = "circuit-layout" +required-features = ["test-dev-graph"] + +[[example]] +name = "proof-size" +required-features = ["cost-estimator"] diff --git a/halo2_proofs/README.md b/halo2_proofs_rm/README.md similarity index 100% rename from halo2_proofs/README.md rename to halo2_proofs_rm/README.md diff --git a/halo2_proofs/benches/arithmetic.rs b/halo2_proofs_rm/benches/arithmetic.rs similarity index 100% rename from halo2_proofs/benches/arithmetic.rs rename to halo2_proofs_rm/benches/arithmetic.rs diff --git a/halo2_proofs/benches/commit_zk.rs b/halo2_proofs_rm/benches/commit_zk.rs similarity index 100% rename from halo2_proofs/benches/commit_zk.rs rename to halo2_proofs_rm/benches/commit_zk.rs diff --git a/halo2_proofs/benches/dev_lookup.rs b/halo2_proofs_rm/benches/dev_lookup.rs similarity index 100% rename from halo2_proofs/benches/dev_lookup.rs rename to halo2_proofs_rm/benches/dev_lookup.rs diff --git a/halo2_proofs/benches/fft.rs b/halo2_proofs_rm/benches/fft.rs similarity index 100% rename from halo2_proofs/benches/fft.rs rename to halo2_proofs_rm/benches/fft.rs diff --git a/halo2_proofs/benches/hashtocurve.rs b/halo2_proofs_rm/benches/hashtocurve.rs similarity index 100% rename from halo2_proofs/benches/hashtocurve.rs rename to halo2_proofs_rm/benches/hashtocurve.rs diff --git a/halo2_proofs/benches/plonk.rs b/halo2_proofs_rm/benches/plonk.rs similarity index 100% rename from halo2_proofs/benches/plonk.rs rename to halo2_proofs_rm/benches/plonk.rs diff --git a/halo2_proofs/examples/circuit-layout.rs b/halo2_proofs_rm/examples/circuit-layout.rs similarity index 100% rename from halo2_proofs/examples/circuit-layout.rs rename to 
halo2_proofs_rm/examples/circuit-layout.rs diff --git a/halo2_proofs/examples/proof-size.rs b/halo2_proofs_rm/examples/proof-size.rs similarity index 100% rename from halo2_proofs/examples/proof-size.rs rename to halo2_proofs_rm/examples/proof-size.rs diff --git a/halo2_proofs/examples/serialization.rs b/halo2_proofs_rm/examples/serialization.rs similarity index 100% rename from halo2_proofs/examples/serialization.rs rename to halo2_proofs_rm/examples/serialization.rs diff --git a/halo2_proofs/examples/shuffle.rs b/halo2_proofs_rm/examples/shuffle.rs similarity index 100% rename from halo2_proofs/examples/shuffle.rs rename to halo2_proofs_rm/examples/shuffle.rs diff --git a/halo2_proofs/examples/shuffle_api.rs b/halo2_proofs_rm/examples/shuffle_api.rs similarity index 100% rename from halo2_proofs/examples/shuffle_api.rs rename to halo2_proofs_rm/examples/shuffle_api.rs diff --git a/halo2_proofs/examples/simple-example.rs b/halo2_proofs_rm/examples/simple-example.rs similarity index 100% rename from halo2_proofs/examples/simple-example.rs rename to halo2_proofs_rm/examples/simple-example.rs diff --git a/halo2_proofs/examples/two-chip.rs b/halo2_proofs_rm/examples/two-chip.rs similarity index 100% rename from halo2_proofs/examples/two-chip.rs rename to halo2_proofs_rm/examples/two-chip.rs diff --git a/halo2_proofs/examples/vector-mul.rs b/halo2_proofs_rm/examples/vector-mul.rs similarity index 100% rename from halo2_proofs/examples/vector-mul.rs rename to halo2_proofs_rm/examples/vector-mul.rs diff --git a/halo2_proofs/examples/vector-ops-unblinded.rs b/halo2_proofs_rm/examples/vector-ops-unblinded.rs similarity index 100% rename from halo2_proofs/examples/vector-ops-unblinded.rs rename to halo2_proofs_rm/examples/vector-ops-unblinded.rs diff --git a/halo2_proofs/proptest-regressions/plonk/assigned.txt b/halo2_proofs_rm/proptest-regressions/plonk/assigned.txt similarity index 100% rename from halo2_proofs/proptest-regressions/plonk/assigned.txt rename to 
halo2_proofs_rm/proptest-regressions/plonk/assigned.txt diff --git a/halo2_proofs/proptest-regressions/plonk/circuit/compress_selectors.txt b/halo2_proofs_rm/proptest-regressions/plonk/circuit/compress_selectors.txt similarity index 100% rename from halo2_proofs/proptest-regressions/plonk/circuit/compress_selectors.txt rename to halo2_proofs_rm/proptest-regressions/plonk/circuit/compress_selectors.txt diff --git a/halo2_proofs/src/arithmetic.rs b/halo2_proofs_rm/src/arithmetic.rs similarity index 100% rename from halo2_proofs/src/arithmetic.rs rename to halo2_proofs_rm/src/arithmetic.rs diff --git a/halo2_proofs/src/circuit.rs b/halo2_proofs_rm/src/circuit.rs similarity index 100% rename from halo2_proofs/src/circuit.rs rename to halo2_proofs_rm/src/circuit.rs diff --git a/halo2_proofs/src/circuit/floor_planner.rs b/halo2_proofs_rm/src/circuit/floor_planner.rs similarity index 100% rename from halo2_proofs/src/circuit/floor_planner.rs rename to halo2_proofs_rm/src/circuit/floor_planner.rs diff --git a/halo2_proofs/src/circuit/floor_planner/single_pass.rs b/halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs similarity index 100% rename from halo2_proofs/src/circuit/floor_planner/single_pass.rs rename to halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs diff --git a/halo2_proofs/src/circuit/floor_planner/v1.rs b/halo2_proofs_rm/src/circuit/floor_planner/v1.rs similarity index 100% rename from halo2_proofs/src/circuit/floor_planner/v1.rs rename to halo2_proofs_rm/src/circuit/floor_planner/v1.rs diff --git a/halo2_proofs/src/circuit/floor_planner/v1/strategy.rs b/halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs similarity index 100% rename from halo2_proofs/src/circuit/floor_planner/v1/strategy.rs rename to halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs diff --git a/halo2_proofs/src/circuit/layouter.rs b/halo2_proofs_rm/src/circuit/layouter.rs similarity index 100% rename from halo2_proofs/src/circuit/layouter.rs rename to 
halo2_proofs_rm/src/circuit/layouter.rs diff --git a/halo2_proofs/src/circuit/table_layouter.rs b/halo2_proofs_rm/src/circuit/table_layouter.rs similarity index 100% rename from halo2_proofs/src/circuit/table_layouter.rs rename to halo2_proofs_rm/src/circuit/table_layouter.rs diff --git a/halo2_proofs/src/circuit/value.rs b/halo2_proofs_rm/src/circuit/value.rs similarity index 100% rename from halo2_proofs/src/circuit/value.rs rename to halo2_proofs_rm/src/circuit/value.rs diff --git a/halo2_proofs/src/dev.rs b/halo2_proofs_rm/src/dev.rs similarity index 100% rename from halo2_proofs/src/dev.rs rename to halo2_proofs_rm/src/dev.rs diff --git a/halo2_proofs/src/dev/cost.rs b/halo2_proofs_rm/src/dev/cost.rs similarity index 100% rename from halo2_proofs/src/dev/cost.rs rename to halo2_proofs_rm/src/dev/cost.rs diff --git a/halo2_proofs/src/dev/cost_model.rs b/halo2_proofs_rm/src/dev/cost_model.rs similarity index 100% rename from halo2_proofs/src/dev/cost_model.rs rename to halo2_proofs_rm/src/dev/cost_model.rs diff --git a/halo2_proofs/src/dev/failure.rs b/halo2_proofs_rm/src/dev/failure.rs similarity index 100% rename from halo2_proofs/src/dev/failure.rs rename to halo2_proofs_rm/src/dev/failure.rs diff --git a/halo2_proofs/src/dev/failure/emitter.rs b/halo2_proofs_rm/src/dev/failure/emitter.rs similarity index 100% rename from halo2_proofs/src/dev/failure/emitter.rs rename to halo2_proofs_rm/src/dev/failure/emitter.rs diff --git a/halo2_proofs/src/dev/gates.rs b/halo2_proofs_rm/src/dev/gates.rs similarity index 100% rename from halo2_proofs/src/dev/gates.rs rename to halo2_proofs_rm/src/dev/gates.rs diff --git a/halo2_proofs/src/dev/graph.rs b/halo2_proofs_rm/src/dev/graph.rs similarity index 100% rename from halo2_proofs/src/dev/graph.rs rename to halo2_proofs_rm/src/dev/graph.rs diff --git a/halo2_proofs/src/dev/graph/layout.rs b/halo2_proofs_rm/src/dev/graph/layout.rs similarity index 100% rename from halo2_proofs/src/dev/graph/layout.rs rename to 
halo2_proofs_rm/src/dev/graph/layout.rs diff --git a/halo2_proofs/src/dev/metadata.rs b/halo2_proofs_rm/src/dev/metadata.rs similarity index 100% rename from halo2_proofs/src/dev/metadata.rs rename to halo2_proofs_rm/src/dev/metadata.rs diff --git a/halo2_proofs/src/dev/tfp.rs b/halo2_proofs_rm/src/dev/tfp.rs similarity index 100% rename from halo2_proofs/src/dev/tfp.rs rename to halo2_proofs_rm/src/dev/tfp.rs diff --git a/halo2_proofs/src/dev/util.rs b/halo2_proofs_rm/src/dev/util.rs similarity index 100% rename from halo2_proofs/src/dev/util.rs rename to halo2_proofs_rm/src/dev/util.rs diff --git a/halo2_proofs/src/helpers.rs b/halo2_proofs_rm/src/helpers.rs similarity index 100% rename from halo2_proofs/src/helpers.rs rename to halo2_proofs_rm/src/helpers.rs diff --git a/halo2_proofs_rm/src/lib.rs b/halo2_proofs_rm/src/lib.rs new file mode 100644 index 0000000000..acc26aff15 --- /dev/null +++ b/halo2_proofs_rm/src/lib.rs @@ -0,0 +1,21 @@ +//! # halo2_proofs + +#![cfg_attr(docsrs, feature(doc_cfg))] +// The actual lints we want to disable. +#![allow(clippy::op_ref, clippy::many_single_char_names)] +#![deny(rustdoc::broken_intra_doc_links)] +#![deny(missing_debug_implementations)] +#![deny(missing_docs)] +#![deny(unsafe_code)] + +pub mod arithmetic; +pub mod circuit; +pub use halo2curves; +mod multicore; +pub mod plonk; +pub mod poly; +pub mod transcript; + +pub mod dev; +mod helpers; +pub use helpers::SerdeFormat; diff --git a/halo2_proofs/src/multicore.rs b/halo2_proofs_rm/src/multicore.rs similarity index 100% rename from halo2_proofs/src/multicore.rs rename to halo2_proofs_rm/src/multicore.rs diff --git a/halo2_proofs_rm/src/plonk.rs b/halo2_proofs_rm/src/plonk.rs new file mode 100644 index 0000000000..eade0e5a74 --- /dev/null +++ b/halo2_proofs_rm/src/plonk.rs @@ -0,0 +1,549 @@ +//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] +//! that is designed specifically for the polynomial commitment scheme described +//! 
in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 +//! [plonk]: https://eprint.iacr.org/2019/953 + +use blake2b_simd::Params as Blake2bParams; +use group::ff::{Field, FromUniformBytes, PrimeField}; + +use crate::arithmetic::CurveAffine; +use crate::helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, + SerdePrimeField, +}; +use crate::poly::{ + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, + Polynomial, Rotation, +}; +use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use crate::SerdeFormat; + +mod assigned; +mod circuit; +mod error; +mod evaluation; +mod keygen; +mod lookup; +pub mod permutation; +mod shuffle; +mod vanishing; + +mod prover; +mod verifier; + +pub use assigned::*; +pub use circuit::*; +pub use error::*; +pub use keygen::*; +pub use prover::*; +pub use verifier::*; + +use evaluation::Evaluator; +use std::io; + +/// List of queries (columns and rotations) used by a circuit +#[derive(Debug, Clone)] +pub struct Queries { + /// List of unique advice queries + pub advice: Vec<(Column, Rotation)>, + /// List of unique instance queries + pub instance: Vec<(Column, Rotation)>, + /// List of unique fixed queries + pub fixed: Vec<(Column, Rotation)>, + /// Contains an integer for each advice column + /// identifying how many distinct queries it has + /// so far; should be same length as cs.num_advice_columns. + pub num_advice_queries: Vec, +} + +impl Queries { + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) 
+ + 1 // for at least one row + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } +} + +/// This is a verifying key which allows for the verification of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + /// Cached maximum degree of `cs` (which doesn't change after construction). + cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, + selectors: Vec>, + /// Whether selector compression is turned on or not. 
+ compress_selectors: bool, +} + +// Current version of the VK +const VERSION: u8 = 0x03; + +impl VerifyingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a verifying key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + // Version byte that will be checked on read. + writer.write_all(&[VERSION])?; + let k = &self.domain.k(); + assert!(*k <= C::Scalar::S); + // k value fits in 1 byte + writer.write_all(&[*k as u8])?; + writer.write_all(&[self.compress_selectors as u8])?; + writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; + for commitment in &self.fixed_commitments { + commitment.write(writer, format)?; + } + self.permutation.write(writer, format)?; + + if !self.compress_selectors { + assert!(self.selectors.is_empty()); + } + // write self.selectors + for selector in &self.selectors { + // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write + for bits in selector.chunks(8) { + writer.write_all(&[crate::helpers::pack(bits)])?; + } + } + Ok(()) + } + + /// Reads a verification key from a buffer. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. 
+ /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let mut version_byte = [0u8; 1]; + reader.read_exact(&mut version_byte)?; + if VERSION != version_byte[0] { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected version byte", + )); + } + + let mut k = [0u8; 1]; + reader.read_exact(&mut k)?; + let k = u8::from_le_bytes(k); + if k as u32 > C::Scalar::S { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!( + "circuit size value (k): {} exceeds maxium: {}", + k, + C::Scalar::S + ), + )); + } + let mut compress_selectors = [0u8; 1]; + reader.read_exact(&mut compress_selectors)?; + if compress_selectors[0] != 0 && compress_selectors[0] != 1 { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "unexpected compress_selectors not boolean", + )); + } + let compress_selectors = compress_selectors[0] == 1; + let (domain, cs, _) = keygen::create_domain::( + k as u32, + #[cfg(feature = "circuit-params")] + params, + ); + let mut num_fixed_columns = [0u8; 4]; + reader.read_exact(&mut num_fixed_columns)?; + let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); + + let fixed_commitments: Vec<_> = (0..num_fixed_columns) + .map(|_| C::read(reader, format)) + .collect::>()?; + + let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; + + let (cs, selectors) = if compress_selectors { + // read selectors + let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] + .into_iter() + .map(|mut selector| { + let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; + 
reader.read_exact(&mut selector_bytes)?; + for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { + crate::helpers::unpack(byte, bits); + } + Ok(selector) + }) + .collect::>()?; + let (cs, _) = cs.compress_selectors(selectors.clone()); + (cs, selectors) + } else { + // we still need to replace selectors with fixed Expressions in `cs` + let fake_selectors = vec![vec![]; cs.num_selectors]; + let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); + (cs, vec![]) + }; + + Ok(Self::from_parts( + domain, + fixed_commitments, + permutation, + cs, + selectors, + compress_selectors, + )) + } + + /// Writes a verifying key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a verification key from a slice of bytes using [`Self::read`]. + pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } +} + +impl VerifyingKey { + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + 10 + (self.fixed_commitments.len() * C::byte_length(format)) + + self.permutation.bytes_length(format) + + self.selectors.len() + * (self + .selectors + .get(0) + .map(|selector| (selector.len() + 7) / 8) + .unwrap_or(0)) + } + + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystem, + selectors: Vec>, + compress_selectors: bool, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. 
+ let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. + transcript_repr: C::Scalar::ZERO, + selectors, + compress_selectors, + }; + + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); + + let s = format!("{:?}", vk.pinned()); + + hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); + + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + + vk + } + + /// Hashes a verification key into a transcript. + pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; + + Ok(()) + } + + /// Obtains a pinned representation of this verification key that contains + /// the minimal information necessary to reconstruct the verification key. + pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { + PinnedVerificationKey { + base_modulus: C::Base::MODULUS, + scalar_modulus: C::Scalar::MODULUS, + domain: self.domain.pinned(), + fixed_commitments: &self.fixed_commitments, + permutation: &self.permutation, + cs: self.cs.pinned(), + } + } + + /// Returns commitments of fixed polynomials + pub fn fixed_commitments(&self) -> &Vec { + &self.fixed_commitments + } + + /// Returns `VerifyingKey` of permutation + pub fn permutation(&self) -> &permutation::VerifyingKey { + &self.permutation + } + + /// Returns `ConstraintSystem` + pub fn cs(&self) -> &ConstraintSystem { + &self.cs + } + + /// Returns representative of this `VerifyingKey` in transcripts + pub fn transcript_repr(&self) -> C::Scalar { + self.transcript_repr + } +} + +/// Minimal representation of a verification key that can be used to identify +/// its active contents. 
+#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedVerificationKey<'a, C: CurveAffine> { + base_modulus: &'static str, + scalar_modulus: &'static str, + domain: PinnedEvaluationDomain<'a, C::Scalar>, + cs: PinnedConstraintSystem<'a, C::Scalar>, + fixed_commitments: &'a Vec, + permutation: &'a permutation::VerifyingKey, +} + +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct ProvingKey { + vk: VerifyingKey, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: permutation::ProvingKey, + ev: Evaluator, +} + +impl ProvingKey +where + C::Scalar: FromUniformBytes<64>, +{ + /// Get the underlying [`VerifyingKey`]. + pub fn get_vk(&self) -> &VerifyingKey { + &self.vk + } + + /// Gets the total number of bytes in the serialization of `self` + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + let scalar_len = C::Scalar::default().to_repr().as_ref().len(); + self.vk.bytes_length(format) + + 12 + + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) + + polynomial_slice_byte_length(&self.fixed_values) + + polynomial_slice_byte_length(&self.fixed_polys) + + polynomial_slice_byte_length(&self.fixed_cosets) + + self.permutation.bytes_length() + } +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a proving key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. 
+ /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + self.vk.write(writer, format)?; + self.l0.write(writer, format)?; + self.l_last.write(writer, format)?; + self.l_active_row.write(writer, format)?; + write_polynomial_slice(&self.fixed_values, writer, format)?; + write_polynomial_slice(&self.fixed_polys, writer, format)?; + write_polynomial_slice(&self.fixed_cosets, writer, format)?; + self.permutation.write(writer, format)?; + Ok(()) + } + + /// Reads a proving key from a buffer. + /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + let vk = VerifyingKey::::read::( + reader, + format, + #[cfg(feature = "circuit-params")] + params, + )?; + let l0 = Polynomial::read(reader, format)?; + let l_last = Polynomial::read(reader, format)?; + let l_active_row = Polynomial::read(reader, format)?; + let fixed_values = read_polynomial_vec(reader, format)?; + let fixed_polys = read_polynomial_vec(reader, format)?; + let fixed_cosets = read_polynomial_vec(reader, format)?; + let permutation = permutation::ProvingKey::read(reader, format)?; + let ev = Evaluator::new(vk.cs()); + Ok(Self { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation, + ev, + }) + } + + /// Writes a proving key to a vector of bytes using [`Self::write`]. + pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a proving key from a slice of bytes using [`Self::read`]. + pub fn from_bytes>( + mut bytes: &[u8], + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, + ) -> io::Result { + Self::read::<_, ConcreteCircuit>( + &mut bytes, + format, + #[cfg(feature = "circuit-params")] + params, + ) + } +} + +impl VerifyingKey { + /// Get the underlying [`EvaluationDomain`]. 
+ pub fn get_domain(&self) -> &EvaluationDomain { + &self.domain + } +} + +#[derive(Clone, Copy, Debug)] +struct Theta; +type ChallengeTheta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Beta; +type ChallengeBeta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Gamma; +type ChallengeGamma = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct Y; +type ChallengeY = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X; +type ChallengeX = ChallengeScalar; diff --git a/halo2_proofs/src/plonk/assigned.rs b/halo2_proofs_rm/src/plonk/assigned.rs similarity index 100% rename from halo2_proofs/src/plonk/assigned.rs rename to halo2_proofs_rm/src/plonk/assigned.rs diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs_rm/src/plonk/circuit.rs similarity index 100% rename from halo2_proofs/src/plonk/circuit.rs rename to halo2_proofs_rm/src/plonk/circuit.rs diff --git a/halo2_proofs/src/plonk/circuit/compress_selectors.rs b/halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs similarity index 100% rename from halo2_proofs/src/plonk/circuit/compress_selectors.rs rename to halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs diff --git a/halo2_proofs/src/plonk/error.rs b/halo2_proofs_rm/src/plonk/error.rs similarity index 100% rename from halo2_proofs/src/plonk/error.rs rename to halo2_proofs_rm/src/plonk/error.rs diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs_rm/src/plonk/evaluation.rs similarity index 100% rename from halo2_proofs/src/plonk/evaluation.rs rename to halo2_proofs_rm/src/plonk/evaluation.rs diff --git a/halo2_proofs_rm/src/plonk/keygen.rs b/halo2_proofs_rm/src/plonk/keygen.rs new file mode 100644 index 0000000000..81b890cf65 --- /dev/null +++ b/halo2_proofs_rm/src/plonk/keygen.rs @@ -0,0 +1,394 @@ +#![allow(clippy::int_plus_one)] + +use std::ops::Range; + +use ff::{Field, FromUniformBytes}; +use group::Curve; + +use super::{ + circuit::{ + compile_circuit, Advice, Any, Assignment, Circuit, Column, 
CompiledCircuitV2, + ConstraintSystem, Fixed, Instance, Selector, + }, + evaluation::Evaluator, + permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, +}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + circuit::Value, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; + +pub(crate) fn create_domain( + k: u32, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, +) -> ( + EvaluationDomain, + ConstraintSystem, + ConcreteCircuit::Config, +) +where + C: CurveAffine, + ConcreteCircuit: Circuit, +{ + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, params); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + + let degree = cs.degree(); + + let domain = EvaluationDomain::new(degree as u32, k); + + (domain, cs, config) +} + +/// Assembly to be used in circuit synthesis. +#[derive(Debug)] +pub(crate) struct Assembly { + pub(crate) k: u32, + pub(crate) fixed: Vec, LagrangeCoeff>>, + pub(crate) permutation: permutation::keygen::AssemblyFront, + pub(crate) selectors: Vec>, + // A range of available rows for assignment and copies. + pub(crate) usable_rows: Range, + pub(crate) _marker: std::marker::PhantomData, +} + +impl Assignment for Assembly { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. 
+ } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.selectors[selector.0][row] = true; + + Ok(()) + } + + fn query_instance(&self, _: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + // There is no instance in this context. + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about fixed columns here + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .fixed + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.permutation + .copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + column: Column, + from_row: usize, + to: Value>, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&from_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + let col = self + .fixed + .get_mut(column.index()) + .ok_or(Error::BoundsFailure)?; + + let filler = to.assign()?; + for row in self.usable_rows.clone().skip(from_row) { + col[row] = filler; + } + + Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
+pub fn keygen_vk_v2<'params, C, P>( + params: &P, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + C::Scalar: FromUniformBytes<64>, +{ + let cs2 = &circuit.cs; + let cs: ConstraintSystem = cs2.clone().into(); + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + params + .commit_lagrange( + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + .to_affine() + }) + .collect(); + + Ok(VerifyingKey::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs, + Vec::new(), + false, + )) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// By default, selector compression is turned **off**. +pub fn keygen_vk<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + keygen_vk_custom(params, circuit, true) +} + +/// Generate a `VerifyingKey` from an instance of `Circuit`. +/// +/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
+pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( + params: &P, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, + C::Scalar: FromUniformBytes<64>, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + let mut vk = keygen_vk_v2(params, &compiled_circuit)?; + vk.compress_selectors = compress_selectors; + Ok(vk) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. +pub fn keygen_pk_v2<'params, C, P>( + params: &P, + vk: VerifyingKey, + circuit: &CompiledCircuitV2, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < vk.cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) + .collect(); + + let fixed_cosets = fixed_polys + .iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_pk(params, &vk.domain, &cs.permutation); + + // Compute l_0(X) + // TODO: this can be done more efficiently + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new(&vk.cs); + + Ok(ProvingKey { + vk, + l0, + l_last, + l_active_row, + fixed_values: circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(), + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
+pub fn keygen_pk<'params, C, P, ConcreteCircuit>( + params: &P, + vk: VerifyingKey, + circuit: &ConcreteCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params<'params, C>, + ConcreteCircuit: Circuit, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; + keygen_pk_v2(params, vk, &compiled_circuit) +} diff --git a/halo2_proofs/src/plonk/lookup.rs b/halo2_proofs_rm/src/plonk/lookup.rs similarity index 100% rename from halo2_proofs/src/plonk/lookup.rs rename to halo2_proofs_rm/src/plonk/lookup.rs diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs_rm/src/plonk/lookup/prover.rs similarity index 100% rename from halo2_proofs/src/plonk/lookup/prover.rs rename to halo2_proofs_rm/src/plonk/lookup/prover.rs diff --git a/halo2_proofs/src/plonk/lookup/verifier.rs b/halo2_proofs_rm/src/plonk/lookup/verifier.rs similarity index 100% rename from halo2_proofs/src/plonk/lookup/verifier.rs rename to halo2_proofs_rm/src/plonk/lookup/verifier.rs diff --git a/halo2_proofs/src/plonk/permutation.rs b/halo2_proofs_rm/src/plonk/permutation.rs similarity index 100% rename from halo2_proofs/src/plonk/permutation.rs rename to halo2_proofs_rm/src/plonk/permutation.rs diff --git a/halo2_proofs/src/plonk/permutation/keygen.rs b/halo2_proofs_rm/src/plonk/permutation/keygen.rs similarity index 100% rename from halo2_proofs/src/plonk/permutation/keygen.rs rename to halo2_proofs_rm/src/plonk/permutation/keygen.rs diff --git a/halo2_proofs/src/plonk/permutation/prover.rs b/halo2_proofs_rm/src/plonk/permutation/prover.rs similarity index 100% rename from halo2_proofs/src/plonk/permutation/prover.rs rename to halo2_proofs_rm/src/plonk/permutation/prover.rs diff --git a/halo2_proofs/src/plonk/permutation/verifier.rs b/halo2_proofs_rm/src/plonk/permutation/verifier.rs similarity index 100% rename from halo2_proofs/src/plonk/permutation/verifier.rs rename to halo2_proofs_rm/src/plonk/permutation/verifier.rs diff --git 
a/halo2_proofs_rm/src/plonk/prover.rs b/halo2_proofs_rm/src/plonk/prover.rs new file mode 100644 index 0000000000..1168b1d519 --- /dev/null +++ b/halo2_proofs_rm/src/plonk/prover.rs @@ -0,0 +1,994 @@ +use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use group::Curve; +use rand_core::RngCore; +use std::collections::{BTreeSet, HashSet}; +use std::ops::RangeTo; +use std::{collections::HashMap, iter}; + +use super::{ + circuit::{ + compile_circuit, + sealed::{self}, + Advice, Any, Assignment, Challenge, Circuit, Column, Fixed, Instance, Selector, + WitnessCalculator, + }, + lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, + ChallengeX, ChallengeY, Error, ProvingKey, +}; + +use crate::{ + arithmetic::{eval_polynomial, CurveAffine}, + circuit::Value, + plonk::Assigned, + poly::{ + commitment::{Blind, CommitmentScheme, Params, Prover}, + Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, + }, +}; +use crate::{ + poly::batch_invert_assigned, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use group::prime::PrimeCurveAffine; + +/// Collection of instance data used during proving for a single circuit proof. +#[derive(Debug)] +struct InstanceSingle { + pub instance_values: Vec>, + pub instance_polys: Vec>, +} + +/// Collection of advice data used during proving for a single circuit proof. +#[derive(Debug, Clone)] +struct AdviceSingle { + pub advice_polys: Vec>, + pub advice_blinds: Vec>, +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This works for a single proof. This is a wrapper over ProverV2. 
+#[derive(Debug)] +pub struct ProverV2Single< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +>(ProverV2<'a, 'params, Scheme, P, E, R, T>); + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2Single<'a, 'params, Scheme, P, E, R, T> +{ + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + // TODO: If this was a vector the usage would be simpler + instance: &[&[Scheme::Scalar]], + rng: R, + transcript: &'a mut T, + ) -> Result + // TODO: Can I move this `where` to the struct definition? + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + Ok(Self(ProverV2::new( + params, + pk, + &[instance], + rng, + transcript, + )?)) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.commit_phase(phase, vec![witness]) + } + + /// Finalizes the proof creation. + pub fn create_proof(self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.create_proof() + } +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This supports batch proving. 
+#[derive(Debug)] +pub struct ProverV2< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, +> { + // Circuit and setup fields + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + phases: Vec, + // State + instance: Vec>, + advice: Vec>, + challenges: HashMap, + next_phase_index: usize, + rng: R, + transcript: &'a mut T, + _marker: std::marker::PhantomData<(P, E)>, +} + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + > ProverV2<'a, 'params, Scheme, P, E, R, T> +{ + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + // TODO: If this was a vector the usage would be simpler + instances: &[&[&[Scheme::Scalar]]], + rng: R, + transcript: &'a mut T, + ) -> Result + // TODO: Can I move this `where` to the struct definition? + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + for instance in instances.iter() { + if instance.len() != pk.vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + // Hash verification key into transcript + pk.vk.hash_into(transcript)?; + + let meta = &pk.vk.cs; + let phases = meta.phases().collect(); + + let domain = &pk.vk.domain; + + // TODO: Name this better + let mut instance_fn = + |instance: &[&[Scheme::Scalar]]| -> Result, Error> { + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); + } + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + // dbg!(1, value); + transcript.common_scalar(*value)?; + } + *poly = *value; + } + Ok(poly) + }) + .collect::, _>>()?; + + if P::QUERY_INSTANCE { + let 
instance_commitments_projective: Vec<_> = instance_values + .iter() + .map(|poly| params.commit_lagrange(poly, Blind::default())) + .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); + + for commitment in &instance_commitments { + // dbg!(2, commitment); + transcript.common_point(*commitment)?; + } + } + + let instance_polys: Vec<_> = instance_values + .iter() + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) + }) + .collect(); + + Ok(InstanceSingle { + instance_values, + instance_polys, + }) + }; + let instance: Vec> = instances + .iter() + .map(|instance| instance_fn(instance)) + .collect::, _>>()?; + + let advice = vec![ + AdviceSingle:: { + // Create vectors with empty polynomials to free space while they are not being used + advice_polys: vec![ + Polynomial::new_empty(0, Scheme::Scalar::ZERO); + meta.num_advice_columns + ], + advice_blinds: vec![Blind::default(); meta.num_advice_columns], + }; + instances.len() + ]; + let challenges = HashMap::::with_capacity(meta.num_challenges); + + Ok(ProverV2 { + params, + pk, + phases, + instance, + rng, + transcript, + advice, + challenges, + next_phase_index: 0, + _marker: std::marker::PhantomData {}, + }) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let current_phase = match self.phases.get(self.next_phase_index) { + Some(phase) => phase, + None => { + panic!("TODO: Return Error instead. 
All phases already commited"); + } + }; + if phase != current_phase.0 { + panic!("TODO: Return Error instead. Committing invalid phase"); + } + + let params = self.params; + let meta = &self.pk.vk.cs; + + let mut rng = &mut self.rng; + + let advice = &mut self.advice; + let challenges = &mut self.challenges; + + let column_indices = meta + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + if witness.len() != advice.len() { + return Err(Error::Other("witness.len() != advice.len()".to_string())); + } + for witness_circuit in &witness { + if witness_circuit.len() != meta.num_advice_columns { + return Err(Error::Other(format!( + "unexpected length in witness_circuitk. Got {}, expected {}", + witness_circuit.len(), + meta.num_advice_columns, + ))); + } + // Check that all current_phase advice columns are Some, and their length is correct + for (column_index, advice_column) in witness_circuit.iter().enumerate() { + if column_indices.contains(&column_index) { + match advice_column { + None => { + return Err(Error::Other(format!( + "expected advice column with index {} at phase {}", + column_index, current_phase.0 + ))) + } + Some(advice_column) => { + if advice_column.len() != params.n() as usize { + return Err(Error::Other(format!( + "expected advice column with index {} to have length {}", + column_index, + params.n(), + ))); + } + } + } + } else if advice_column.is_some() { + return Err(Error::Other(format!( + "expected no advice column with index {} at phase {}", + column_index, current_phase.0 + ))); + }; + } + } + + let mut commit_phase_fn = |advice: &mut AdviceSingle, + witness: Vec< + Option, LagrangeCoeff>>, + >| + -> Result<(), Error> { + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let mut advice_values = + batch_invert_assigned::(witness.into_iter().flatten().collect()); + let 
unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); + + // Add blinding factors to advice columns + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] { + *cell = Scheme::Scalar::random(&mut rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] { + assert_eq!(*cell, Scheme::Scalar::ZERO); + } + } + } + + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) + .collect(); + let mut advice_commitments = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments, + ); + let advice_commitments = advice_commitments; + drop(advice_commitments_projective); + + for commitment in &advice_commitments { + self.transcript.write_point(*commitment)?; + } + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + Ok(()) + }; + + for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { + commit_phase_fn( + advice, + witness + .into_iter() + .map(|v| v.map(Polynomial::new_lagrange_from_vec)) + .collect(), + )?; + } + + for (index, phase) in meta.challenge_phase.iter().enumerate() { + if current_phase == phase { + let existing = + challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); + assert!(existing.is_none()); + } + } + + 
self.next_phase_index += 1; + Ok(challenges.clone()) + } + + /// Finalizes the proof creation. + pub fn create_proof(mut self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let params = self.params; + let meta = &self.pk.vk.cs; + // let queries = &self.pk.vk.queries; + let pk = self.pk; + let domain = &self.pk.vk.domain; + + let mut rng = self.rng; + + let instance = std::mem::take(&mut self.instance); + let advice = std::mem::take(&mut self.advice); + let mut challenges = self.challenges; + + assert_eq!(challenges.len(), meta.num_challenges); + let challenges = (0..meta.num_challenges) + .map(|index| challenges.remove(&index).unwrap()) + .collect::>(); + + // Sample theta challenge for keeping lookup columns linearly independent + let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); + + let mut lookups_fn = + |instance: &InstanceSingle, + advice: &AdviceSingle| + -> Result>, Error> { + meta.lookups + .iter() + .map(|lookup| { + lookup.commit_permuted( + pk, + params, + domain, + theta, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }; + let lookups: Vec>> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, Error> { + // Construct and commit to permuted values for each lookup + lookups_fn(instance, advice) + }) + .collect::, _>>()?; + + // Sample beta challenge + let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); + + // Commit to permutation. 
+ let permutations: Vec> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| { + meta.permutation.commit( + params, + pk, + &pk.permutation, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + beta, + gamma, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>()?; + + let lookups: Vec>> = lookups + .into_iter() + .map(|lookups| -> Result, _> { + // Construct and commit to products for each lookup + lookups + .into_iter() + .map(|lookup| { + lookup.commit_product(pk, params, beta, gamma, &mut rng, self.transcript) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + let shuffles: Vec>> = instance + .iter() + .zip(advice.iter()) + .map(|(instance, advice)| -> Result, _> { + // Compress expressions for each shuffle + meta.shuffles + .iter() + .map(|shuffle| { + shuffle.commit_product( + pk, + params, + domain, + theta, + gamma, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Commit to the vanishing argument's random polynomial for blinding h(x_3) + let vanishing = vanishing::Argument::commit(params, domain, &mut rng, self.transcript)?; + + // Obtain challenge for keeping all separate gates linearly independent + let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); + + // Calculate the advice polys + let advice: Vec> = advice + .into_iter() + .map( + |AdviceSingle { + advice_polys, + advice_blinds, + }| { + AdviceSingle { + advice_polys: advice_polys + .into_iter() + .map(|poly| domain.lagrange_to_coeff(poly)) + .collect::>(), + advice_blinds, + } + }, + ) + .collect(); + + // Evaluate the h(X) polynomial + let h_poly = pk.ev.evaluate_h( + pk, + &advice + .iter() + .map(|a| a.advice_polys.as_slice()) + .collect::>(), + &instance + .iter() + .map(|i| i.instance_polys.as_slice()) + .collect::>(), + &challenges, + *y, + *beta, + *gamma, + *theta, + &lookups, + &shuffles, + 
&permutations, + ); + + // Construct the vanishing argument's h(X) commitments + let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, self.transcript)?; + + let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); + let xn = x.pow([params.n()]); + + if P::QUERY_INSTANCE { + // Compute and hash instance evals for the circuit instance + for instance in instance.iter() { + // Evaluate polynomials at omega^i x + let instance_evals: Vec<_> = meta + .instance_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &instance.instance_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each instance column evaluation + for eval in instance_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + } + + // Compute and hash advice evals for the circuit instance + for advice in advice.iter() { + // Evaluate polynomials at omega^i x + let advice_evals: Vec<_> = meta + .advice_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &advice.advice_polys[column.index()], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + // dbg!(&advice_evals); + + // Hash each advice column evaluation + for eval in advice_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + + // Compute and hash fixed evals + let fixed_evals: Vec<_> = meta + .fixed_queries + .iter() + .map(|&(column, at)| { + eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) + }) + .collect(); + + // Hash each fixed column evaluation + for eval in fixed_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + + let vanishing = vanishing.evaluate(x, xn, domain, self.transcript)?; + + // Evaluate common permutation data + pk.permutation.evaluate(x, self.transcript)?; + + // Evaluate the permutations, if any, at omega^i x. 
+ let permutations: Vec> = permutations + .into_iter() + .map(|permutation| -> Result<_, _> { + permutation.construct().evaluate(pk, x, self.transcript) + }) + .collect::, _>>()?; + + // Evaluate the lookups, if any, at omega^i x. + let lookups: Vec>> = lookups + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Evaluate the shuffles, if any, at omega^i x. + let shuffles: Vec>> = shuffles + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + let instances = instance + .iter() + .zip(advice.iter()) + .zip(permutations.iter()) + .zip(lookups.iter()) + .zip(shuffles.iter()) + .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { + iter::empty() + .chain( + P::QUERY_INSTANCE + .then_some(meta.instance_queries.iter().map(move |&(column, at)| { + ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &instance.instance_polys[column.index()], + blind: Blind::default(), + } + })) + .into_iter() + .flatten(), + ) + .chain( + meta.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice.advice_polys[column.index()], + blind: advice.advice_blinds[column.index()], + }), + ) + .chain(permutation.open(pk, x)) + .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) + .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) + }) + .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &pk.fixed_polys[column.index()], + blind: Blind::default(), + })) + .chain(pk.permutation.open(x)) + // We query the h(X) polynomial at x + .chain(vanishing.open(x)); + + let prover = P::new(params); + println!("DBG create_proof"); + prover + .create_proof(rng, self.transcript, instances) + .map_err(|_| 
Error::ConstraintSystemFailure)?; + + Ok(()) + } +} + +pub(crate) struct WitnessCollection<'a, F: Field> { + pub(crate) k: u32, + pub(crate) current_phase: sealed::Phase, + pub(crate) advice: Vec>>, + // pub(crate) unblinded_advice: HashSet, + pub(crate) challenges: &'a HashMap, + pub(crate) instances: &'a [&'a [F]], + pub(crate) usable_rows: RangeTo, + pub(crate) _marker: std::marker::PhantomData, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. + if self.current_phase != column.column_type().phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// This creates a proof for the provided `circuit` when given the public +/// parameters `params` and the proving key [`ProvingKey`] that was +/// generated previously for the same circuit. The provided `instances` +/// are zero-padded internally. 
+pub fn create_proof< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + ConcreteCircuit: Circuit, +>( + params: &'params Scheme::ParamsProver, + pk: &ProvingKey, + circuits: &[ConcreteCircuit], + instances: &[&[&[Scheme::Scalar]]], + rng: R, + transcript: &mut T, +) -> Result<(), Error> +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + if circuits.len() != instances.len() { + return Err(Error::InvalidInstances); + } + let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; + let mut witness_calcs: Vec<_> = circuits + .iter() + .enumerate() + .map(|(i, circuit)| WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i])) + .collect(); + let mut prover = ProverV2::::new(params, pk, instances, rng, transcript)?; + let mut challenges = HashMap::new(); + let phases = prover.phases.clone(); + for phase in &phases { + println!("DBG phase {}", phase.0); + let mut witnesses = Vec::with_capacity(circuits.len()); + for witness_calc in witness_calcs.iter_mut() { + witnesses.push(witness_calc.calc(phase.0, &challenges)?); + } + challenges = prover.commit_phase(phase.0, witnesses).unwrap(); + } + prover.create_proof() +} + +#[test] +fn test_create_proof() { + use crate::{ + circuit::SimpleFloorPlanner, + plonk::{keygen_pk, keygen_vk, ConstraintSystem}, + poly::kzg::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + multiopen::ProverSHPLONK, + }, + transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, + }; + use halo2curves::bn256::Bn256; + use rand_core::OsRng; + + #[derive(Clone, Copy)] + struct MyCircuit; + + impl Circuit for MyCircuit { + type Config = (); + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + *self + } + + fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} + + fn synthesize( + &self, + 
_config: Self::Config, + _layouter: impl crate::circuit::Layouter, + ) -> Result<(), Error> { + Ok(()) + } + } + + let params: ParamsKZG = ParamsKZG::setup(3, OsRng); + let vk = keygen_vk(¶ms, &MyCircuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk, &MyCircuit).expect("keygen_pk should not fail"); + let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); + + // Create proof with wrong number of instances + let proof = create_proof::, ProverSHPLONK<_>, _, _, _, _>( + ¶ms, + &pk, + &[MyCircuit, MyCircuit], + &[], + OsRng, + &mut transcript, + ); + assert!(matches!(proof.unwrap_err(), Error::InvalidInstances)); + + // Create proof with correct number of instances + create_proof::, ProverSHPLONK<_>, _, _, _, _>( + ¶ms, + &pk, + &[MyCircuit, MyCircuit], + &[&[], &[]], + OsRng, + &mut transcript, + ) + .expect("proof generation should not fail"); +} diff --git a/halo2_proofs/src/plonk/shuffle.rs b/halo2_proofs_rm/src/plonk/shuffle.rs similarity index 100% rename from halo2_proofs/src/plonk/shuffle.rs rename to halo2_proofs_rm/src/plonk/shuffle.rs diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs_rm/src/plonk/shuffle/prover.rs similarity index 100% rename from halo2_proofs/src/plonk/shuffle/prover.rs rename to halo2_proofs_rm/src/plonk/shuffle/prover.rs diff --git a/halo2_proofs/src/plonk/shuffle/verifier.rs b/halo2_proofs_rm/src/plonk/shuffle/verifier.rs similarity index 100% rename from halo2_proofs/src/plonk/shuffle/verifier.rs rename to halo2_proofs_rm/src/plonk/shuffle/verifier.rs diff --git a/halo2_proofs/src/plonk/vanishing.rs b/halo2_proofs_rm/src/plonk/vanishing.rs similarity index 100% rename from halo2_proofs/src/plonk/vanishing.rs rename to halo2_proofs_rm/src/plonk/vanishing.rs diff --git a/halo2_proofs/src/plonk/vanishing/prover.rs b/halo2_proofs_rm/src/plonk/vanishing/prover.rs similarity index 100% rename from halo2_proofs/src/plonk/vanishing/prover.rs rename to 
halo2_proofs_rm/src/plonk/vanishing/prover.rs diff --git a/halo2_proofs/src/plonk/vanishing/verifier.rs b/halo2_proofs_rm/src/plonk/vanishing/verifier.rs similarity index 100% rename from halo2_proofs/src/plonk/vanishing/verifier.rs rename to halo2_proofs_rm/src/plonk/vanishing/verifier.rs diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs_rm/src/plonk/verifier.rs similarity index 100% rename from halo2_proofs/src/plonk/verifier.rs rename to halo2_proofs_rm/src/plonk/verifier.rs diff --git a/halo2_proofs/src/plonk/verifier/batch.rs b/halo2_proofs_rm/src/plonk/verifier/batch.rs similarity index 100% rename from halo2_proofs/src/plonk/verifier/batch.rs rename to halo2_proofs_rm/src/plonk/verifier/batch.rs diff --git a/halo2_proofs/src/poly.rs b/halo2_proofs_rm/src/poly.rs similarity index 100% rename from halo2_proofs/src/poly.rs rename to halo2_proofs_rm/src/poly.rs diff --git a/halo2_proofs/src/poly/commitment.rs b/halo2_proofs_rm/src/poly/commitment.rs similarity index 100% rename from halo2_proofs/src/poly/commitment.rs rename to halo2_proofs_rm/src/poly/commitment.rs diff --git a/halo2_proofs/src/poly/domain.rs b/halo2_proofs_rm/src/poly/domain.rs similarity index 100% rename from halo2_proofs/src/poly/domain.rs rename to halo2_proofs_rm/src/poly/domain.rs diff --git a/halo2_proofs/src/poly/ipa/commitment.rs b/halo2_proofs_rm/src/poly/ipa/commitment.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/commitment.rs rename to halo2_proofs_rm/src/poly/ipa/commitment.rs diff --git a/halo2_proofs/src/poly/ipa/commitment/prover.rs b/halo2_proofs_rm/src/poly/ipa/commitment/prover.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/commitment/prover.rs rename to halo2_proofs_rm/src/poly/ipa/commitment/prover.rs diff --git a/halo2_proofs/src/poly/ipa/commitment/verifier.rs b/halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/commitment/verifier.rs rename to 
halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs diff --git a/halo2_proofs/src/poly/ipa/mod.rs b/halo2_proofs_rm/src/poly/ipa/mod.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/mod.rs rename to halo2_proofs_rm/src/poly/ipa/mod.rs diff --git a/halo2_proofs/src/poly/ipa/msm.rs b/halo2_proofs_rm/src/poly/ipa/msm.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/msm.rs rename to halo2_proofs_rm/src/poly/ipa/msm.rs diff --git a/halo2_proofs/src/poly/ipa/multiopen.rs b/halo2_proofs_rm/src/poly/ipa/multiopen.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/multiopen.rs rename to halo2_proofs_rm/src/poly/ipa/multiopen.rs diff --git a/halo2_proofs/src/poly/ipa/multiopen/prover.rs b/halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/multiopen/prover.rs rename to halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs diff --git a/halo2_proofs/src/poly/ipa/multiopen/verifier.rs b/halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/multiopen/verifier.rs rename to halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs diff --git a/halo2_proofs/src/poly/ipa/strategy.rs b/halo2_proofs_rm/src/poly/ipa/strategy.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/strategy.rs rename to halo2_proofs_rm/src/poly/ipa/strategy.rs diff --git a/halo2_proofs/src/poly/kzg/commitment.rs b/halo2_proofs_rm/src/poly/kzg/commitment.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/commitment.rs rename to halo2_proofs_rm/src/poly/kzg/commitment.rs diff --git a/halo2_proofs/src/poly/kzg/mod.rs b/halo2_proofs_rm/src/poly/kzg/mod.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/mod.rs rename to halo2_proofs_rm/src/poly/kzg/mod.rs diff --git a/halo2_proofs/src/poly/kzg/msm.rs b/halo2_proofs_rm/src/poly/kzg/msm.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/msm.rs rename to 
halo2_proofs_rm/src/poly/kzg/msm.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen.rs b/halo2_proofs_rm/src/poly/kzg/multiopen.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/gwc.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc/prover.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/gwc/prover.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/gwc/verifier.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk/prover.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk/prover.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk/verifier.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk/verifier.rs rename to halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs diff --git a/halo2_proofs/src/poly/kzg/strategy.rs b/halo2_proofs_rm/src/poly/kzg/strategy.rs similarity index 100% rename from 
halo2_proofs/src/poly/kzg/strategy.rs rename to halo2_proofs_rm/src/poly/kzg/strategy.rs diff --git a/halo2_proofs/src/poly/multiopen_test.rs b/halo2_proofs_rm/src/poly/multiopen_test.rs similarity index 100% rename from halo2_proofs/src/poly/multiopen_test.rs rename to halo2_proofs_rm/src/poly/multiopen_test.rs diff --git a/halo2_proofs/src/poly/query.rs b/halo2_proofs_rm/src/poly/query.rs similarity index 100% rename from halo2_proofs/src/poly/query.rs rename to halo2_proofs_rm/src/poly/query.rs diff --git a/halo2_proofs/src/poly/strategy.rs b/halo2_proofs_rm/src/poly/strategy.rs similarity index 100% rename from halo2_proofs/src/poly/strategy.rs rename to halo2_proofs_rm/src/poly/strategy.rs diff --git a/halo2_proofs/src/transcript.rs b/halo2_proofs_rm/src/transcript.rs similarity index 100% rename from halo2_proofs/src/transcript.rs rename to halo2_proofs_rm/src/transcript.rs diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs_rm/tests/frontend_backend_split.rs similarity index 100% rename from halo2_proofs/tests/frontend_backend_split.rs rename to halo2_proofs_rm/tests/frontend_backend_split.rs diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs_rm/tests/plonk_api.rs similarity index 100% rename from halo2_proofs/tests/plonk_api.rs rename to halo2_proofs_rm/tests/plonk_api.rs From 9b73ca4f0e96e23119f3fbc13fcc454039a8d3c7 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Wed, 24 Jan 2024 16:07:33 +0000 Subject: [PATCH 51/79] Fix test imports --- common/src/dev.rs | 2 +- common/src/dev/cost_model.rs | 2 +- common/src/dev/gates.rs | 2 +- common/src/dev/graph.rs | 2 +- common/src/dev/graph/layout.rs | 2 +- common/src/dev/tfp.rs | 2 +- common/src/plonk/circuit.rs | 2 +- common/src/poly/ipa/commitment.rs | 4 ++-- common/src/poly/kzg/commitment.rs | 2 +- common/src/poly/kzg/multiopen/shplonk.rs | 2 +- common/src/poly/multiopen_test.rs | 2 +- 11 files changed, 12 insertions(+), 12 deletions(-) diff --git a/common/src/dev.rs b/common/src/dev.rs 
index b6a108057b..11f172451c 100644 --- a/common/src/dev.rs +++ b/common/src/dev.rs @@ -188,7 +188,7 @@ impl Mul for Value { /// plonk::{Advice, Any, Circuit, Column, ConstraintSystem, Error, Selector}, /// poly::Rotation, /// }; -/// use ff::PrimeField; +/// use halo2_middleware::ff::PrimeField; /// use halo2curves::pasta::Fp; /// const K: u32 = 5; /// diff --git a/common/src/dev/cost_model.rs b/common/src/dev/cost_model.rs index 51b3a1ad76..86ce03800a 100644 --- a/common/src/dev/cost_model.rs +++ b/common/src/dev/cost_model.rs @@ -5,7 +5,7 @@ use std::collections::HashSet; use std::{iter, num::ParseIntError, str::FromStr}; use crate::plonk::Circuit; -use ff::{Field, FromUniformBytes}; +use halo2_middleware::ff::{Field, FromUniformBytes}; use serde::Deserialize; use serde_derive::Serialize; diff --git a/common/src/dev/gates.rs b/common/src/dev/gates.rs index 41ab2edd9d..dbe692fcac 100644 --- a/common/src/dev/gates.rs +++ b/common/src/dev/gates.rs @@ -28,7 +28,7 @@ struct Gate { /// # Examples /// /// ``` -/// use ff::Field; +/// use halo2_middleware::ff::Field; /// use halo2_proofs::{ /// circuit::{Layouter, SimpleFloorPlanner}, /// dev::CircuitGates, diff --git a/common/src/dev/graph.rs b/common/src/dev/graph.rs index 11654fe415..381cd6bd37 100644 --- a/common/src/dev/graph.rs +++ b/common/src/dev/graph.rs @@ -1,4 +1,4 @@ -use ff::Field; +use halo2_middleware::ff::Field; use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; use crate::{ diff --git a/common/src/dev/graph/layout.rs b/common/src/dev/graph/layout.rs index 94bd7eea14..4777e05ff8 100644 --- a/common/src/dev/graph/layout.rs +++ b/common/src/dev/graph/layout.rs @@ -1,4 +1,4 @@ -use ff::Field; +use halo2_middleware::ff::Field; use plotters::{ coord::Shift, prelude::{DrawingArea, DrawingAreaErrorKind, DrawingBackend}, diff --git a/common/src/dev/tfp.rs b/common/src/dev/tfp.rs index ec1a195f6e..84bf1cc8db 100644 --- a/common/src/dev/tfp.rs +++ b/common/src/dev/tfp.rs @@ -27,7 +27,7 @@ 
use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance} /// # Examples /// /// ``` -/// use ff::Field; +/// use halo2_middleware::ff::Field; /// use halo2_proofs::{ /// circuit::{floor_planner, Layouter, Value}, /// dev::TracingFloorPlanner, diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index b226c70449..b1bce1305b 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -118,7 +118,7 @@ impl SealedPhase for super::ThirdPhase { /// circuit::{Chip, Layouter, Value}, /// plonk::{Advice, Column, Error, Selector}, /// }; -/// use ff::Field; +/// use halo2_middleware::ff::Field; /// # use halo2_proofs::plonk::Fixed; /// /// struct Config { diff --git a/common/src/poly/ipa/commitment.rs b/common/src/poly/ipa/commitment.rs index 0058262601..b6e0945aab 100644 --- a/common/src/poly/ipa/commitment.rs +++ b/common/src/poly/ipa/commitment.rs @@ -234,8 +234,8 @@ mod test { use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; use crate::poly::ipa::msm::MSMIPA; - use ff::Field; use group::Curve; + use halo2_middleware::ff::Field; #[test] fn test_commit_lagrange_epaffine() { @@ -291,7 +291,7 @@ mod test { fn test_opening_proof() { const K: u32 = 6; - use ff::Field; + use halo2_middleware::ff::Field; use rand_core::OsRng; use super::super::commitment::{Blind, Params}; diff --git a/common/src/poly/kzg/commitment.rs b/common/src/poly/kzg/commitment.rs index 07c6eb8519..3a223173d5 100644 --- a/common/src/poly/kzg/commitment.rs +++ b/common/src/poly/kzg/commitment.rs @@ -365,7 +365,7 @@ mod test { use crate::poly::commitment::ParamsProver; use crate::poly::commitment::{Blind, Params}; use crate::poly::kzg::commitment::ParamsKZG; - use ff::Field; + use halo2_middleware::ff::Field; #[test] fn test_commit_lagrange() { diff --git a/common/src/poly/kzg/multiopen/shplonk.rs b/common/src/poly/kzg/multiopen/shplonk.rs index 80cad76fa0..5f963f4049 100644 --- a/common/src/poly/kzg/multiopen/shplonk.rs 
+++ b/common/src/poly/kzg/multiopen/shplonk.rs @@ -142,7 +142,7 @@ where #[cfg(test)] mod proptests { use super::{construct_intermediate_sets, Commitment, IntermediateSets}; - use ff::FromUniformBytes; + use halo2_middleware::ff::FromUniformBytes; use halo2curves::pasta::Fp; use proptest::{collection::vec, prelude::*, sample::select}; use std::convert::TryFrom; diff --git a/common/src/poly/multiopen_test.rs b/common/src/poly/multiopen_test.rs index 47c6731167..7ee3e7c9d3 100644 --- a/common/src/poly/multiopen_test.rs +++ b/common/src/poly/multiopen_test.rs @@ -14,8 +14,8 @@ mod test { Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, TranscriptReadBuffer, TranscriptWriterBuffer, }; - use ff::WithSmallOrderMulGroup; use group::Curve; + use halo2_middleware::ff::WithSmallOrderMulGroup; use rand_core::OsRng; #[test] From 41f20034fe7a6ef4211ab983b36368896b51f3cd Mon Sep 17 00:00:00 2001 From: Eduard S Date: Wed, 24 Jan 2024 16:31:47 +0000 Subject: [PATCH 52/79] Enable dev --- common/src/dev.rs | 84 +++++++++++++-------------------- common/src/dev/cost.rs | 3 +- common/src/dev/tfp.rs | 3 +- common/src/lib.rs | 2 +- common/src/plonk/permutation.rs | 4 +- 5 files changed, 40 insertions(+), 56 deletions(-) diff --git a/common/src/dev.rs b/common/src/dev.rs index 11f172451c..84b9ee6c54 100644 --- a/common/src/dev.rs +++ b/common/src/dev.rs @@ -9,17 +9,17 @@ use blake2b_simd::blake2b; use halo2_middleware::ff::Field; use halo2_middleware::ff::FromUniformBytes; -use crate::plonk::permutation::keygen::Assembly; use crate::{ circuit, plonk::{ permutation, sealed::{self, SealedPhase}, - Assigned, Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, - FloorPlanner, Phase, Selector, + Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, FloorPlanner, Phase, + Selector, }, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; use 
crate::multicore::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, @@ -311,7 +311,7 @@ pub struct MockProver { challenges: Vec, - permutation: permutation::keygen::Assembly, + permutation: permutation::AssemblyFront, // A range of available rows for assignment and copies. usable_rows: Range, @@ -670,7 +670,7 @@ impl + Ord> MockProver { }; cs.num_advice_columns ]; - let permutation = permutation::keygen::Assembly::new(n, &cs.permutation); + let permutation = permutation::AssemblyFront::new(n, &cs.permutation); let constants = cs.constants.clone(); // Use hash chain to derive deterministic challenges for testing @@ -1111,54 +1111,36 @@ impl + Ord> MockProver { .collect::>() }); - let mapping = self.permutation.mapping(); // Check that permutations preserve the original values of the cells. - let perm_errors = { - // Original values of columns involved in the permutation. - let original = |column, row| { - self.cs - .permutation - .get_columns() - .get(column) - .map(|c: &Column| match c.column_type() { - Any::Advice(_) => self.advice[c.index()][row], - Any::Fixed => self.fixed[c.index()][row], - Any::Instance => { - let cell: &InstanceValue = &self.instance[c.index()][row]; - CellValue::Assigned(cell.value()) - } - }) - .unwrap() - }; - - // Iterate over each column of the permutation - mapping.enumerate().flat_map(move |(column, values)| { - // Iterate over each row of the column to check that the cell's - // value is preserved by the mapping. 
- values - .enumerate() - .filter_map(move |(row, cell)| { - let original_cell = original(column, row); - let permuted_cell = original(cell.0, cell.1); - if original_cell == permuted_cell { - None - } else { - let columns = self.cs.permutation.get_columns(); - let column = columns.get(column).unwrap(); - Some(VerifyFailure::Permutation { - column: (*column).into(), - location: FailureLocation::find( - &self.regions, - row, - Some(column).into_iter().cloned().collect(), - ), - }) - } - }) - .collect::>() - }) + // Original values of columns involved in the permutation. + let original = |column: Column, row: usize| match column.column_type() { + Any::Advice(_) => self.advice[column.index()][row], + Any::Fixed => self.fixed[column.index()][row], + Any::Instance => { + let cell: &InstanceValue = &self.instance[column.index()][row]; + CellValue::Assigned(cell.value()) + } }; + // Iterate over each pair of copied cells to check that the cell's value is preserved + // by the copy. + let perm_errors = self.permutation.copies.iter().flat_map(|(cell_a, cell_b)| { + let original_cell = original(cell_a.column, cell_a.row); + let permuted_cell = original(cell_b.column, cell_b.row); + if original_cell == permuted_cell { + None + } else { + Some(VerifyFailure::Permutation { + column: cell_a.column.into(), + location: FailureLocation::find( + &self.regions, + cell_a.row, + Some(&cell_a.column).into_iter().cloned().collect(), + ), + }) + } + }); + let mut errors: Vec<_> = iter::empty() .chain(selector_errors) .chain(gate_errors) @@ -1258,7 +1240,7 @@ impl + Ord> MockProver { } /// Returns the permutation argument (`Assembly`) used within a MockProver instance. 
- pub fn permutation(&self) -> &Assembly { + pub fn permutation(&self) -> &permutation::AssemblyFront { &self.permutation } } diff --git a/common/src/dev/cost.rs b/common/src/dev/cost.rs index 2f03dff9df..d1c6371cad 100644 --- a/common/src/dev/cost.rs +++ b/common/src/dev/cost.rs @@ -14,9 +14,10 @@ use halo2_middleware::poly::Rotation; use crate::{ circuit::{layouter::RegionColumn, Value}, - plonk::{Assigned, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, + plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; /// Measures a circuit to determine its costs, and explain what contributes to them. #[allow(dead_code)] diff --git a/common/src/dev/tfp.rs b/common/src/dev/tfp.rs index 84bf1cc8db..3339bf8cd7 100644 --- a/common/src/dev/tfp.rs +++ b/common/src/dev/tfp.rs @@ -8,9 +8,10 @@ use crate::{ layouter::{RegionLayouter, SyncDeps}, AssignedCell, Cell, Layouter, Region, Table, Value, }, - plonk::{Assigned, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, + plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, }; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::plonk::Assigned; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. /// diff --git a/common/src/lib.rs b/common/src/lib.rs index 48cc4b9884..32ddbd0a56 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -21,7 +21,7 @@ pub mod transcript; // TODO: Move to backend for now. The end goal is to have this in the frontend, but it requires // many changes because the MockProver heavliy uses backend types. 
-// pub mod dev; +pub mod dev; pub mod helpers; pub use helpers::SerdeFormat; diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index b49761231e..2c6a7416b0 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -88,8 +88,8 @@ impl Argument { // TODO: Move to frontend #[derive(Clone, Debug)] pub struct AssemblyFront { - n: usize, - columns: Vec>, + pub n: usize, + pub columns: Vec>, pub copies: Vec<(Cell, Cell)>, } From 933c399265e3736fbe8c2a1b533c7b6243fab4f5 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Wed, 24 Jan 2024 16:39:55 +0000 Subject: [PATCH 53/79] Move dev to frontend --- common/src/lib.rs | 3 --- {common => frontend}/src/dev.rs | 6 +++--- {common => frontend}/src/dev/cost.rs | 4 ++-- {common => frontend}/src/dev/cost_model.rs | 0 {common => frontend}/src/dev/failure.rs | 6 ++---- {common => frontend}/src/dev/failure/emitter.rs | 6 ++---- {common => frontend}/src/dev/gates.rs | 6 ++---- {common => frontend}/src/dev/graph.rs | 0 {common => frontend}/src/dev/graph/layout.rs | 0 {common => frontend}/src/dev/metadata.rs | 2 +- {common => frontend}/src/dev/tfp.rs | 10 ++++------ {common => frontend}/src/dev/util.rs | 2 +- frontend/src/lib.rs | 1 + 13 files changed, 18 insertions(+), 28 deletions(-) rename {common => frontend}/src/dev.rs (99%) rename {common => frontend}/src/dev/cost.rs (99%) rename {common => frontend}/src/dev/cost_model.rs (100%) rename {common => frontend}/src/dev/failure.rs (99%) rename {common => frontend}/src/dev/failure/emitter.rs (99%) rename {common => frontend}/src/dev/gates.rs (99%) rename {common => frontend}/src/dev/graph.rs (100%) rename {common => frontend}/src/dev/graph/layout.rs (100%) rename {common => frontend}/src/dev/metadata.rs (99%) rename {common => frontend}/src/dev/tfp.rs (98%) rename {common => frontend}/src/dev/util.rs (97%) diff --git a/common/src/lib.rs b/common/src/lib.rs index 32ddbd0a56..75fbee1c0a 100644 --- a/common/src/lib.rs +++ 
b/common/src/lib.rs @@ -19,9 +19,6 @@ pub mod plonk; pub mod poly; pub mod transcript; -// TODO: Move to backend for now. The end goal is to have this in the frontend, but it requires -// many changes because the MockProver heavliy uses backend types. -pub mod dev; pub mod helpers; pub use helpers::SerdeFormat; diff --git a/common/src/dev.rs b/frontend/src/dev.rs similarity index 99% rename from common/src/dev.rs rename to frontend/src/dev.rs index 84b9ee6c54..4e93f36882 100644 --- a/common/src/dev.rs +++ b/frontend/src/dev.rs @@ -9,7 +9,7 @@ use blake2b_simd::blake2b; use halo2_middleware::ff::Field; use halo2_middleware::ff::FromUniformBytes; -use crate::{ +use halo2_common::{ circuit, plonk::{ permutation, @@ -21,7 +21,7 @@ use crate::{ use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use halo2_middleware::plonk::Assigned; -use crate::multicore::{ +use halo2_common::multicore::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut, }; @@ -542,7 +542,7 @@ impl Assignment for MockProver { left_row: usize, right_column: Column, right_row: usize, - ) -> Result<(), crate::plonk::Error> { + ) -> Result<(), halo2_common::plonk::Error> { if !self.in_phase(FirstPhase) { return Ok(()); } diff --git a/common/src/dev/cost.rs b/frontend/src/dev/cost.rs similarity index 99% rename from common/src/dev/cost.rs rename to frontend/src/dev/cost.rs index d1c6371cad..4fb13a39fe 100644 --- a/common/src/dev/cost.rs +++ b/frontend/src/dev/cost.rs @@ -12,7 +12,7 @@ use group::prime::PrimeGroup; use halo2_middleware::ff::{Field, PrimeField}; use halo2_middleware::poly::Rotation; -use crate::{ +use halo2_common::{ circuit::{layouter::RegionColumn, Value}, plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, }; @@ -235,7 +235,7 @@ impl Assignment for Layout { l_row: usize, r_col: Column, r_row: usize, - ) -> Result<(), crate::plonk::Error> { + ) -> Result<(), 
halo2_common::plonk::Error> { self.equality.push((l_col, l_row, r_col, r_row)); Ok(()) } diff --git a/common/src/dev/cost_model.rs b/frontend/src/dev/cost_model.rs similarity index 100% rename from common/src/dev/cost_model.rs rename to frontend/src/dev/cost_model.rs diff --git a/common/src/dev/failure.rs b/frontend/src/dev/failure.rs similarity index 99% rename from common/src/dev/failure.rs rename to frontend/src/dev/failure.rs index ee369ba2a8..150fdbfbb5 100644 --- a/common/src/dev/failure.rs +++ b/frontend/src/dev/failure.rs @@ -11,10 +11,8 @@ use super::{ Region, }; use crate::dev::metadata::Constraint; -use crate::{ - dev::{Instance, Value}, - plonk::{ConstraintSystem, Expression, Gate}, -}; +use crate::dev::{Instance, Value}; +use halo2_common::plonk::{ConstraintSystem, Expression, Gate}; use halo2_middleware::circuit::{Any, Column}; mod emitter; diff --git a/common/src/dev/failure/emitter.rs b/frontend/src/dev/failure/emitter.rs similarity index 99% rename from common/src/dev/failure/emitter.rs rename to frontend/src/dev/failure/emitter.rs index 1bb3432940..db228ba8eb 100644 --- a/common/src/dev/failure/emitter.rs +++ b/frontend/src/dev/failure/emitter.rs @@ -4,10 +4,8 @@ use std::iter; use group::ff::Field; use super::FailureLocation; -use crate::{ - dev::{metadata, util}, - plonk::Expression, -}; +use crate::dev::{metadata, util}; +use halo2_common::plonk::Expression; use halo2_middleware::circuit::{Advice, Any}; fn padded(p: char, width: usize, text: &str) -> String { diff --git a/common/src/dev/gates.rs b/frontend/src/dev/gates.rs similarity index 99% rename from common/src/dev/gates.rs rename to frontend/src/dev/gates.rs index dbe692fcac..2d6e43c10c 100644 --- a/common/src/dev/gates.rs +++ b/frontend/src/dev/gates.rs @@ -5,10 +5,8 @@ use std::{ use halo2_middleware::ff::PrimeField; -use crate::{ - dev::util, - plonk::{sealed::SealedPhase, Circuit, ConstraintSystem, FirstPhase}, -}; +use crate::dev::util; +use halo2_common::plonk::{sealed::SealedPhase, 
Circuit, ConstraintSystem, FirstPhase}; #[derive(Debug)] struct Constraint { diff --git a/common/src/dev/graph.rs b/frontend/src/dev/graph.rs similarity index 100% rename from common/src/dev/graph.rs rename to frontend/src/dev/graph.rs diff --git a/common/src/dev/graph/layout.rs b/frontend/src/dev/graph/layout.rs similarity index 100% rename from common/src/dev/graph/layout.rs rename to frontend/src/dev/graph/layout.rs diff --git a/common/src/dev/metadata.rs b/frontend/src/dev/metadata.rs similarity index 99% rename from common/src/dev/metadata.rs rename to frontend/src/dev/metadata.rs index 690c432628..66e4b49bd3 100644 --- a/common/src/dev/metadata.rs +++ b/frontend/src/dev/metadata.rs @@ -1,7 +1,7 @@ //! Metadata about circuits. use super::metadata::Column as ColumnMetadata; -use crate::plonk::{self}; +use halo2_common::plonk; use halo2_middleware::circuit::Any; pub use halo2_middleware::metadata::Column; use std::{ diff --git a/common/src/dev/tfp.rs b/frontend/src/dev/tfp.rs similarity index 98% rename from common/src/dev/tfp.rs rename to frontend/src/dev/tfp.rs index 3339bf8cd7..8daa95e9b0 100644 --- a/common/src/dev/tfp.rs +++ b/frontend/src/dev/tfp.rs @@ -3,13 +3,11 @@ use std::{fmt, marker::PhantomData}; use halo2_middleware::ff::Field; use tracing::{debug, debug_span, span::EnteredSpan}; -use crate::{ - circuit::{ - layouter::{RegionLayouter, SyncDeps}, - AssignedCell, Cell, Layouter, Region, Table, Value, - }, - plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, +use halo2_common::circuit::{ + layouter::{RegionLayouter, SyncDeps}, + AssignedCell, Cell, Layouter, Region, Table, Value, }; +use halo2_common::plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}; use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use halo2_middleware::plonk::Assigned; diff --git a/common/src/dev/util.rs b/frontend/src/dev/util.rs similarity index 97% rename from common/src/dev/util.rs rename to 
frontend/src/dev/util.rs index 52c2de2460..dbfb6eb32e 100644 --- a/common/src/dev/util.rs +++ b/frontend/src/dev/util.rs @@ -2,7 +2,7 @@ use group::ff::Field; use std::collections::BTreeMap; use super::{metadata, CellValue, InstanceValue, Value}; -use crate::plonk::{AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell}; +use halo2_common::plonk::{AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell}; use halo2_middleware::circuit::{Advice, Any, Column, ColumnType}; use halo2_middleware::poly::Rotation; diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index 3fca57d042..39952d73aa 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -2,3 +2,4 @@ #![allow(unused_imports)] // TODO: Remove pub mod circuit; +pub mod dev; From 9b19dd9b18e51f0ea46c0418013decc8bbfadf56 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Thu, 25 Jan 2024 09:57:50 +0000 Subject: [PATCH 54/79] Remove old halo2_proofs src --- halo2_proofs_rm/src/arithmetic.rs | 554 --- halo2_proofs_rm/src/circuit.rs | 595 --- halo2_proofs_rm/src/circuit/floor_planner.rs | 6 - .../src/circuit/floor_planner/single_pass.rs | 434 --- .../src/circuit/floor_planner/v1.rs | 549 --- .../src/circuit/floor_planner/v1/strategy.rs | 282 -- halo2_proofs_rm/src/circuit/layouter.rs | 315 -- halo2_proofs_rm/src/circuit/table_layouter.rs | 413 --- halo2_proofs_rm/src/circuit/value.rs | 703 ---- halo2_proofs_rm/src/dev.rs | 1854 ---------- halo2_proofs_rm/src/dev/cost.rs | 561 --- halo2_proofs_rm/src/dev/cost_model.rs | 323 -- halo2_proofs_rm/src/dev/failure.rs | 873 ----- halo2_proofs_rm/src/dev/failure/emitter.rs | 214 -- halo2_proofs_rm/src/dev/gates.rs | 314 -- halo2_proofs_rm/src/dev/graph.rs | 204 -- halo2_proofs_rm/src/dev/graph/layout.rs | 323 -- halo2_proofs_rm/src/dev/metadata.rs | 313 -- halo2_proofs_rm/src/dev/tfp.rs | 508 --- halo2_proofs_rm/src/dev/util.rs | 161 - halo2_proofs_rm/src/helpers.rs | 154 - halo2_proofs_rm/src/lib.rs | 21 - halo2_proofs_rm/src/multicore.rs | 38 
- halo2_proofs_rm/src/plonk.rs | 549 --- halo2_proofs_rm/src/plonk/assigned.rs | 665 ---- halo2_proofs_rm/src/plonk/circuit.rs | 3246 ----------------- .../src/plonk/circuit/compress_selectors.rs | 352 -- halo2_proofs_rm/src/plonk/error.rs | 136 - halo2_proofs_rm/src/plonk/evaluation.rs | 871 ----- halo2_proofs_rm/src/plonk/keygen.rs | 394 -- halo2_proofs_rm/src/plonk/lookup.rs | 108 - halo2_proofs_rm/src/plonk/lookup/prover.rs | 475 --- halo2_proofs_rm/src/plonk/lookup/verifier.rs | 211 -- halo2_proofs_rm/src/plonk/permutation.rs | 172 - .../src/plonk/permutation/keygen.rs | 543 --- .../src/plonk/permutation/prover.rs | 329 -- .../src/plonk/permutation/verifier.rs | 254 -- halo2_proofs_rm/src/plonk/prover.rs | 994 ----- halo2_proofs_rm/src/plonk/shuffle.rs | 76 - halo2_proofs_rm/src/plonk/shuffle/prover.rs | 250 -- halo2_proofs_rm/src/plonk/shuffle/verifier.rs | 138 - halo2_proofs_rm/src/plonk/vanishing.rs | 11 - halo2_proofs_rm/src/plonk/vanishing/prover.rs | 199 - .../src/plonk/vanishing/verifier.rs | 138 - halo2_proofs_rm/src/plonk/verifier.rs | 462 --- halo2_proofs_rm/src/plonk/verifier/batch.rs | 135 - halo2_proofs_rm/src/poly.rs | 345 -- halo2_proofs_rm/src/poly/commitment.rs | 245 -- halo2_proofs_rm/src/poly/domain.rs | 557 --- halo2_proofs_rm/src/poly/ipa/commitment.rs | 370 -- .../src/poly/ipa/commitment/prover.rs | 167 - .../src/poly/ipa/commitment/verifier.rs | 100 - halo2_proofs_rm/src/poly/ipa/mod.rs | 7 - halo2_proofs_rm/src/poly/ipa/msm.rs | 271 -- halo2_proofs_rm/src/poly/ipa/multiopen.rs | 172 - .../src/poly/ipa/multiopen/prover.rs | 122 - .../src/poly/ipa/multiopen/verifier.rs | 148 - halo2_proofs_rm/src/poly/ipa/strategy.rs | 171 - halo2_proofs_rm/src/poly/kzg/commitment.rs | 417 --- halo2_proofs_rm/src/poly/kzg/mod.rs | 8 - halo2_proofs_rm/src/poly/kzg/msm.rs | 203 -- halo2_proofs_rm/src/poly/kzg/multiopen.rs | 5 - halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs | 50 - .../src/poly/kzg/multiopen/gwc/prover.rs | 89 - 
.../src/poly/kzg/multiopen/gwc/verifier.rs | 124 - .../src/poly/kzg/multiopen/shplonk.rs | 247 -- .../src/poly/kzg/multiopen/shplonk/prover.rs | 298 -- .../poly/kzg/multiopen/shplonk/verifier.rs | 140 - halo2_proofs_rm/src/poly/kzg/strategy.rs | 181 - halo2_proofs_rm/src/poly/multiopen_test.rs | 298 -- halo2_proofs_rm/src/poly/query.rs | 160 - halo2_proofs_rm/src/poly/strategy.rs | 31 - halo2_proofs_rm/src/transcript.rs | 554 --- 73 files changed, 25900 deletions(-) delete mode 100644 halo2_proofs_rm/src/arithmetic.rs delete mode 100644 halo2_proofs_rm/src/circuit.rs delete mode 100644 halo2_proofs_rm/src/circuit/floor_planner.rs delete mode 100644 halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs delete mode 100644 halo2_proofs_rm/src/circuit/floor_planner/v1.rs delete mode 100644 halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs delete mode 100644 halo2_proofs_rm/src/circuit/layouter.rs delete mode 100644 halo2_proofs_rm/src/circuit/table_layouter.rs delete mode 100644 halo2_proofs_rm/src/circuit/value.rs delete mode 100644 halo2_proofs_rm/src/dev.rs delete mode 100644 halo2_proofs_rm/src/dev/cost.rs delete mode 100644 halo2_proofs_rm/src/dev/cost_model.rs delete mode 100644 halo2_proofs_rm/src/dev/failure.rs delete mode 100644 halo2_proofs_rm/src/dev/failure/emitter.rs delete mode 100644 halo2_proofs_rm/src/dev/gates.rs delete mode 100644 halo2_proofs_rm/src/dev/graph.rs delete mode 100644 halo2_proofs_rm/src/dev/graph/layout.rs delete mode 100644 halo2_proofs_rm/src/dev/metadata.rs delete mode 100644 halo2_proofs_rm/src/dev/tfp.rs delete mode 100644 halo2_proofs_rm/src/dev/util.rs delete mode 100644 halo2_proofs_rm/src/helpers.rs delete mode 100644 halo2_proofs_rm/src/multicore.rs delete mode 100644 halo2_proofs_rm/src/plonk.rs delete mode 100644 halo2_proofs_rm/src/plonk/assigned.rs delete mode 100644 halo2_proofs_rm/src/plonk/circuit.rs delete mode 100644 halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs delete mode 100644 
halo2_proofs_rm/src/plonk/error.rs delete mode 100644 halo2_proofs_rm/src/plonk/evaluation.rs delete mode 100644 halo2_proofs_rm/src/plonk/keygen.rs delete mode 100644 halo2_proofs_rm/src/plonk/lookup.rs delete mode 100644 halo2_proofs_rm/src/plonk/lookup/prover.rs delete mode 100644 halo2_proofs_rm/src/plonk/lookup/verifier.rs delete mode 100644 halo2_proofs_rm/src/plonk/permutation.rs delete mode 100644 halo2_proofs_rm/src/plonk/permutation/keygen.rs delete mode 100644 halo2_proofs_rm/src/plonk/permutation/prover.rs delete mode 100644 halo2_proofs_rm/src/plonk/permutation/verifier.rs delete mode 100644 halo2_proofs_rm/src/plonk/prover.rs delete mode 100644 halo2_proofs_rm/src/plonk/shuffle.rs delete mode 100644 halo2_proofs_rm/src/plonk/shuffle/prover.rs delete mode 100644 halo2_proofs_rm/src/plonk/shuffle/verifier.rs delete mode 100644 halo2_proofs_rm/src/plonk/vanishing.rs delete mode 100644 halo2_proofs_rm/src/plonk/vanishing/prover.rs delete mode 100644 halo2_proofs_rm/src/plonk/vanishing/verifier.rs delete mode 100644 halo2_proofs_rm/src/plonk/verifier.rs delete mode 100644 halo2_proofs_rm/src/plonk/verifier/batch.rs delete mode 100644 halo2_proofs_rm/src/poly.rs delete mode 100644 halo2_proofs_rm/src/poly/commitment.rs delete mode 100644 halo2_proofs_rm/src/poly/domain.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/commitment.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/commitment/prover.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/mod.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/msm.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/multiopen.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs delete mode 100644 halo2_proofs_rm/src/poly/ipa/strategy.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/commitment.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/mod.rs delete 
mode 100644 halo2_proofs_rm/src/poly/kzg/msm.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs delete mode 100644 halo2_proofs_rm/src/poly/kzg/strategy.rs delete mode 100644 halo2_proofs_rm/src/poly/multiopen_test.rs delete mode 100644 halo2_proofs_rm/src/poly/query.rs delete mode 100644 halo2_proofs_rm/src/poly/strategy.rs delete mode 100644 halo2_proofs_rm/src/transcript.rs diff --git a/halo2_proofs_rm/src/arithmetic.rs b/halo2_proofs_rm/src/arithmetic.rs deleted file mode 100644 index 0163e355eb..0000000000 --- a/halo2_proofs_rm/src/arithmetic.rs +++ /dev/null @@ -1,554 +0,0 @@ -//! This module provides common utilities, traits and structures for group, -//! field and polynomial arithmetic. - -use super::multicore; -pub use ff::Field; -use group::{ - ff::{BatchInvert, PrimeField}, - Curve, Group, GroupOpsOwned, ScalarMulOwned, -}; - -pub use halo2curves::{CurveAffine, CurveExt}; - -/// This represents an element of a group with basic operations that can be -/// performed. This allows an FFT implementation (for example) to operate -/// generically over either a field or elliptic curve group. 
-pub trait FftGroup: - Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned -{ -} - -impl FftGroup for T -where - Scalar: Field, - T: Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned, -{ -} - -fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - - let c = if bases.len() < 4 { - 1 - } else if bases.len() < 32 { - 3 - } else { - (f64::from(bases.len() as u32)).ln().ceil() as usize - }; - - fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize { - let skip_bits = segment * c; - let skip_bytes = skip_bits / 8; - - if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 { - return 0; - } - - let mut v = [0; 8]; - for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) { - *v = *o; - } - - let mut tmp = u64::from_le_bytes(v); - tmp >>= skip_bits - (skip_bytes * 8); - tmp %= 1 << c; - - tmp as usize - } - - let segments = (C::Scalar::NUM_BITS as usize / c) + 1; - - for current_segment in (0..segments).rev() { - for _ in 0..c { - *acc = acc.double(); - } - - #[derive(Clone, Copy)] - enum Bucket { - None, - Affine(C), - Projective(C::Curve), - } - - impl Bucket { - fn add_assign(&mut self, other: &C) { - *self = match *self { - Bucket::None => Bucket::Affine(*other), - Bucket::Affine(a) => Bucket::Projective(a + *other), - Bucket::Projective(mut a) => { - a += *other; - Bucket::Projective(a) - } - } - } - - fn add(self, mut other: C::Curve) -> C::Curve { - match self { - Bucket::None => other, - Bucket::Affine(a) => { - other += a; - other - } - Bucket::Projective(a) => other + &a, - } - } - } - - let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1]; - - for (coeff, base) in coeffs.iter().zip(bases.iter()) { - let coeff = get_at::(current_segment, c, coeff); - if coeff != 0 { - buckets[coeff - 1].add_assign(base); - } - } - - // Summation by parts - // e.g. 
3a + 2b + 1c = a + - // (a) + b + - // ((a) + b) + c - let mut running_sum = C::Curve::identity(); - for exp in buckets.into_iter().rev() { - running_sum = exp.add(running_sum); - *acc += &running_sum; - } - } -} - -/// Performs a small multi-exponentiation operation. -/// Uses the double-and-add algorithm with doublings shared across points. -pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - let mut acc = C::Curve::identity(); - - // for byte idx - for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() { - // for bit idx - for bit_idx in (0..8).rev() { - acc = acc.double(); - // for each coeff - for coeff_idx in 0..coeffs.len() { - let byte = coeffs[coeff_idx].as_ref()[byte_idx]; - if ((byte >> bit_idx) & 1) != 0 { - acc += bases[coeff_idx]; - } - } - } - } - - acc -} - -/// Performs a multi-exponentiation operation. -/// -/// This function will panic if coeffs and bases have a different length. -/// -/// This will use multithreading if beneficial. 
-pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - assert_eq!(coeffs.len(), bases.len()); - - let num_threads = multicore::current_num_threads(); - if coeffs.len() > num_threads { - let chunk = coeffs.len() / num_threads; - let num_chunks = coeffs.chunks(chunk).len(); - let mut results = vec![C::Curve::identity(); num_chunks]; - multicore::scope(|scope| { - let chunk = coeffs.len() / num_threads; - - for ((coeffs, bases), acc) in coeffs - .chunks(chunk) - .zip(bases.chunks(chunk)) - .zip(results.iter_mut()) - { - scope.spawn(move |_| { - multiexp_serial(coeffs, bases, acc); - }); - } - }); - results.iter().fold(C::Curve::identity(), |a, b| a + b) - } else { - let mut acc = C::Curve::identity(); - multiexp_serial(coeffs, bases, &mut acc); - acc - } -} - -/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size -/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative -/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when -/// interpreted as the coefficients of a polynomial of degree $n - 1$, is -/// transformed into the evaluations of this polynomial at each of the $n$ -/// distinct powers of $\omega$. This transformation is invertible by providing -/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element -/// by $n$. -/// -/// This will use multithreading if beneficial. 
-pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) { - fn bitreverse(mut n: usize, l: usize) -> usize { - let mut r = 0; - for _ in 0..l { - r = (r << 1) | (n & 1); - n >>= 1; - } - r - } - - let threads = multicore::current_num_threads(); - let log_threads = log2_floor(threads); - let n = a.len(); - assert_eq!(n, 1 << log_n); - - for k in 0..n { - let rk = bitreverse(k, log_n as usize); - if k < rk { - a.swap(rk, k); - } - } - - // precompute twiddle factors - let twiddles: Vec<_> = (0..(n / 2)) - .scan(Scalar::ONE, |w, _| { - let tw = *w; - *w *= ω - Some(tw) - }) - .collect(); - - if log_n <= log_threads { - let mut chunk = 2_usize; - let mut twiddle_chunk = n / 2; - for _ in 0..log_n { - a.chunks_mut(chunk).for_each(|coeffs| { - let (left, right) = coeffs.split_at_mut(chunk / 2); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - .enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - }); - chunk *= 2; - twiddle_chunk /= 2; - } - } else { - recursive_butterfly_arithmetic(a, n, 1, &twiddles) - } -} - -/// This perform recursive butterfly arithmetic -pub fn recursive_butterfly_arithmetic>( - a: &mut [G], - n: usize, - twiddle_chunk: usize, - twiddles: &[Scalar], -) { - if n == 2 { - let t = a[1]; - a[1] = a[0]; - a[0] += &t; - a[1] -= &t; - } else { - let (left, right) = a.split_at_mut(n / 2); - multicore::join( - || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles), - || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles), - ); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - 
.enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - } -} - -/// Convert coefficient bases group elements to lagrange basis by inverse FFT. -pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { - let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]); - let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV; - for _ in k..C::Scalar::S { - omega_inv = omega_inv.square(); - } - - let mut g_lagrange_projective = g_projective; - best_fft(&mut g_lagrange_projective, omega_inv, k); - parallelize(&mut g_lagrange_projective, |g, _| { - for g in g.iter_mut() { - *g *= n_inv; - } - }); - - let mut g_lagrange = vec![C::identity(); 1 << k]; - parallelize(&mut g_lagrange, |g_lagrange, starts| { - C::Curve::batch_normalize( - &g_lagrange_projective[starts..(starts + g_lagrange.len())], - g_lagrange, - ); - }); - - g_lagrange -} - -/// This evaluates a provided polynomial (in coefficient form) at `point`. -pub fn eval_polynomial(poly: &[F], point: F) -> F { - fn evaluate(poly: &[F], point: F) -> F { - poly.iter() - .rev() - .fold(F::ZERO, |acc, coeff| acc * point + coeff) - } - let n = poly.len(); - let num_threads = multicore::current_num_threads(); - if n * 2 < num_threads { - evaluate(poly, point) - } else { - let chunk_size = (n + num_threads - 1) / num_threads; - let mut parts = vec![F::ZERO; num_threads]; - multicore::scope(|scope| { - for (chunk_idx, (out, poly)) in - parts.chunks_mut(1).zip(poly.chunks(chunk_size)).enumerate() - { - scope.spawn(move |_| { - let start = chunk_idx * chunk_size; - out[0] = evaluate(poly, point) * point.pow_vartime([start as u64, 0, 0, 0]); - }); - } - }); - parts.iter().fold(F::ZERO, |acc, coeff| acc + coeff) - } -} - -/// This computes the inner product of two vectors `a` and `b`. -/// -/// This function will panic if the two vectors are not the same size. -pub fn compute_inner_product(a: &[F], b: &[F]) -> F { - // TODO: parallelize? 
- assert_eq!(a.len(), b.len()); - - let mut acc = F::ZERO; - for (a, b) in a.iter().zip(b.iter()) { - acc += (*a) * (*b); - } - - acc -} - -/// Divides polynomial `a` in `X` by `X - b` with -/// no remainder. -pub fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec -where - I::IntoIter: DoubleEndedIterator + ExactSizeIterator, -{ - b = -b; - let a = a.into_iter(); - - let mut q = vec![F::ZERO; a.len() - 1]; - - let mut tmp = F::ZERO; - for (q, r) in q.iter_mut().rev().zip(a.rev()) { - let mut lead_coeff = *r; - lead_coeff.sub_assign(&tmp); - *q = lead_coeff; - tmp = lead_coeff; - tmp.mul_assign(&b); - } - - q -} - -/// This utility function will parallelize an operation that is to be -/// performed over a mutable slice. -pub fn parallelize(v: &mut [T], f: F) { - // Algorithm rationale: - // - // Using the stdlib `chunks_mut` will lead to severe load imbalance. - // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 - // if the division is not exact, the last chunk will be the remainder. - // - // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, - // There will be a 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. - // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, - // a load imbalance of 2x. - // - // Instead we can divide work into chunks of size - // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 - // - // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut - // - // See also OpenMP spec (page 60) - // http://www.openmp.org/mp-documents/openmp-4.5.pdf - // "When no chunk_size is specified, the iteration space is divided into chunks - // that are approximately equal in size, and at most one chunk is distributed to - // each thread. The size of the chunks is unspecified in this case." 
- // This implies chunks are the same size ±1 - - let f = &f; - let total_iters = v.len(); - let num_threads = multicore::current_num_threads(); - let base_chunk_size = total_iters / num_threads; - let cutoff_chunk_id = total_iters % num_threads; - let split_pos = cutoff_chunk_id * (base_chunk_size + 1); - let (v_hi, v_lo) = v.split_at_mut(split_pos); - - multicore::scope(|scope| { - // Skip special-case: number of iterations is cleanly divided by number of threads. - if cutoff_chunk_id != 0 { - for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { - let offset = chunk_id * (base_chunk_size + 1); - scope.spawn(move |_| f(chunk, offset)); - } - } - // Skip special-case: less iterations than number of threads. - if base_chunk_size != 0 { - for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { - let offset = split_pos + (chunk_id * base_chunk_size); - scope.spawn(move |_| f(chunk, offset)); - } - } - }); -} - -fn log2_floor(num: usize) -> u32 { - assert!(num > 0); - - let mut pow = 0; - - while (1 << (pow + 1)) <= num { - pow += 1; - } - - pow -} - -/// Returns coefficients of an n - 1 degree polynomial given a set of n points -/// and their evaluations. This function will panic if two values in `points` -/// are the same. 
-pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { - assert_eq!(points.len(), evals.len()); - if points.len() == 1 { - // Constant polynomial - vec![evals[0]] - } else { - let mut denoms = Vec::with_capacity(points.len()); - for (j, x_j) in points.iter().enumerate() { - let mut denom = Vec::with_capacity(points.len() - 1); - for x_k in points - .iter() - .enumerate() - .filter(|&(k, _)| k != j) - .map(|a| a.1) - { - denom.push(*x_j - x_k); - } - denoms.push(denom); - } - // Compute (x_j - x_k)^(-1) for each j != i - denoms.iter_mut().flat_map(|v| v.iter_mut()).batch_invert(); - - let mut final_poly = vec![F::ZERO; points.len()]; - for (j, (denoms, eval)) in denoms.into_iter().zip(evals.iter()).enumerate() { - let mut tmp: Vec = Vec::with_capacity(points.len()); - let mut product = Vec::with_capacity(points.len() - 1); - tmp.push(F::ONE); - for (x_k, denom) in points - .iter() - .enumerate() - .filter(|&(k, _)| k != j) - .map(|a| a.1) - .zip(denoms.into_iter()) - { - product.resize(tmp.len() + 1, F::ZERO); - for ((a, b), product) in tmp - .iter() - .chain(std::iter::once(&F::ZERO)) - .zip(std::iter::once(&F::ZERO).chain(tmp.iter())) - .zip(product.iter_mut()) - { - *product = *a * (-denom * x_k) + *b * denom; - } - std::mem::swap(&mut tmp, &mut product); - } - assert_eq!(tmp.len(), points.len()); - assert_eq!(product.len(), points.len() - 1); - for (final_coeff, interpolation_coeff) in final_poly.iter_mut().zip(tmp.into_iter()) { - *final_coeff += interpolation_coeff * eval; - } - } - final_poly - } -} - -pub(crate) fn evaluate_vanishing_polynomial(roots: &[F], z: F) -> F { - fn evaluate(roots: &[F], z: F) -> F { - roots.iter().fold(F::ONE, |acc, point| (z - point) * acc) - } - let n = roots.len(); - let num_threads = multicore::current_num_threads(); - if n * 2 < num_threads { - evaluate(roots, z) - } else { - let chunk_size = (n + num_threads - 1) / num_threads; - let mut parts = vec![F::ONE; num_threads]; - multicore::scope(|scope| { - for (out, 
roots) in parts.chunks_mut(1).zip(roots.chunks(chunk_size)) { - scope.spawn(move |_| out[0] = evaluate(roots, z)); - } - }); - parts.iter().fold(F::ONE, |acc, part| acc * part) - } -} - -pub(crate) fn powers(base: F) -> impl Iterator { - std::iter::successors(Some(F::ONE), move |power| Some(base * power)) -} - -#[cfg(test)] -use rand_core::OsRng; - -#[cfg(test)] -use crate::halo2curves::pasta::Fp; - -#[test] -fn test_lagrange_interpolate() { - let rng = OsRng; - - let points = (0..5).map(|_| Fp::random(rng)).collect::>(); - let evals = (0..5).map(|_| Fp::random(rng)).collect::>(); - - for coeffs in 0..5 { - let points = &points[0..coeffs]; - let evals = &evals[0..coeffs]; - - let poly = lagrange_interpolate(points, evals); - assert_eq!(poly.len(), points.len()); - - for (point, eval) in points.iter().zip(evals) { - assert_eq!(eval_polynomial(&poly, *point), *eval); - } - } -} diff --git a/halo2_proofs_rm/src/circuit.rs b/halo2_proofs_rm/src/circuit.rs deleted file mode 100644 index 56a6be0e5c..0000000000 --- a/halo2_proofs_rm/src/circuit.rs +++ /dev/null @@ -1,595 +0,0 @@ -//! Traits and structs for implementing circuit components. - -use std::{fmt, marker::PhantomData}; - -use ff::Field; - -use crate::plonk::{ - Advice, Any, Assigned, Challenge, Column, Error, Fixed, Instance, Selector, TableColumn, -}; - -mod value; -pub use value::Value; - -pub mod floor_planner; -pub use floor_planner::single_pass::SimpleFloorPlanner; - -pub mod layouter; -mod table_layouter; - -pub use table_layouter::{SimpleTableLayouter, TableLayouter}; - -/// A chip implements a set of instructions that can be used by gadgets. -/// -/// The chip stores state that is required at circuit synthesis time in -/// [`Chip::Config`], which can be fetched via [`Chip::config`]. -/// -/// The chip also loads any fixed configuration needed at synthesis time -/// using its own implementation of `load`, and stores it in [`Chip::Loaded`]. -/// This can be accessed via [`Chip::loaded`]. 
-pub trait Chip: Sized { - /// A type that holds the configuration for this chip, and any other state it may need - /// during circuit synthesis, that can be derived during [`Circuit::configure`]. - /// - /// [`Circuit::configure`]: crate::plonk::Circuit::configure - type Config: fmt::Debug + Clone; - - /// A type that holds any general chip state that needs to be loaded at the start of - /// [`Circuit::synthesize`]. This might simply be `()` for some chips. - /// - /// [`Circuit::synthesize`]: crate::plonk::Circuit::synthesize - type Loaded: fmt::Debug + Clone; - - /// The chip holds its own configuration. - fn config(&self) -> &Self::Config; - - /// Provides access to general chip state loaded at the beginning of circuit - /// synthesis. - /// - /// Panics if called before `Chip::load`. - fn loaded(&self) -> &Self::Loaded; -} - -/// Index of a region in a layouter -#[derive(Clone, Copy, Debug)] -pub struct RegionIndex(usize); - -impl From for RegionIndex { - fn from(idx: usize) -> RegionIndex { - RegionIndex(idx) - } -} - -impl std::ops::Deref for RegionIndex { - type Target = usize; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -/// Starting row of a region in a layouter -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct RegionStart(usize); - -impl From for RegionStart { - fn from(idx: usize) -> RegionStart { - RegionStart(idx) - } -} - -impl std::ops::Deref for RegionStart { - type Target = usize; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -/// A pointer to a cell within a circuit. -#[derive(Clone, Copy, Debug)] -pub struct Cell { - /// Identifies the region in which this cell resides. - pub region_index: RegionIndex, - /// The relative offset of this cell within its region. - pub row_offset: usize, - /// The column of this cell. - pub column: Column, -} - -/// An assigned cell. 
-#[derive(Clone, Debug)] -pub struct AssignedCell { - value: Value, - cell: Cell, - _marker: PhantomData, -} - -impl AssignedCell { - /// Returns the value of the [`AssignedCell`]. - pub fn value(&self) -> Value<&V> { - self.value.as_ref() - } - - /// Returns the cell. - pub fn cell(&self) -> Cell { - self.cell - } -} - -impl AssignedCell -where - for<'v> Assigned: From<&'v V>, -{ - /// Returns the field element value of the [`AssignedCell`]. - pub fn value_field(&self) -> Value> { - self.value.to_field() - } -} - -impl AssignedCell, F> { - /// Evaluates this assigned cell's value directly, performing an unbatched inversion - /// if necessary. - /// - /// If the denominator is zero, the returned cell's value is zero. - pub fn evaluate(self) -> AssignedCell { - AssignedCell { - value: self.value.evaluate(), - cell: self.cell, - _marker: Default::default(), - } - } -} - -impl AssignedCell -where - for<'v> Assigned: From<&'v V>, -{ - /// Copies the value to a given advice cell and constrains them to be equal. - /// - /// Returns an error if either this cell or the given cell are in columns - /// where equality has not been enabled. - pub fn copy_advice( - &self, - annotation: A, - region: &mut Region<'_, F>, - column: Column, - offset: usize, - ) -> Result - where - A: Fn() -> AR, - AR: Into, - { - let assigned_cell = - region.assign_advice(annotation, column, offset, || self.value.clone())?; - region.constrain_equal(assigned_cell.cell(), self.cell())?; - - Ok(assigned_cell) - } -} - -/// A region of the circuit in which a [`Chip`] can assign cells. -/// -/// Inside a region, the chip may freely use relative offsets; the [`Layouter`] will -/// treat these assignments as a single "region" within the circuit. -/// -/// The [`Layouter`] is allowed to optimise between regions as it sees fit. Chips must use -/// [`Region::constrain_equal`] to copy in variables assigned in other regions. 
-/// -/// TODO: It would be great if we could constrain the columns in these types to be -/// "logical" columns that are guaranteed to correspond to the chip (and have come from -/// `Chip::Config`). -#[derive(Debug)] -pub struct Region<'r, F: Field> { - region: &'r mut dyn layouter::RegionLayouter, -} - -impl<'r, F: Field> From<&'r mut dyn layouter::RegionLayouter> for Region<'r, F> { - fn from(region: &'r mut dyn layouter::RegionLayouter) -> Self { - Region { region } - } -} - -impl<'r, F: Field> Region<'r, F> { - /// Enables a selector at the given offset. - pub(crate) fn enable_selector( - &mut self, - annotation: A, - selector: &Selector, - offset: usize, - ) -> Result<(), Error> - where - A: Fn() -> AR, - AR: Into, - { - self.region - .enable_selector(&|| annotation().into(), selector, offset) - } - - /// Allows the circuit implementor to name/annotate a Column within a Region context. - /// - /// This is useful in order to improve the amount of information that `prover.verify()` - /// and `prover.assert_satisfied()` can provide. - pub fn name_column(&mut self, annotation: A, column: T) - where - A: Fn() -> AR, - AR: Into, - T: Into>, - { - self.region - .name_column(&|| annotation().into(), column.into()); - } - - /// Assign an advice column value (witness). - /// - /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. - pub fn assign_advice<'v, V, VR, A, AR>( - &'v mut self, - annotation: A, - column: Column, - offset: usize, - mut to: V, - ) -> Result, Error> - where - V: FnMut() -> Value + 'v, - for<'vr> Assigned: From<&'vr VR>, - A: Fn() -> AR, - AR: Into, - { - let mut value = Value::unknown(); - let cell = - self.region - .assign_advice(&|| annotation().into(), column, offset, &mut || { - let v = to(); - let value_f = v.to_field(); - value = v; - value_f - })?; - - Ok(AssignedCell { - value, - cell, - _marker: PhantomData, - }) - } - - /// Assigns a constant value to the column `advice` at `offset` within this region. 
- /// - /// The constant value will be assigned to a cell within one of the fixed columns - /// configured via `ConstraintSystem::enable_constant`. - /// - /// Returns the advice cell. - pub fn assign_advice_from_constant( - &mut self, - annotation: A, - column: Column, - offset: usize, - constant: VR, - ) -> Result, Error> - where - for<'vr> Assigned: From<&'vr VR>, - A: Fn() -> AR, - AR: Into, - { - let cell = self.region.assign_advice_from_constant( - &|| annotation().into(), - column, - offset, - (&constant).into(), - )?; - - Ok(AssignedCell { - value: Value::known(constant), - cell, - _marker: PhantomData, - }) - } - - /// Assign the value of the instance column's cell at absolute location - /// `row` to the column `advice` at `offset` within this region. - /// - /// Returns the advice cell, and its value if known. - pub fn assign_advice_from_instance( - &mut self, - annotation: A, - instance: Column, - row: usize, - advice: Column, - offset: usize, - ) -> Result, Error> - where - A: Fn() -> AR, - AR: Into, - { - let (cell, value) = self.region.assign_advice_from_instance( - &|| annotation().into(), - instance, - row, - advice, - offset, - )?; - - Ok(AssignedCell { - value, - cell, - _marker: PhantomData, - }) - } - - /// Returns the value of the instance column's cell at absolute location `row`. - /// - /// This method is only provided for convenience; it does not create any constraints. - /// Callers still need to use [`Self::assign_advice_from_instance`] to constrain the - /// instance values in their circuit. - pub fn instance_value( - &mut self, - instance: Column, - row: usize, - ) -> Result, Error> { - self.region.instance_value(instance, row) - } - - /// Assign a fixed value. - /// - /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. 
- pub fn assign_fixed<'v, V, VR, A, AR>( - &'v mut self, - annotation: A, - column: Column, - offset: usize, - mut to: V, - ) -> Result, Error> - where - V: FnMut() -> Value + 'v, - for<'vr> Assigned: From<&'vr VR>, - A: Fn() -> AR, - AR: Into, - { - let mut value = Value::unknown(); - let cell = - self.region - .assign_fixed(&|| annotation().into(), column, offset, &mut || { - let v = to(); - let value_f = v.to_field(); - value = v; - value_f - })?; - - Ok(AssignedCell { - value, - cell, - _marker: PhantomData, - }) - } - - /// Constrains a cell to have a constant value. - /// - /// Returns an error if the cell is in a column where equality has not been enabled. - pub fn constrain_constant(&mut self, cell: Cell, constant: VR) -> Result<(), Error> - where - VR: Into>, - { - self.region.constrain_constant(cell, constant.into()) - } - - /// Constrains two cells to have the same value. - /// - /// Returns an error if either of the cells are in columns where equality - /// has not been enabled. - pub fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { - self.region.constrain_equal(left, right) - } -} - -/// A lookup table in the circuit. -#[derive(Debug)] -pub struct Table<'r, F: Field> { - table: &'r mut dyn TableLayouter, -} - -impl<'r, F: Field> From<&'r mut dyn TableLayouter> for Table<'r, F> { - fn from(table: &'r mut dyn TableLayouter) -> Self { - Table { table } - } -} - -impl<'r, F: Field> Table<'r, F> { - /// Assigns a fixed value to a table cell. - /// - /// Returns an error if the table cell has already been assigned to. - /// - /// Even though `to` has `FnMut` bounds, it is guaranteed to be called at most once. 
- pub fn assign_cell<'v, V, VR, A, AR>( - &'v mut self, - annotation: A, - column: TableColumn, - offset: usize, - mut to: V, - ) -> Result<(), Error> - where - V: FnMut() -> Value + 'v, - VR: Into>, - A: Fn() -> AR, - AR: Into, - { - self.table - .assign_cell(&|| annotation().into(), column, offset, &mut || { - to().into_field() - }) - } -} - -/// A layout strategy within a circuit. The layouter is chip-agnostic and applies its -/// strategy to the context and config it is given. -/// -/// This abstracts over the circuit assignments, handling row indices etc. -/// -pub trait Layouter { - /// Represents the type of the "root" of this layouter, so that nested namespaces - /// can minimize indirection. - type Root: Layouter; - - /// Assign a region of gates to an absolute row number. - /// - /// Inside the closure, the chip may freely use relative offsets; the `Layouter` will - /// treat these assignments as a single "region" within the circuit. Outside this - /// closure, the `Layouter` is allowed to optimise as it sees fit. - /// - /// ```ignore - /// fn assign_region(&mut self, || "region name", |region| { - /// let config = chip.config(); - /// region.assign_advice(config.a, offset, || { Some(value)}); - /// }); - /// ``` - fn assign_region(&mut self, name: N, assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into; - - /// Assign a table region to an absolute row number. - /// - /// ```ignore - /// fn assign_table(&mut self, || "table name", |table| { - /// let config = chip.config(); - /// table.assign_fixed(config.a, offset, || { Some(value)}); - /// }); - /// ``` - fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> - where - A: FnMut(Table<'_, F>) -> Result<(), Error>, - N: Fn() -> NR, - NR: Into; - - /// Constrains a [`Cell`] to equal an instance column's row value at an - /// absolute position. 
- fn constrain_instance( - &mut self, - cell: Cell, - column: Column, - row: usize, - ) -> Result<(), Error>; - - /// Queries the value of the given challenge. - /// - /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. - fn get_challenge(&self, challenge: Challenge) -> Value; - - /// Gets the "root" of this assignment, bypassing the namespacing. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - fn get_root(&mut self) -> &mut Self::Root; - - /// Creates a new (sub)namespace and enters into it. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Exits out of the existing namespace. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - fn pop_namespace(&mut self, gadget_name: Option); - - /// Enters into a namespace. - fn namespace(&mut self, name_fn: N) -> NamespacedLayouter<'_, F, Self::Root> - where - NR: Into, - N: FnOnce() -> NR, - { - self.get_root().push_namespace(name_fn); - - NamespacedLayouter(self.get_root(), PhantomData) - } -} - -/// This is a "namespaced" layouter which borrows a `Layouter` (pushing a namespace -/// context) and, when dropped, pops out of the namespace context. 
-#[derive(Debug)] -pub struct NamespacedLayouter<'a, F: Field, L: Layouter + 'a>(&'a mut L, PhantomData); - -impl<'a, F: Field, L: Layouter + 'a> Layouter for NamespacedLayouter<'a, F, L> { - type Root = L::Root; - - fn assign_region(&mut self, name: N, assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - self.0.assign_region(name, assignment) - } - - fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> - where - A: FnMut(Table<'_, F>) -> Result<(), Error>, - N: Fn() -> NR, - NR: Into, - { - self.0.assign_table(name, assignment) - } - - fn constrain_instance( - &mut self, - cell: Cell, - column: Column, - row: usize, - ) -> Result<(), Error> { - self.0.constrain_instance(cell, column, row) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.0.get_challenge(challenge) - } - - fn get_root(&mut self) -> &mut Self::Root { - self.0.get_root() - } - - fn push_namespace(&mut self, _name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - panic!("Only the root's push_namespace should be called"); - } - - fn pop_namespace(&mut self, _gadget_name: Option) { - panic!("Only the root's pop_namespace should be called"); - } -} - -impl<'a, F: Field, L: Layouter + 'a> Drop for NamespacedLayouter<'a, F, L> { - fn drop(&mut self) { - let gadget_name = { - #[cfg(feature = "gadget-traces")] - { - let mut gadget_name = None; - let mut is_second_frame = false; - backtrace::trace(|frame| { - if is_second_frame { - // Resolve this instruction pointer to a symbol name. - backtrace::resolve_frame(frame, |symbol| { - gadget_name = symbol.name().map(|name| format!("{name:#}")); - }); - - // We are done! - false - } else { - // We want the next frame. 
- is_second_frame = true; - true - } - }); - gadget_name - } - - #[cfg(not(feature = "gadget-traces"))] - None - }; - - self.get_root().pop_namespace(gadget_name); - } -} diff --git a/halo2_proofs_rm/src/circuit/floor_planner.rs b/halo2_proofs_rm/src/circuit/floor_planner.rs deleted file mode 100644 index 1b629034e6..0000000000 --- a/halo2_proofs_rm/src/circuit/floor_planner.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! Implementations of common circuit floor planners. - -pub(super) mod single_pass; - -mod v1; -pub use v1::{V1Pass, V1}; diff --git a/halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs b/halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs deleted file mode 100644 index 33c09e4c57..0000000000 --- a/halo2_proofs_rm/src/circuit/floor_planner/single_pass.rs +++ /dev/null @@ -1,434 +0,0 @@ -use std::cmp; -use std::collections::HashMap; -use std::fmt; -use std::marker::PhantomData; - -use ff::Field; - -use crate::{ - circuit::{ - layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, - table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, - }, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, Error, Fixed, FloorPlanner, - Instance, Selector, TableColumn, - }, -}; - -/// A simple [`FloorPlanner`] that performs minimal optimizations. -/// -/// This floor planner is suitable for debugging circuits. It aims to reflect the circuit -/// "business logic" in the circuit layout as closely as possible. It uses a single-pass -/// layouter that does not reorder regions for optimal packing. 
-#[derive(Debug)] -pub struct SimpleFloorPlanner; - -impl FloorPlanner for SimpleFloorPlanner { - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error> { - let layouter = SingleChipLayouter::new(cs, constants)?; - circuit.synthesize(config, layouter) - } -} - -/// A [`Layouter`] for a single-chip circuit. -pub struct SingleChipLayouter<'a, F: Field, CS: Assignment + 'a> { - cs: &'a mut CS, - constants: Vec>, - /// Stores the starting row for each region. - regions: Vec, - /// Stores the first empty row for each column. - columns: HashMap, - /// Stores the table fixed columns. - table_columns: Vec, - _marker: PhantomData, -} - -impl<'a, F: Field, CS: Assignment + 'a> fmt::Debug for SingleChipLayouter<'a, F, CS> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SingleChipLayouter") - .field("regions", &self.regions) - .field("columns", &self.columns) - .finish() - } -} - -impl<'a, F: Field, CS: Assignment> SingleChipLayouter<'a, F, CS> { - /// Creates a new single-chip layouter. - pub fn new(cs: &'a mut CS, constants: Vec>) -> Result { - let ret = SingleChipLayouter { - cs, - constants, - regions: vec![], - columns: HashMap::default(), - table_columns: vec![], - _marker: PhantomData, - }; - Ok(ret) - } -} - -impl<'a, F: Field, CS: Assignment + 'a + SyncDeps> Layouter - for SingleChipLayouter<'a, F, CS> -{ - type Root = Self; - - fn assign_region(&mut self, name: N, mut assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - let region_index = self.regions.len(); - - // Get shape of the region. - let mut shape = RegionShape::new(region_index.into()); - { - let region: &mut dyn RegionLayouter = &mut shape; - assignment(region.into())?; - } - - // Lay out this region. We implement the simplest approach here: position the - // region starting at the earliest row for which none of the columns are in use. 
- let mut region_start = 0; - for column in &shape.columns { - region_start = cmp::max(region_start, self.columns.get(column).cloned().unwrap_or(0)); - } - self.regions.push(region_start.into()); - - // Update column usage information. - for column in shape.columns { - self.columns.insert(column, region_start + shape.row_count); - } - - // Assign region cells. - self.cs.enter_region(name); - let mut region = SingleChipLayouterRegion::new(self, region_index.into()); - let result = { - let region: &mut dyn RegionLayouter = &mut region; - assignment(region.into()) - }?; - let constants_to_assign = region.constants; - self.cs.exit_region(); - - // Assign constants. For the simple floor planner, we assign constants in order in - // the first `constants` column. - if self.constants.is_empty() { - if !constants_to_assign.is_empty() { - return Err(Error::NotEnoughColumnsForConstants); - } - } else { - let constants_column = self.constants[0]; - let next_constant_row = self - .columns - .entry(Column::::from(constants_column).into()) - .or_default(); - for (constant, advice) in constants_to_assign { - self.cs.assign_fixed( - || format!("Constant({:?})", constant.evaluate()), - constants_column, - *next_constant_row, - || Value::known(constant), - )?; - self.cs.copy( - constants_column.into(), - *next_constant_row, - advice.column, - *self.regions[*advice.region_index] + advice.row_offset, - )?; - *next_constant_row += 1; - } - } - - Ok(result) - } - - fn assign_table(&mut self, name: N, mut assignment: A) -> Result<(), Error> - where - A: FnMut(Table<'_, F>) -> Result<(), Error>, - N: Fn() -> NR, - NR: Into, - { - // Maintenance hazard: there is near-duplicate code in `v1::AssignmentPass::assign_table`. - // Assign table cells. 
- self.cs.enter_region(name); - let mut table = SimpleTableLayouter::new(self.cs, &self.table_columns); - { - let table: &mut dyn TableLayouter = &mut table; - assignment(table.into()) - }?; - let default_and_assigned = table.default_and_assigned; - self.cs.exit_region(); - - // Check that all table columns have the same length `first_unused`, - // and all cells up to that length are assigned. - let first_unused = compute_table_lengths(&default_and_assigned)?; - - // Record these columns so that we can prevent them from being used again. - for column in default_and_assigned.keys() { - self.table_columns.push(*column); - } - - for (col, (default_val, _)) in default_and_assigned { - // default_val must be Some because we must have assigned - // at least one cell in each column, and in that case we checked - // that all cells up to first_unused were assigned. - self.cs - .fill_from_row(col.inner(), first_unused, default_val.unwrap())?; - } - - Ok(()) - } - - fn constrain_instance( - &mut self, - cell: Cell, - instance: Column, - row: usize, - ) -> Result<(), Error> { - self.cs.copy( - cell.column, - *self.regions[*cell.region_index] + cell.row_offset, - instance.into(), - row, - ) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.cs.get_challenge(challenge) - } - - fn get_root(&mut self) -> &mut Self::Root { - self - } - - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - self.cs.push_namespace(name_fn) - } - - fn pop_namespace(&mut self, gadget_name: Option) { - self.cs.pop_namespace(gadget_name) - } -} - -struct SingleChipLayouterRegion<'r, 'a, F: Field, CS: Assignment + 'a> { - layouter: &'r mut SingleChipLayouter<'a, F, CS>, - region_index: RegionIndex, - /// Stores the constants to be assigned, and the cells to which they are copied. 
- constants: Vec<(Assigned, Cell)>, -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug - for SingleChipLayouterRegion<'r, 'a, F, CS> -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SingleChipLayouterRegion") - .field("layouter", &self.layouter) - .field("region_index", &self.region_index) - .finish() - } -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> SingleChipLayouterRegion<'r, 'a, F, CS> { - fn new(layouter: &'r mut SingleChipLayouter<'a, F, CS>, region_index: RegionIndex) -> Self { - SingleChipLayouterRegion { - layouter, - region_index, - constants: vec![], - } - } -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a + SyncDeps> RegionLayouter - for SingleChipLayouterRegion<'r, 'a, F, CS> -{ - fn enable_selector<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - selector: &Selector, - offset: usize, - ) -> Result<(), Error> { - self.layouter.cs.enable_selector( - annotation, - selector, - *self.layouter.regions[*self.region_index] + offset, - ) - } - - fn name_column<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - ) { - self.layouter.cs.annotate_column(annotation, column); - } - - fn assign_advice<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.layouter.cs.assign_advice( - annotation, - column, - *self.layouter.regions[*self.region_index] + offset, - to, - )?; - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn assign_advice_from_constant<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - constant: Assigned, - ) -> Result { - let advice = - self.assign_advice(annotation, column, offset, &mut || Value::known(constant))?; - self.constrain_constant(advice, constant)?; - - Ok(advice) - } - - fn assign_advice_from_instance<'v>( - &mut self, - 
annotation: &'v (dyn Fn() -> String + 'v), - instance: Column, - row: usize, - advice: Column, - offset: usize, - ) -> Result<(Cell, Value), Error> { - let value = self.layouter.cs.query_instance(instance, row)?; - - let cell = self.assign_advice(annotation, advice, offset, &mut || value.to_field())?; - - self.layouter.cs.copy( - cell.column, - *self.layouter.regions[*cell.region_index] + cell.row_offset, - instance.into(), - row, - )?; - - Ok((cell, value)) - } - - fn instance_value( - &mut self, - instance: Column, - row: usize, - ) -> Result, Error> { - self.layouter.cs.query_instance(instance, row) - } - - fn assign_fixed<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.layouter.cs.assign_fixed( - annotation, - column, - *self.layouter.regions[*self.region_index] + offset, - to, - )?; - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { - self.constants.push((constant, cell)); - Ok(()) - } - - fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { - self.layouter.cs.copy( - left.column, - *self.layouter.regions[*left.region_index] + left.row_offset, - right.column, - *self.layouter.regions[*right.region_index] + right.row_offset, - )?; - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::vesta; - - use super::SimpleFloorPlanner; - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; - - #[test] - fn not_enough_columns_for_constants() { - struct MyCircuit {} - - impl Circuit for MyCircuit { - type Config = Column; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - MyCircuit {} - } - - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> 
Self::Config { - meta.advice_column() - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { - layouter.assign_region( - || "assign constant", - |mut region| { - region.assign_advice_from_constant( - || "one", - config, - 0, - vesta::Scalar::one(), - ) - }, - )?; - - Ok(()) - } - } - - let circuit = MyCircuit {}; - assert!(matches!( - MockProver::run(3, &circuit, vec![]).unwrap_err(), - Error::NotEnoughColumnsForConstants, - )); - } -} diff --git a/halo2_proofs_rm/src/circuit/floor_planner/v1.rs b/halo2_proofs_rm/src/circuit/floor_planner/v1.rs deleted file mode 100644 index fd26e681df..0000000000 --- a/halo2_proofs_rm/src/circuit/floor_planner/v1.rs +++ /dev/null @@ -1,549 +0,0 @@ -use std::fmt; - -use ff::Field; - -use crate::{ - circuit::{ - layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, - table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, - }, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, Error, Fixed, FloorPlanner, - Instance, Selector, TableColumn, - }, -}; - -mod strategy; - -/// The version 1 [`FloorPlanner`] provided by `halo2`. -/// -/// - No column optimizations are performed. Circuit configuration is left entirely to the -/// circuit designer. -/// - A dual-pass layouter is used to measures regions prior to assignment. -/// - Regions are measured as rectangles, bounded on the cells they assign. -/// - Regions are laid out using a greedy first-fit strategy, after sorting regions by -/// their "advice area" (number of advice columns * rows). -#[derive(Debug)] -pub struct V1; - -struct V1Plan<'a, F: Field, CS: Assignment + 'a> { - cs: &'a mut CS, - /// Stores the starting row for each region. - regions: Vec, - /// Stores the constants to be assigned, and the cells to which they are copied. 
- constants: Vec<(Assigned, Cell)>, - /// Stores the table fixed columns. - table_columns: Vec, -} - -impl<'a, F: Field, CS: Assignment + 'a> fmt::Debug for V1Plan<'a, F, CS> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("floor_planner::V1Plan").finish() - } -} - -impl<'a, F: Field, CS: Assignment + SyncDeps> V1Plan<'a, F, CS> { - /// Creates a new v1 layouter. - pub fn new(cs: &'a mut CS) -> Result { - let ret = V1Plan { - cs, - regions: vec![], - constants: vec![], - table_columns: vec![], - }; - Ok(ret) - } -} - -impl FloorPlanner for V1 { - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error> { - let mut plan = V1Plan::new(cs)?; - - // First pass: measure the regions within the circuit. - let mut measure = MeasurementPass::new(); - { - let pass = &mut measure; - circuit - .without_witnesses() - .synthesize(config.clone(), V1Pass::<_, CS>::measure(pass))?; - } - - // Planning: - // - Position the regions. - let (regions, column_allocations) = strategy::slot_in_biggest_advice_first(measure.regions); - plan.regions = regions; - - // - Determine how many rows our planned circuit will require. - let first_unassigned_row = column_allocations - .values() - .map(|a| a.unbounded_interval_start()) - .max() - .unwrap_or(0); - - // - Position the constants within those rows. - let fixed_allocations: Vec<_> = constants - .into_iter() - .map(|c| { - ( - c, - column_allocations - .get(&Column::::from(c).into()) - .cloned() - .unwrap_or_default(), - ) - }) - .collect(); - let constant_positions = || { - fixed_allocations.iter().flat_map(|(c, a)| { - let c = *c; - a.free_intervals(0, Some(first_unassigned_row)) - .flat_map(move |e| e.range().unwrap().map(move |i| (c, i))) - }) - }; - - // Second pass: - // - Assign the regions. 
- let mut assign = AssignmentPass::new(&mut plan); - { - let pass = &mut assign; - circuit.synthesize(config, V1Pass::assign(pass))?; - } - - // - Assign the constants. - if constant_positions().count() < plan.constants.len() { - return Err(Error::NotEnoughColumnsForConstants); - } - for ((fixed_column, fixed_row), (value, advice)) in - constant_positions().zip(plan.constants.into_iter()) - { - plan.cs.assign_fixed( - || format!("Constant({:?})", value.evaluate()), - fixed_column, - fixed_row, - || Value::known(value), - )?; - plan.cs.copy( - fixed_column.into(), - fixed_row, - advice.column, - *plan.regions[*advice.region_index] + advice.row_offset, - )?; - } - - Ok(()) - } -} - -#[derive(Debug)] -enum Pass<'p, 'a, F: Field, CS: Assignment + 'a> { - Measurement(&'p mut MeasurementPass), - Assignment(&'p mut AssignmentPass<'p, 'a, F, CS>), -} - -/// A single pass of the [`V1`] layouter. -#[derive(Debug)] -pub struct V1Pass<'p, 'a, F: Field, CS: Assignment + 'a>(Pass<'p, 'a, F, CS>); - -impl<'p, 'a, F: Field, CS: Assignment + 'a> V1Pass<'p, 'a, F, CS> { - fn measure(pass: &'p mut MeasurementPass) -> Self { - V1Pass(Pass::Measurement(pass)) - } - - fn assign(pass: &'p mut AssignmentPass<'p, 'a, F, CS>) -> Self { - V1Pass(Pass::Assignment(pass)) - } -} - -impl<'p, 'a, F: Field, CS: Assignment + SyncDeps> Layouter for V1Pass<'p, 'a, F, CS> { - type Root = Self; - - fn assign_region(&mut self, name: N, assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - match &mut self.0 { - Pass::Measurement(pass) => pass.assign_region(assignment), - Pass::Assignment(pass) => pass.assign_region(name, assignment), - } - } - - fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> - where - A: FnMut(Table<'_, F>) -> Result<(), Error>, - N: Fn() -> NR, - NR: Into, - { - match &mut self.0 { - Pass::Measurement(_) => Ok(()), - Pass::Assignment(pass) => pass.assign_table(name, assignment), - } - } - - fn 
constrain_instance( - &mut self, - cell: Cell, - instance: Column, - row: usize, - ) -> Result<(), Error> { - match &mut self.0 { - Pass::Measurement(_) => Ok(()), - Pass::Assignment(pass) => pass.constrain_instance(cell, instance, row), - } - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - match &self.0 { - Pass::Measurement(_) => Value::unknown(), - Pass::Assignment(pass) => pass.plan.cs.get_challenge(challenge), - } - } - - fn get_root(&mut self) -> &mut Self::Root { - self - } - - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - if let Pass::Assignment(pass) = &mut self.0 { - pass.plan.cs.push_namespace(name_fn); - } - } - - fn pop_namespace(&mut self, gadget_name: Option) { - if let Pass::Assignment(pass) = &mut self.0 { - pass.plan.cs.pop_namespace(gadget_name); - } - } -} - -/// Measures the circuit. -#[derive(Debug)] -pub struct MeasurementPass { - regions: Vec, -} - -impl MeasurementPass { - fn new() -> Self { - MeasurementPass { regions: vec![] } - } - - fn assign_region(&mut self, mut assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - { - let region_index = self.regions.len(); - - // Get shape of the region. - let mut shape = RegionShape::new(region_index.into()); - let result = { - let region: &mut dyn RegionLayouter = &mut shape; - assignment(region.into()) - }?; - self.regions.push(shape); - - Ok(result) - } -} - -/// Assigns the circuit. -#[derive(Debug)] -pub struct AssignmentPass<'p, 'a, F: Field, CS: Assignment + 'a> { - plan: &'p mut V1Plan<'a, F, CS>, - /// Counter tracking which region we need to assign next. 
- region_index: usize, -} - -impl<'p, 'a, F: Field, CS: Assignment + SyncDeps> AssignmentPass<'p, 'a, F, CS> { - fn new(plan: &'p mut V1Plan<'a, F, CS>) -> Self { - AssignmentPass { - plan, - region_index: 0, - } - } - - fn assign_region(&mut self, name: N, mut assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - // Get the next region we are assigning. - let region_index = self.region_index; - self.region_index += 1; - - self.plan.cs.enter_region(name); - let mut region = V1Region::new(self.plan, region_index.into()); - let result = { - let region: &mut dyn RegionLayouter = &mut region; - assignment(region.into()) - }?; - self.plan.cs.exit_region(); - - Ok(result) - } - - fn assign_table(&mut self, name: N, mut assignment: A) -> Result - where - A: FnMut(Table<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - // Maintenance hazard: there is near-duplicate code in `SingleChipLayouter::assign_table`. - - // Assign table cells. - self.plan.cs.enter_region(name); - let mut table = SimpleTableLayouter::new(self.plan.cs, &self.plan.table_columns); - let result = { - let table: &mut dyn TableLayouter = &mut table; - assignment(table.into()) - }?; - let default_and_assigned = table.default_and_assigned; - self.plan.cs.exit_region(); - - // Check that all table columns have the same length `first_unused`, - // and all cells up to that length are assigned. - let first_unused = compute_table_lengths(&default_and_assigned)?; - - // Record these columns so that we can prevent them from being used again. - for column in default_and_assigned.keys() { - self.plan.table_columns.push(*column); - } - - for (col, (default_val, _)) in default_and_assigned { - // default_val must be Some because we must have assigned - // at least one cell in each column, and in that case we checked - // that all cells up to first_unused were assigned. 
- self.plan - .cs - .fill_from_row(col.inner(), first_unused, default_val.unwrap())?; - } - - Ok(result) - } - - fn constrain_instance( - &mut self, - cell: Cell, - instance: Column, - row: usize, - ) -> Result<(), Error> { - self.plan.cs.copy( - cell.column, - *self.plan.regions[*cell.region_index] + cell.row_offset, - instance.into(), - row, - ) - } -} - -struct V1Region<'r, 'a, F: Field, CS: Assignment + 'a> { - plan: &'r mut V1Plan<'a, F, CS>, - region_index: RegionIndex, -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug for V1Region<'r, 'a, F, CS> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("V1Region") - .field("plan", &self.plan) - .field("region_index", &self.region_index) - .finish() - } -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> V1Region<'r, 'a, F, CS> { - fn new(plan: &'r mut V1Plan<'a, F, CS>, region_index: RegionIndex) -> Self { - V1Region { plan, region_index } - } -} - -impl<'r, 'a, F: Field, CS: Assignment + SyncDeps> RegionLayouter for V1Region<'r, 'a, F, CS> { - fn enable_selector<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - selector: &Selector, - offset: usize, - ) -> Result<(), Error> { - self.plan.cs.enable_selector( - annotation, - selector, - *self.plan.regions[*self.region_index] + offset, - ) - } - - fn assign_advice<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.plan.cs.assign_advice( - annotation, - column, - *self.plan.regions[*self.region_index] + offset, - to, - )?; - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn assign_advice_from_constant<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - constant: Assigned, - ) -> Result { - let advice = - self.assign_advice(annotation, column, offset, &mut || Value::known(constant))?; - 
self.constrain_constant(advice, constant)?; - - Ok(advice) - } - - fn assign_advice_from_instance<'v>( - &mut self, - annotation: &'v (dyn Fn() -> String + 'v), - instance: Column, - row: usize, - advice: Column, - offset: usize, - ) -> Result<(Cell, Value), Error> { - let value = self.plan.cs.query_instance(instance, row)?; - - let cell = self.assign_advice(annotation, advice, offset, &mut || value.to_field())?; - - self.plan.cs.copy( - cell.column, - *self.plan.regions[*cell.region_index] + cell.row_offset, - instance.into(), - row, - )?; - - Ok((cell, value)) - } - - fn instance_value( - &mut self, - instance: Column, - row: usize, - ) -> Result, Error> { - self.plan.cs.query_instance(instance, row) - } - - fn assign_fixed<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.plan.cs.assign_fixed( - annotation, - column, - *self.plan.regions[*self.region_index] + offset, - to, - )?; - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { - self.plan.constants.push((constant, cell)); - Ok(()) - } - - fn name_column<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - ) { - self.plan.cs.annotate_column(annotation, column) - } - - fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { - self.plan.cs.copy( - left.column, - *self.plan.regions[*left.region_index] + left.row_offset, - right.column, - *self.plan.regions[*right.region_index] + right.row_offset, - )?; - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::vesta; - - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; - - #[test] - fn not_enough_columns_for_constants() { - struct MyCircuit {} - - impl Circuit for MyCircuit { - type Config = Column; - type 
FloorPlanner = super::V1; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - MyCircuit {} - } - - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { - meta.advice_column() - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { - layouter.assign_region( - || "assign constant", - |mut region| { - region.assign_advice_from_constant( - || "one", - config, - 0, - vesta::Scalar::one(), - ) - }, - )?; - - Ok(()) - } - } - - let circuit = MyCircuit {}; - assert!(matches!( - MockProver::run(3, &circuit, vec![]).unwrap_err(), - Error::NotEnoughColumnsForConstants, - )); - } -} diff --git a/halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs b/halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs deleted file mode 100644 index 71745de245..0000000000 --- a/halo2_proofs_rm/src/circuit/floor_planner/v1/strategy.rs +++ /dev/null @@ -1,282 +0,0 @@ -use std::{ - cmp, - collections::{BTreeSet, HashMap}, - ops::Range, -}; - -use super::{RegionColumn, RegionShape}; -use crate::{circuit::RegionStart, plonk::Any}; - -/// A region allocated within a column. -#[derive(Clone, Default, Debug, PartialEq, Eq)] -struct AllocatedRegion { - // The starting position of the region. - start: usize, - // The length of the region. - length: usize, -} - -impl Ord for AllocatedRegion { - fn cmp(&self, other: &Self) -> cmp::Ordering { - self.start.cmp(&other.start) - } -} - -impl PartialOrd for AllocatedRegion { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -/// An area of empty space within a column. -pub(crate) struct EmptySpace { - // The starting position (inclusive) of the empty space. - start: usize, - // The ending position (exclusive) of the empty space, or `None` if unbounded. 
- end: Option, -} - -impl EmptySpace { - pub(crate) fn range(&self) -> Option> { - self.end.map(|end| self.start..end) - } -} - -/// Allocated rows within a column. -/// -/// This is a set of [a_start, a_end) pairs representing disjoint allocated intervals. -#[derive(Clone, Default, Debug)] -pub struct Allocations(BTreeSet); - -impl Allocations { - /// Returns the row that forms the unbounded unallocated interval [row, None). - pub(crate) fn unbounded_interval_start(&self) -> usize { - self.0 - .iter() - .last() - .map(|r| r.start + r.length) - .unwrap_or(0) - } - - /// Return all the *unallocated* nonempty intervals intersecting [start, end). - /// - /// `end = None` represents an unbounded end. - pub(crate) fn free_intervals( - &self, - start: usize, - end: Option, - ) -> impl Iterator + '_ { - self.0 - .iter() - .map(Some) - .chain(Some(None)) - .scan(start, move |row, region| { - Some(if let Some(region) = region { - if end.map(|end| region.start >= end).unwrap_or(false) { - None - } else { - let ret = if *row < region.start { - Some(EmptySpace { - start: *row, - end: Some(region.start), - }) - } else { - None - }; - - *row = cmp::max(*row, region.start + region.length); - - ret - } - } else if end.map(|end| *row < end).unwrap_or(true) { - Some(EmptySpace { start: *row, end }) - } else { - None - }) - }) - .flatten() - } -} - -/// Allocated rows within a circuit. -pub type CircuitAllocations = HashMap; - -/// - `start` is the current start row of the region (not of this column). -/// - `slack` is the maximum number of rows the start could be moved down, taking into -/// account prior columns. 
-fn first_fit_region( - column_allocations: &mut CircuitAllocations, - region_columns: &[RegionColumn], - region_length: usize, - start: usize, - slack: Option, -) -> Option { - let (c, remaining_columns) = match region_columns.split_first() { - Some(cols) => cols, - None => return Some(start), - }; - let end = slack.map(|slack| start + region_length + slack); - - // Iterate over the unallocated non-empty intervals in c that intersect [start, end). - for space in column_allocations - .entry(*c) - .or_default() - .clone() - .free_intervals(start, end) - { - // Do we have enough room for this column of the region in this interval? - let s_slack = space - .end - .map(|end| (end as isize - space.start as isize) - region_length as isize); - if let Some((slack, s_slack)) = slack.zip(s_slack) { - assert!(s_slack <= slack as isize); - } - if s_slack.unwrap_or(0) >= 0 { - let row = first_fit_region( - column_allocations, - remaining_columns, - region_length, - space.start, - s_slack.map(|s| s as usize), - ); - if let Some(row) = row { - if let Some(end) = end { - assert!(row + region_length <= end); - } - column_allocations - .get_mut(c) - .unwrap() - .0 - .insert(AllocatedRegion { - start: row, - length: region_length, - }); - return Some(row); - } - } - } - - // No placement worked; the caller will need to try other possibilities. - None -} - -/// Positions the regions starting at the earliest row for which none of the columns are -/// in use, taking into account gaps between earlier regions. -fn slot_in( - region_shapes: Vec, -) -> (Vec<(RegionStart, RegionShape)>, CircuitAllocations) { - // Tracks the empty regions for each column. - let mut column_allocations: CircuitAllocations = Default::default(); - - let regions = region_shapes - .into_iter() - .map(|region| { - // Sort the region's columns to ensure determinism. - // - An unstable sort is fine, because region.columns() returns a set. - // - The sort order relies on Column's Ord implementation! 
- let mut region_columns: Vec<_> = region.columns().iter().cloned().collect(); - region_columns.sort_unstable(); - - let region_start = first_fit_region( - &mut column_allocations, - ®ion_columns, - region.row_count(), - 0, - None, - ) - .expect("We can always fit a region somewhere"); - - (region_start.into(), region) - }) - .collect(); - - // Return the column allocations for potential further processing. - (regions, column_allocations) -} - -/// Sorts the regions by advice area and then lays them out with the [`slot_in`] strategy. -pub fn slot_in_biggest_advice_first( - region_shapes: Vec, -) -> (Vec, CircuitAllocations) { - let mut sorted_regions: Vec<_> = region_shapes.into_iter().collect(); - let sort_key = |shape: &RegionShape| { - // Count the number of advice columns - let advice_cols = shape - .columns() - .iter() - .filter(|c| match c { - RegionColumn::Column(c) => matches!(c.column_type(), Any::Advice(_)), - _ => false, - }) - .count(); - // Sort by advice area (since this has the most contention). - advice_cols * shape.row_count() - }; - - // This used to incorrectly use `sort_unstable_by_key` with non-unique keys, which gave - // output that differed between 32-bit and 64-bit platforms, and potentially between Rust - // versions. - // We now use `sort_by_cached_key` with non-unique keys, and rely on `region_shapes` - // being sorted by region index (which we also rely on below to return `RegionStart`s - // in the correct order). - #[cfg(not(feature = "floor-planner-v1-legacy-pdqsort"))] - sorted_regions.sort_by_cached_key(sort_key); - - // To preserve compatibility, when the "floor-planner-v1-legacy-pdqsort" feature is enabled, - // we use a copy of the pdqsort implementation from the Rust 1.56.1 standard library, fixed - // to its behaviour on 64-bit platforms. 
- // https://github.com/rust-lang/rust/blob/1.56.1/library/core/src/slice/mod.rs#L2365-L2402 - #[cfg(feature = "floor-planner-v1-legacy-pdqsort")] - halo2_legacy_pdqsort::sort::quicksort(&mut sorted_regions, |a, b| sort_key(a).lt(&sort_key(b))); - - sorted_regions.reverse(); - - // Lay out the sorted regions. - let (mut regions, column_allocations) = slot_in(sorted_regions); - - // Un-sort the regions so they match the original indexing. - regions.sort_unstable_by_key(|(_, region)| region.region_index().0); - let regions = regions.into_iter().map(|(start, _)| start).collect(); - - (regions, column_allocations) -} - -#[test] -fn test_slot_in() { - use crate::plonk::Column; - - let regions = vec![ - RegionShape { - region_index: 0.into(), - columns: vec![Column::new(0, Any::advice()), Column::new(1, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 15, - }, - RegionShape { - region_index: 1.into(), - columns: vec![Column::new(2, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 10, - }, - RegionShape { - region_index: 2.into(), - columns: vec![Column::new(2, Any::advice()), Column::new(0, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 10, - }, - ]; - assert_eq!( - slot_in(regions) - .0 - .into_iter() - .map(|(i, _)| i) - .collect::>(), - vec![0.into(), 0.into(), 15.into()] - ); -} diff --git a/halo2_proofs_rm/src/circuit/layouter.rs b/halo2_proofs_rm/src/circuit/layouter.rs deleted file mode 100644 index f939c3fca5..0000000000 --- a/halo2_proofs_rm/src/circuit/layouter.rs +++ /dev/null @@ -1,315 +0,0 @@ -//! Implementations of common circuit layouters. 
- -use std::cmp; -use std::collections::HashSet; -use std::fmt; - -use ff::Field; - -pub use super::table_layouter::TableLayouter; -use super::{Cell, RegionIndex, Value}; -use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector}; - -/// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. -#[cfg(feature = "thread-safe-region")] -pub trait SyncDeps: Send + Sync {} - -#[cfg(feature = "thread-safe-region")] -impl SyncDeps for T {} - -/// Intermediate trait requirements for [`RegionLayouter`]. -#[cfg(not(feature = "thread-safe-region"))] -pub trait SyncDeps {} - -#[cfg(not(feature = "thread-safe-region"))] -impl SyncDeps for T {} - -/// Helper trait for implementing a custom [`Layouter`]. -/// -/// This trait is used for implementing region assignments: -/// -/// ```ignore -/// impl<'a, F: Field, C: Chip, CS: Assignment + 'a> Layouter for MyLayouter<'a, C, CS> { -/// fn assign_region( -/// &mut self, -/// assignment: impl FnOnce(Region<'_, F, C>) -> Result<(), Error>, -/// ) -> Result<(), Error> { -/// let region_index = self.regions.len(); -/// self.regions.push(self.current_gate); -/// -/// let mut region = MyRegion::new(self, region_index); -/// { -/// let region: &mut dyn RegionLayouter = &mut region; -/// assignment(region.into())?; -/// } -/// self.current_gate += region.row_count; -/// -/// Ok(()) -/// } -/// } -/// ``` -/// -/// TODO: It would be great if we could constrain the columns in these types to be -/// "logical" columns that are guaranteed to correspond to the chip (and have come from -/// `Chip::Config`). -/// -/// [`Layouter`]: super::Layouter -pub trait RegionLayouter: fmt::Debug + SyncDeps { - /// Enables a selector at the given offset. - fn enable_selector<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - selector: &Selector, - offset: usize, - ) -> Result<(), Error>; - - /// Allows the circuit implementor to name/annotate a Column within a Region context. 
- /// - /// This is useful in order to improve the amount of information that `prover.verify()` - /// and `prover.assert_satisfied()` can provide. - fn name_column<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - ); - - /// Assign an advice column value (witness) - fn assign_advice<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result; - - /// Assigns a constant value to the column `advice` at `offset` within this region. - /// - /// The constant value will be assigned to a cell within one of the fixed columns - /// configured via `ConstraintSystem::enable_constant`. - /// - /// Returns the advice cell that has been equality-constrained to the constant. - fn assign_advice_from_constant<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - constant: Assigned, - ) -> Result; - - /// Assign the value of the instance column's cell at absolute location - /// `row` to the column `advice` at `offset` within this region. - /// - /// Returns the advice cell that has been equality-constrained to the - /// instance cell, and its value if known. - fn assign_advice_from_instance<'v>( - &mut self, - annotation: &'v (dyn Fn() -> String + 'v), - instance: Column, - row: usize, - advice: Column, - offset: usize, - ) -> Result<(Cell, Value), Error>; - - /// Returns the value of the instance column's cell at absolute location `row`. - fn instance_value(&mut self, instance: Column, row: usize) - -> Result, Error>; - - /// Assigns a fixed value - fn assign_fixed<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result; - - /// Constrains a cell to have a constant value. - /// - /// Returns an error if the cell is in a column where equality has not been enabled. 
- fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error>; - - /// Constraint two cells to have the same value. - /// - /// Returns an error if either of the cells is not within the given permutation. - fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error>; -} - -/// The shape of a region. For a region at a certain index, we track -/// the set of columns it uses as well as the number of rows it uses. -#[derive(Clone, Debug)] -pub struct RegionShape { - pub(super) region_index: RegionIndex, - pub(super) columns: HashSet, - pub(super) row_count: usize, -} - -/// The virtual column involved in a region. This includes concrete columns, -/// as well as selectors that are not concrete columns at this stage. -#[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] -pub enum RegionColumn { - /// Concrete column - Column(Column), - /// Virtual column representing a (boolean) selector - Selector(Selector), -} - -impl From> for RegionColumn { - fn from(column: Column) -> RegionColumn { - RegionColumn::Column(column) - } -} - -impl From for RegionColumn { - fn from(selector: Selector) -> RegionColumn { - RegionColumn::Selector(selector) - } -} - -impl Ord for RegionColumn { - fn cmp(&self, other: &Self) -> cmp::Ordering { - match (self, other) { - (Self::Column(ref a), Self::Column(ref b)) => a.cmp(b), - (Self::Selector(ref a), Self::Selector(ref b)) => a.0.cmp(&b.0), - (Self::Column(_), Self::Selector(_)) => cmp::Ordering::Less, - (Self::Selector(_), Self::Column(_)) => cmp::Ordering::Greater, - } - } -} - -impl PartialOrd for RegionColumn { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl RegionShape { - /// Create a new `RegionShape` for a region at `region_index`. - pub fn new(region_index: RegionIndex) -> Self { - RegionShape { - region_index, - columns: HashSet::default(), - row_count: 0, - } - } - - /// Get the `region_index` of a `RegionShape`. 
- pub fn region_index(&self) -> RegionIndex { - self.region_index - } - - /// Get a reference to the set of `columns` used in a `RegionShape`. - pub fn columns(&self) -> &HashSet { - &self.columns - } - - /// Get the `row_count` of a `RegionShape`. - pub fn row_count(&self) -> usize { - self.row_count - } -} - -impl RegionLayouter for RegionShape { - fn enable_selector<'v>( - &'v mut self, - _: &'v (dyn Fn() -> String + 'v), - selector: &Selector, - offset: usize, - ) -> Result<(), Error> { - // Track the selector's fixed column as part of the region's shape. - self.columns.insert((*selector).into()); - self.row_count = cmp::max(self.row_count, offset + 1); - Ok(()) - } - - fn assign_advice<'v>( - &'v mut self, - _: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - _to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.columns.insert(Column::::from(column).into()); - self.row_count = cmp::max(self.row_count, offset + 1); - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn assign_advice_from_constant<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - constant: Assigned, - ) -> Result { - // The rest is identical to witnessing an advice cell. 
- self.assign_advice(annotation, column, offset, &mut || Value::known(constant)) - } - - fn assign_advice_from_instance<'v>( - &mut self, - _: &'v (dyn Fn() -> String + 'v), - _: Column, - _: usize, - advice: Column, - offset: usize, - ) -> Result<(Cell, Value), Error> { - self.columns.insert(Column::::from(advice).into()); - self.row_count = cmp::max(self.row_count, offset + 1); - - Ok(( - Cell { - region_index: self.region_index, - row_offset: offset, - column: advice.into(), - }, - Value::unknown(), - )) - } - - fn instance_value( - &mut self, - _instance: Column, - _row: usize, - ) -> Result, Error> { - Ok(Value::unknown()) - } - - fn assign_fixed<'v>( - &'v mut self, - _: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - _to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - self.columns.insert(Column::::from(column).into()); - self.row_count = cmp::max(self.row_count, offset + 1); - - Ok(Cell { - region_index: self.region_index, - row_offset: offset, - column: column.into(), - }) - } - - fn name_column<'v>( - &'v mut self, - _annotation: &'v (dyn Fn() -> String + 'v), - _column: Column, - ) { - // Do nothing - } - - fn constrain_constant(&mut self, _cell: Cell, _constant: Assigned) -> Result<(), Error> { - // Global constants don't affect the region shape. - Ok(()) - } - - fn constrain_equal(&mut self, _left: Cell, _right: Cell) -> Result<(), Error> { - // Equality constraints don't affect the region shape. - Ok(()) - } -} diff --git a/halo2_proofs_rm/src/circuit/table_layouter.rs b/halo2_proofs_rm/src/circuit/table_layouter.rs deleted file mode 100644 index 06338bb896..0000000000 --- a/halo2_proofs_rm/src/circuit/table_layouter.rs +++ /dev/null @@ -1,413 +0,0 @@ -//! Implementations of common table layouters. 
- -use std::{ - collections::HashMap, - fmt::{self, Debug}, -}; - -use ff::Field; - -use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; - -use super::Value; - -/// Helper trait for implementing a custom [`Layouter`]. -/// -/// This trait is used for implementing table assignments. -/// -/// [`Layouter`]: super::Layouter -pub trait TableLayouter: std::fmt::Debug { - /// Assigns a fixed value to a table cell. - /// - /// Returns an error if the table cell has already been assigned to. - fn assign_cell<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: TableColumn, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result<(), Error>; -} - -/// The default value to fill a table column with. -/// -/// - The outer `Option` tracks whether the value in row 0 of the table column has been -/// assigned yet. This will always be `Some` once a valid table has been completely -/// assigned. -/// - The inner `Value` tracks whether the underlying `Assignment` is evaluating -/// witnesses or not. -type DefaultTableValue = Option>>; - -/// A table layouter that can be used to assign values to a table. 
-pub struct SimpleTableLayouter<'r, 'a, F: Field, CS: Assignment + 'a> { - cs: &'a mut CS, - used_columns: &'r [TableColumn], - /// maps from a fixed column to a pair (default value, vector saying which rows are assigned) - pub default_and_assigned: HashMap, Vec)>, -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> fmt::Debug for SimpleTableLayouter<'r, 'a, F, CS> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SimpleTableLayouter") - .field("used_columns", &self.used_columns) - .field("default_and_assigned", &self.default_and_assigned) - .finish() - } -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> SimpleTableLayouter<'r, 'a, F, CS> { - /// Returns a new SimpleTableLayouter - pub fn new(cs: &'a mut CS, used_columns: &'r [TableColumn]) -> Self { - SimpleTableLayouter { - cs, - used_columns, - default_and_assigned: HashMap::default(), - } - } -} - -impl<'r, 'a, F: Field, CS: Assignment + 'a> TableLayouter - for SimpleTableLayouter<'r, 'a, F, CS> -{ - fn assign_cell<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: TableColumn, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result<(), Error> { - if self.used_columns.contains(&column) { - return Err(Error::TableError(TableError::UsedColumn(column))); - } - - let entry = self.default_and_assigned.entry(column).or_default(); - - let mut value = Value::unknown(); - self.cs.assign_fixed( - annotation, - column.inner(), - offset, // tables are always assigned starting at row 0 - || { - let res = to(); - value = res; - res - }, - )?; - - match (entry.0.is_none(), offset) { - // Use the value at offset 0 as the default value for this table column. - (true, 0) => entry.0 = Some(value), - // Since there is already an existing default value for this table column, - // the caller should not be attempting to assign another value at offset 0. 
- (false, 0) => { - return Err(Error::TableError(TableError::OverwriteDefault( - column, - format!("{:?}", entry.0.unwrap()), - format!("{value:?}"), - ))) - } - _ => (), - } - if entry.1.len() <= offset { - entry.1.resize(offset + 1, false); - } - entry.1[offset] = true; - - Ok(()) - } -} - -pub(crate) fn compute_table_lengths( - default_and_assigned: &HashMap, Vec)>, -) -> Result { - let column_lengths: Result, Error> = default_and_assigned - .iter() - .map(|(col, (default_value, assigned))| { - if default_value.is_none() || assigned.is_empty() { - return Err(Error::TableError(TableError::ColumnNotAssigned(*col))); - } - if assigned.iter().all(|b| *b) { - // All values in the column have been assigned - Ok((col, assigned.len())) - } else { - Err(Error::TableError(TableError::ColumnNotAssigned(*col))) - } - }) - .collect(); - let column_lengths = column_lengths?; - column_lengths - .into_iter() - .try_fold((None, 0), |acc, (col, col_len)| { - if acc.1 == 0 || acc.1 == col_len { - Ok((Some(*col), col_len)) - } else { - let mut cols = [(*col, col_len), (acc.0.unwrap(), acc.1)]; - cols.sort(); - Err(Error::TableError(TableError::UnevenColumnLengths( - cols[0], cols[1], - ))) - } - }) - .map(|col_len| col_len.1) -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use crate::{ - circuit::{Layouter, SimpleFloorPlanner}, - dev::MockProver, - plonk::{Circuit, ConstraintSystem}, - poly::Rotation, - }; - - use super::*; - - #[test] - fn table_no_default() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = meta.lookup_table_column(); - - meta.lookup("", 
|cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "duplicate assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 1, - || Value::known(Fp::zero()), - ) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } not fully assigned. Help: assign a value at offset 0." - ); - } - - #[test] - fn table_overwrite_default() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = meta.lookup_table_column(); - - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "duplicate assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 0, - || Value::known(Fp::zero()), - )?; - table.assign_cell( - || "duplicate", - config.table, - 0, - || Value::known(Fp::zero()), - ) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "Attempted to overwrite default value Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } with Value { inner: 
Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } in TableColumn { inner: Column { index: 0, column_type: Fixed } }" - ); - } - - #[test] - fn table_reuse_column() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = meta.lookup_table_column(); - - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "first assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 0, - || Value::known(Fp::zero()), - ) - }, - )?; - - layouter.assign_table( - || "reuse", - |mut table| { - table.assign_cell(|| "reuse", config.table, 1, || Value::known(Fp::zero())) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } has already been used" - ); - } - - #[test] - fn table_uneven_columns() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: (TableColumn, TableColumn), - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = 
(meta.lookup_table_column(), meta.lookup_table_column()); - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - - vec![(a.clone(), table.0), (a, table.1)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "table with uneven columns", - |mut table| { - table.assign_cell(|| "", config.table.0, 0, || Value::known(Fp::zero()))?; - table.assign_cell(|| "", config.table.0, 1, || Value::known(Fp::zero()))?; - - table.assign_cell(|| "", config.table.1, 0, || Value::known(Fp::zero())) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } has length 2 while TableColumn { inner: Column { index: 1, column_type: Fixed } } has length 1" - ); - } -} diff --git a/halo2_proofs_rm/src/circuit/value.rs b/halo2_proofs_rm/src/circuit/value.rs deleted file mode 100644 index f3ea6a39ea..0000000000 --- a/halo2_proofs_rm/src/circuit/value.rs +++ /dev/null @@ -1,703 +0,0 @@ -use std::borrow::Borrow; -use std::ops::{Add, Mul, Neg, Sub}; - -use group::ff::Field; - -use crate::plonk::{Assigned, Error}; - -/// A value that might exist within a circuit. -/// -/// This behaves like `Option` but differs in two key ways: -/// - It does not expose the enum cases, or provide an `Option::unwrap` equivalent. This -/// helps to ensure that unwitnessed values correctly propagate. -/// - It provides pass-through implementations of common traits such as `Add` and `Mul`, -/// for improved usability. -#[derive(Clone, Copy, Debug)] -pub struct Value { - inner: Option, -} - -impl Default for Value { - fn default() -> Self { - Self::unknown() - } -} - -impl Value { - /// Constructs an unwitnessed value. - pub const fn unknown() -> Self { - Self { inner: None } - } - - /// Constructs a known value. 
- /// - /// # Examples - /// - /// ``` - /// use halo2_proofs::circuit::Value; - /// - /// let v = Value::known(37); - /// ``` - pub const fn known(value: V) -> Self { - Self { inner: Some(value) } - } - - /// Obtains the inner value for assigning into the circuit. - /// - /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. - pub(crate) fn assign(self) -> Result { - self.inner.ok_or(Error::Synthesis) - } - - /// Converts from `&Value` to `Value<&V>`. - pub fn as_ref(&self) -> Value<&V> { - Value { - inner: self.inner.as_ref(), - } - } - - /// Converts from `&mut Value` to `Value<&mut V>`. - pub fn as_mut(&mut self) -> Value<&mut V> { - Value { - inner: self.inner.as_mut(), - } - } - - /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! - pub(crate) fn into_option(self) -> Option { - self.inner - } - - /// Enforces an assertion on the contained value, if known. - /// - /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce - /// circuit constraints with this method! - /// - /// # Panics - /// - /// Panics if `f` returns `false`. - pub fn assert_if_known bool>(&self, f: F) { - if let Some(value) = self.inner.as_ref() { - assert!(f(value)); - } - } - - /// Checks the contained value for an error condition, if known. - /// - /// The error check is ignored if `self` is [`Value::unknown()`]. Do not try to - /// enforce circuit constraints with this method! - pub fn error_if_known_and bool>(&self, f: F) -> Result<(), Error> { - match self.inner.as_ref() { - Some(value) if f(value) => Err(Error::Synthesis), - _ => Ok(()), - } - } - - /// Maps a `Value` to `Value` by applying a function to the contained value. - pub fn map W>(self, f: F) -> Value { - Value { - inner: self.inner.map(f), - } - } - - /// Returns [`Value::unknown()`] if the value is [`Value::unknown()`], otherwise calls - /// `f` with the wrapped value and returns the result. 
- pub fn and_then Value>(self, f: F) -> Value { - match self.inner { - Some(v) => f(v), - None => Value::unknown(), - } - } - - /// Zips `self` with another `Value`. - /// - /// If `self` is `Value::known(s)` and `other` is `Value::known(o)`, this method - /// returns `Value::known((s, o))`. Otherwise, [`Value::unknown()`] is returned. - pub fn zip(self, other: Value) -> Value<(V, W)> { - Value { - inner: self.inner.zip(other.inner), - } - } -} - -impl Value<(V, W)> { - /// Unzips a value containing a tuple of two values. - /// - /// If `self` is `Value::known((a, b)), this method returns - /// `(Value::known(a), Value::known(b))`. Otherwise, - /// `(Value::unknown(), Value::unknown())` is returned. - pub fn unzip(self) -> (Value, Value) { - match self.inner { - Some((a, b)) => (Value::known(a), Value::known(b)), - None => (Value::unknown(), Value::unknown()), - } - } -} - -impl Value<&V> { - /// Maps a `Value<&V>` to a `Value` by copying the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn copied(self) -> Value - where - V: Copy, - { - Value { - inner: self.inner.copied(), - } - } - - /// Maps a `Value<&V>` to a `Value` by cloning the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn cloned(self) -> Value - where - V: Clone, - { - Value { - inner: self.inner.cloned(), - } - } -} - -impl Value<&mut V> { - /// Maps a `Value<&mut V>` to a `Value` by copying the contents of the value. - #[must_use = "`self` will be dropped if the result is not used"] - pub fn copied(self) -> Value - where - V: Copy, - { - Value { - inner: self.inner.copied(), - } - } - - /// Maps a `Value<&mut V>` to a `Value` by cloning the contents of the value. 
- #[must_use = "`self` will be dropped if the result is not used"] - pub fn cloned(self) -> Value - where - V: Clone, - { - Value { - inner: self.inner.cloned(), - } - } -} - -impl Value<[V; LEN]> { - /// Transposes a `Value<[V; LEN]>` into a `[Value; LEN]`. - /// - /// [`Value::unknown()`] will be mapped to `[Value::unknown(); LEN]`. - pub fn transpose_array(self) -> [Value; LEN] { - let mut ret = [Value::unknown(); LEN]; - if let Some(arr) = self.inner { - for (entry, value) in ret.iter_mut().zip(arr) { - *entry = Value::known(value); - } - } - ret - } -} - -impl Value -where - I: IntoIterator, - I::IntoIter: ExactSizeIterator, -{ - /// Transposes a `Value>` into a `Vec>`. - /// - /// [`Value::unknown()`] will be mapped to `vec![Value::unknown(); length]`. - /// - /// # Panics - /// - /// Panics if `self` is `Value::known(values)` and `values.len() != length`. - pub fn transpose_vec(self, length: usize) -> Vec> { - match self.inner { - Some(values) => { - let values = values.into_iter(); - assert_eq!(values.len(), length); - values.map(Value::known).collect() - } - None => (0..length).map(|_| Value::unknown()).collect(), - } - } -} - -// -// FromIterator -// - -impl> FromIterator> for Value { - /// Takes each element in the [`Iterator`]: if it is [`Value::unknown()`], no further - /// elements are taken, and the [`Value::unknown()`] is returned. Should no - /// [`Value::unknown()`] occur, a container of type `V` containing the values of each - /// [`Value`] is returned. 
- fn from_iter>>(iter: I) -> Self { - Self { - inner: iter.into_iter().map(|v| v.inner).collect(), - } - } -} - -// -// Neg -// - -impl Neg for Value { - type Output = Value; - - fn neg(self) -> Self::Output { - Value { - inner: self.inner.map(|v| -v), - } - } -} - -// -// Add -// - -impl Add for Value -where - V: Add, -{ - type Output = Value; - - fn add(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for &Value -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, b)| a + b), - } - } -} - -impl Add> for Value -where - for<'v> V: Add<&'v V, Output = O>, -{ - type Output = Value; - - fn add(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add> for Value<&V> -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add<&Value> for Value -where - for<'v> V: Add<&'v V, Output = O>, -{ - type Output = Value; - - fn add(self, rhs: &Self) -> Self::Output { - self + rhs.as_ref() - } -} - -impl Add> for &Value -where - for<'v> &'v V: Add, -{ - type Output = Value; - - fn add(self, rhs: Value) -> Self::Output { - self.as_ref() + rhs - } -} - -// -// Sub -// - -impl Sub for Value -where - V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for &Value -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, b)| a - b), - } - } -} - -impl Sub> for Value -where - for<'v> V: Sub<&'v V, Output = O>, -{ - type Output = Value; - - fn 
sub(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub> for Value<&V> -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub<&Value> for Value -where - for<'v> V: Sub<&'v V, Output = O>, -{ - type Output = Value; - - fn sub(self, rhs: &Self) -> Self::Output { - self - rhs.as_ref() - } -} - -impl Sub> for &Value -where - for<'v> &'v V: Sub, -{ - type Output = Value; - - fn sub(self, rhs: Value) -> Self::Output { - self.as_ref() - rhs - } -} - -// -// Mul -// - -impl Mul for Value -where - V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Self) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for &Value -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Self) -> Self::Output { - Value { - inner: self - .inner - .as_ref() - .zip(rhs.inner.as_ref()) - .map(|(a, b)| a * b), - } - } -} - -impl Mul> for Value -where - for<'v> V: Mul<&'v V, Output = O>, -{ - type Output = Value; - - fn mul(self, rhs: Value<&V>) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul> for Value<&V> -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul<&Value> for Value -where - for<'v> V: Mul<&'v V, Output = O>, -{ - type Output = Value; - - fn mul(self, rhs: &Self) -> Self::Output { - self * rhs.as_ref() - } -} - -impl Mul> for &Value -where - for<'v> &'v V: Mul, -{ - type Output = Value; - - fn mul(self, rhs: Value) -> Self::Output { - self.as_ref() * rhs - } -} - -// -// Assigned -// - -impl From> for Value> { - fn from(value: Value) -> Self { - Self { - inner: value.inner.map(Assigned::from), - } - } -} 
- -impl Add> for Value> { - type Output = Value>; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for Value> { - type Output = Value>; - - fn add(self, rhs: F) -> Self::Output { - self + Value::known(rhs) - } -} - -impl Add> for Value<&Assigned> { - type Output = Value>; - - fn add(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), - } - } -} - -impl Add for Value<&Assigned> { - type Output = Value>; - - fn add(self, rhs: F) -> Self::Output { - self + Value::known(rhs) - } -} - -impl Sub> for Value> { - type Output = Value>; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for Value> { - type Output = Value>; - - fn sub(self, rhs: F) -> Self::Output { - self - Value::known(rhs) - } -} - -impl Sub> for Value<&Assigned> { - type Output = Value>; - - fn sub(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), - } - } -} - -impl Sub for Value<&Assigned> { - type Output = Value>; - - fn sub(self, rhs: F) -> Self::Output { - self - Value::known(rhs) - } -} - -impl Mul> for Value> { - type Output = Value>; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for Value> { - type Output = Value>; - - fn mul(self, rhs: F) -> Self::Output { - self * Value::known(rhs) - } -} - -impl Mul> for Value<&Assigned> { - type Output = Value>; - - fn mul(self, rhs: Value) -> Self::Output { - Value { - inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), - } - } -} - -impl Mul for Value<&Assigned> { - type Output = Value>; - - fn mul(self, rhs: F) -> Self::Output { - self * Value::known(rhs) - } -} - -impl Value { - /// Returns the field element corresponding to this value. 
- pub fn to_field(&self) -> Value> - where - for<'v> Assigned: From<&'v V>, - { - Value { - inner: self.inner.as_ref().map(|v| v.into()), - } - } - - /// Returns the field element corresponding to this value. - pub fn into_field(self) -> Value> - where - V: Into>, - { - Value { - inner: self.inner.map(|v| v.into()), - } - } - - /// Doubles this field element. - /// - /// # Examples - /// - /// If you have a `Value`, convert it to `Value>` first: - /// ``` - /// # use halo2curves::pasta::pallas::Base as F; - /// use halo2_proofs::{circuit::Value, plonk::Assigned}; - /// - /// let v = Value::known(F::from(2)); - /// let v: Value> = v.into(); - /// v.double(); - /// ``` - pub fn double(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().double()), - } - } - - /// Squares this field element. - pub fn square(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().square()), - } - } - - /// Cubes this field element. - pub fn cube(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().cube()), - } - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Value> - where - V: Borrow>, - { - Value { - inner: self.inner.as_ref().map(|v| v.borrow().invert()), - } - } -} - -impl Value> { - /// Evaluates this value directly, performing an unbatched inversion if necessary. - /// - /// If the denominator is zero, the returned value is zero. - pub fn evaluate(self) -> Value { - Value { - inner: self.inner.map(|v| v.evaluate()), - } - } -} diff --git a/halo2_proofs_rm/src/dev.rs b/halo2_proofs_rm/src/dev.rs deleted file mode 100644 index 7a3aca10cc..0000000000 --- a/halo2_proofs_rm/src/dev.rs +++ /dev/null @@ -1,1854 +0,0 @@ -//! Tools for developing circuits. 
- -use std::collections::HashMap; -use std::collections::HashSet; -use std::iter; -use std::ops::{Add, Mul, Neg, Range}; - -use blake2b_simd::blake2b; -use ff::Field; -use ff::FromUniformBytes; - -use crate::plonk::permutation::keygen::Assembly; -use crate::{ - circuit, - plonk::{ - permutation, - sealed::{self, SealedPhase}, - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Expression, FirstPhase, Fixed, FloorPlanner, Instance, Phase, Selector, - }, -}; - -use crate::multicore::{ - IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, - ParallelSliceMut, -}; - -pub mod metadata; -use metadata::Column as ColumnMetadata; -mod util; - -mod failure; -pub use failure::{FailureLocation, VerifyFailure}; - -pub mod cost; -pub use cost::CircuitCost; - -#[cfg(feature = "cost-estimator")] -pub mod cost_model; - -mod gates; -pub use gates::CircuitGates; - -mod tfp; -pub use tfp::TracingFloorPlanner; - -#[cfg(feature = "dev-graph")] -mod graph; - -#[cfg(feature = "dev-graph")] -#[cfg_attr(docsrs, doc(cfg(feature = "dev-graph")))] -pub use graph::{circuit_dot_graph, layout::CircuitLayout}; - -#[derive(Debug)] -struct Region { - /// The name of the region. Not required to be unique. - name: String, - /// The columns involved in this region. - columns: HashSet>, - /// The rows that this region starts and ends on, if known. - rows: Option<(usize, usize)>, - /// The selectors that have been enabled in this region. All other selectors are by - /// construction not enabled. - enabled_selectors: HashMap>, - /// Annotations given to Advice, Fixed or Instance columns within a region context. - annotations: HashMap, - /// The cells assigned in this region. We store this as a `Vec` so that if any cells - /// are double-assigned, they will be visibly darker. 
- cells: HashMap<(Column, usize), usize>, -} - -impl Region { - fn update_extent(&mut self, column: Column, row: usize) { - self.columns.insert(column); - - // The region start is the earliest row assigned to. - // The region end is the latest row assigned to. - let (mut start, mut end) = self.rows.unwrap_or((row, row)); - if row < start { - // The first row assigned was not at start 0 within the region. - start = row; - } - if row > end { - end = row; - } - self.rows = Some((start, end)); - } -} - -/// The value of a particular cell within the circuit. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum CellValue { - /// An unassigned cell. - Unassigned, - /// A cell that has been assigned a value. - Assigned(F), - /// A unique poisoned cell. - Poison(usize), -} - -/// A value within an expression. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)] -enum Value { - Real(F), - Poison, -} - -impl From> for Value { - fn from(value: CellValue) -> Self { - match value { - // Cells that haven't been explicitly assigned to, default to zero. - CellValue::Unassigned => Value::Real(F::ZERO), - CellValue::Assigned(v) => Value::Real(v), - CellValue::Poison(_) => Value::Poison, - } - } -} - -impl Neg for Value { - type Output = Self; - - fn neg(self) -> Self::Output { - match self { - Value::Real(a) => Value::Real(-a), - _ => Value::Poison, - } - } -} - -impl Add for Value { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - match (self, rhs) { - (Value::Real(a), Value::Real(b)) => Value::Real(a + b), - _ => Value::Poison, - } - } -} - -impl Mul for Value { - type Output = Self; - - fn mul(self, rhs: Self) -> Self::Output { - match (self, rhs) { - (Value::Real(a), Value::Real(b)) => Value::Real(a * b), - // If poison is multiplied by zero, then we treat the poison as unconstrained - // and we don't propagate it. 
- (Value::Real(x), Value::Poison) | (Value::Poison, Value::Real(x)) - if x.is_zero_vartime() => - { - Value::Real(F::ZERO) - } - _ => Value::Poison, - } - } -} - -impl Mul for Value { - type Output = Self; - - fn mul(self, rhs: F) -> Self::Output { - match self { - Value::Real(lhs) => Value::Real(lhs * rhs), - // If poison is multiplied by zero, then we treat the poison as unconstrained - // and we don't propagate it. - Value::Poison if rhs.is_zero_vartime() => Value::Real(F::ZERO), - _ => Value::Poison, - } - } -} - -/// A test prover for debugging circuits. -/// -/// The normal proving process, when applied to a buggy circuit implementation, might -/// return proofs that do not validate when they should, but it can't indicate anything -/// other than "something is invalid". `MockProver` can be used to figure out _why_ these -/// are invalid: it stores all the private inputs along with the circuit internals, and -/// then checks every constraint manually. -/// -/// # Examples -/// -/// ``` -/// use halo2_proofs::{ -/// circuit::{Layouter, SimpleFloorPlanner, Value}, -/// dev::{FailureLocation, MockProver, VerifyFailure}, -/// plonk::{Advice, Any, Circuit, Column, ConstraintSystem, Error, Selector}, -/// poly::Rotation, -/// }; -/// use ff::PrimeField; -/// use halo2curves::pasta::Fp; -/// const K: u32 = 5; -/// -/// #[derive(Copy, Clone)] -/// struct MyConfig { -/// a: Column, -/// b: Column, -/// c: Column, -/// s: Selector, -/// } -/// -/// #[derive(Clone, Default)] -/// struct MyCircuit { -/// a: Value, -/// b: Value, -/// } -/// -/// impl Circuit for MyCircuit { -/// type Config = MyConfig; -/// type FloorPlanner = SimpleFloorPlanner; -/// #[cfg(feature = "circuit-params")] -/// type Params = (); -/// -/// fn without_witnesses(&self) -> Self { -/// Self::default() -/// } -/// -/// fn configure(meta: &mut ConstraintSystem) -> MyConfig { -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = 
meta.selector(); -/// -/// meta.create_gate("R1CS constraint", |meta| { -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s = meta.query_selector(s); -/// -/// // BUG: Should be a * b - c -/// Some(("buggy R1CS", s * (a * b + c))) -/// }); -/// -/// MyConfig { a, b, c, s } -/// } -/// -/// fn synthesize(&self, config: MyConfig, mut layouter: impl Layouter) -> Result<(), Error> { -/// layouter.assign_region(|| "Example region", |mut region| { -/// config.s.enable(&mut region, 0)?; -/// region.assign_advice(|| "a", config.a, 0, || { -/// self.a.map(F::from) -/// })?; -/// region.assign_advice(|| "b", config.b, 0, || { -/// self.b.map(F::from) -/// })?; -/// region.assign_advice(|| "c", config.c, 0, || { -/// (self.a * self.b).map(F::from) -/// })?; -/// Ok(()) -/// }) -/// } -/// } -/// -/// // Assemble the private inputs to the circuit. -/// let circuit = MyCircuit { -/// a: Value::known(2), -/// b: Value::known(4), -/// }; -/// -/// // This circuit has no public inputs. -/// let instance = vec![]; -/// -/// let prover = MockProver::::run(K, &circuit, instance).unwrap(); -/// assert_eq!( -/// prover.verify(), -/// Err(vec![VerifyFailure::ConstraintNotSatisfied { -/// constraint: ((0, "R1CS constraint").into(), 0, "buggy R1CS").into(), -/// location: FailureLocation::InRegion { -/// region: (0, "Example region").into(), -/// offset: 0, -/// }, -/// cell_values: vec![ -/// (((Any::advice(), 0).into(), 0).into(), "0x2".to_string()), -/// (((Any::advice(), 1).into(), 0).into(), "0x4".to_string()), -/// (((Any::advice(), 2).into(), 0).into(), "0x8".to_string()), -/// ], -/// }]) -/// ); -/// -/// // If we provide a too-small K, we get a panic. 
-/// use std::panic; -/// let result = panic::catch_unwind(|| { -/// MockProver::::run(2, &circuit, vec![]).unwrap_err() -/// }); -/// assert_eq!( -/// result.unwrap_err().downcast_ref::().unwrap(), -/// "n=4, minimum_rows=8, k=2" -/// ); -/// ``` -#[derive(Debug)] -pub struct MockProver { - k: u32, - n: u32, - cs: ConstraintSystem, - - /// The regions in the circuit. - regions: Vec, - /// The current region being assigned to. Will be `None` after the circuit has been - /// synthesized. - current_region: Option, - - // The fixed cells in the circuit, arranged as [column][row]. - fixed: Vec>>, - // The advice cells in the circuit, arranged as [column][row]. - advice: Vec>>, - // The instance cells in the circuit, arranged as [column][row]. - instance: Vec>>, - - selectors: Vec>, - - challenges: Vec, - - permutation: permutation::keygen::Assembly, - - // A range of available rows for assignment and copies. - usable_rows: Range, - - current_phase: sealed::Phase, -} - -/// Instance Value -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum InstanceValue { - /// Assigned instance value - Assigned(F), - /// Padding - Padding, -} - -impl InstanceValue { - fn value(&self) -> F { - match self { - InstanceValue::Assigned(v) => *v, - InstanceValue::Padding => F::ZERO, - } - } -} - -impl MockProver { - fn in_phase(&self, phase: P) -> bool { - self.current_phase == phase.to_sealed() - } -} - -impl Assignment for MockProver { - fn enter_region(&mut self, name: N) - where - NR: Into, - N: FnOnce() -> NR, - { - if !self.in_phase(FirstPhase) { - return; - } - - assert!(self.current_region.is_none()); - self.current_region = Some(Region { - name: name().into(), - columns: HashSet::default(), - rows: None, - annotations: HashMap::default(), - enabled_selectors: HashMap::default(), - cells: HashMap::default(), - }); - } - - fn exit_region(&mut self) { - if !self.in_phase(FirstPhase) { - return; - } - - self.regions.push(self.current_region.take().unwrap()); - } - - fn 
annotate_column(&mut self, annotation: A, column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.in_phase(FirstPhase) { - return; - } - - if let Some(region) = self.current_region.as_mut() { - region - .annotations - .insert(ColumnMetadata::from(column), annotation().into()); - } - } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.in_phase(FirstPhase) { - return Ok(()); - } - - assert!( - self.usable_rows.contains(&row), - "row={} not in usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); - - // Track that this selector was enabled. We require that all selectors are enabled - // inside some region (i.e. no floating selectors). - self.current_region - .as_mut() - .unwrap() - .enabled_selectors - .entry(*selector) - .or_default() - .push(row); - - self.selectors[selector.0][row] = true; - - Ok(()) - } - - fn query_instance( - &self, - column: Column, - row: usize, - ) -> Result, Error> { - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); - - Ok(self - .instance - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| circuit::Value::known(v.value())) - .expect("bound failure")) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> circuit::Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if self.in_phase(FirstPhase) { - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); - - if let Some(region) = self.current_region.as_mut() { - region.update_extent(column.into(), row); - region - .cells - .entry((column.into(), row)) - .and_modify(|count| *count += 1) - .or_default(); - } - } - - match to().into_field().evaluate().assign() { - Ok(to) => { - let value = self - .advice - 
.get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .expect("bounds failure"); - *value = CellValue::Assigned(to); - } - Err(err) => { - // Propagate `assign` error if the column is in current phase. - if self.in_phase(column.column_type().phase) { - return Err(err); - } - } - } - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> circuit::Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if !self.in_phase(FirstPhase) { - return Ok(()); - } - - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); - - if let Some(region) = self.current_region.as_mut() { - region.update_extent(column.into(), row); - region - .cells - .entry((column.into(), row)) - .and_modify(|count| *count += 1) - .or_default(); - } - - *self - .fixed - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .expect("bounds failure") = CellValue::Assigned(to().into_field().evaluate().assign()?); - - Ok(()) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), crate::plonk::Error> { - if !self.in_phase(FirstPhase) { - return Ok(()); - } - - assert!( - self.usable_rows.contains(&left_row) && self.usable_rows.contains(&right_row), - "left_row={}, right_row={}, usable_rows={:?}, k={}", - left_row, - right_row, - self.usable_rows, - self.k, - ); - - self.permutation - .copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - col: Column, - from_row: usize, - to: circuit::Value>, - ) -> Result<(), Error> { - if !self.in_phase(FirstPhase) { - return Ok(()); - } - - assert!( - self.usable_rows.contains(&from_row), - "row={}, usable_rows={:?}, k={}", - from_row, - self.usable_rows, - self.k, - ); - - for row in self.usable_rows.clone().skip(from_row) { - self.assign_fixed(|| "", col, row, || to)?; - } - - Ok(()) - 
} - - fn get_challenge(&self, challenge: Challenge) -> circuit::Value { - if self.current_phase <= challenge.phase { - return circuit::Value::unknown(); - } - - circuit::Value::known(self.challenges[challenge.index()]) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // TODO: Do something with namespaces :) - } - - fn pop_namespace(&mut self, _: Option) { - // TODO: Do something with namespaces :) - } -} - -impl + Ord> MockProver { - /// Runs a synthetic keygen-and-prove operation on the given circuit, collecting data - /// about the constraints and their assignments. - pub fn run>( - k: u32, - circuit: &ConcreteCircuit, - instance: Vec>, - ) -> Result { - let n = 1 << k; - - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let cs = cs; - - assert!( - n >= cs.minimum_rows(), - "n={}, minimum_rows={}, k={}", - n, - cs.minimum_rows(), - k, - ); - - assert_eq!(instance.len(), cs.num_instance_columns); - - let instance = instance - .into_iter() - .map(|instance| { - assert!( - instance.len() <= n - (cs.blinding_factors() + 1), - "instance.len={}, n={}, cs.blinding_factors={}", - instance.len(), - n, - cs.blinding_factors() - ); - - let mut instance_values = vec![InstanceValue::Padding; n]; - for (idx, value) in instance.into_iter().enumerate() { - instance_values[idx] = InstanceValue::Assigned(value); - } - - instance_values - }) - .collect::>(); - - // Fixed columns contain no blinding factors. - let fixed = vec![vec![CellValue::Unassigned; n]; cs.num_fixed_columns]; - let selectors = vec![vec![false; n]; cs.num_selectors]; - // Advice columns contain blinding factors. 
- let blinding_factors = cs.blinding_factors(); - let usable_rows = n - (blinding_factors + 1); - let advice = vec![ - { - let mut column = vec![CellValue::Unassigned; n]; - // Poison unusable rows. - for (i, cell) in column.iter_mut().enumerate().skip(usable_rows) { - *cell = CellValue::Poison(i); - } - column - }; - cs.num_advice_columns - ]; - let permutation = permutation::keygen::Assembly::new(n, &cs.permutation); - let constants = cs.constants.clone(); - - // Use hash chain to derive deterministic challenges for testing - let challenges = { - let mut hash: [u8; 64] = blake2b(b"Halo2-MockProver").as_bytes().try_into().unwrap(); - iter::repeat_with(|| { - hash = blake2b(&hash).as_bytes().try_into().unwrap(); - F::from_uniform_bytes(&hash) - }) - .take(cs.num_challenges) - .collect() - }; - - let mut prover = MockProver { - k, - n: n as u32, - cs, - regions: vec![], - current_region: None, - fixed, - advice, - instance, - selectors, - challenges, - permutation, - usable_rows: 0..usable_rows, - current_phase: FirstPhase.to_sealed(), - }; - - for current_phase in prover.cs.phases() { - prover.current_phase = current_phase; - ConcreteCircuit::FloorPlanner::synthesize( - &mut prover, - circuit, - config.clone(), - constants.clone(), - )?; - } - - let (cs, selector_polys) = prover.cs.compress_selectors(prover.selectors.clone()); - prover.cs = cs; - prover.fixed.extend(selector_polys.into_iter().map(|poly| { - let mut v = vec![CellValue::Unassigned; n]; - for (v, p) in v.iter_mut().zip(&poly[..]) { - *v = CellValue::Assigned(*p); - } - v - })); - - #[cfg(feature = "thread-safe-region")] - prover.permutation.build_ordered_mapping(); - - Ok(prover) - } - - /// Return the content of an advice column as assigned by the circuit. - pub fn advice_values(&self, column: Column) -> &[CellValue] { - &self.advice[column.index()] - } - - /// Return the content of a fixed column as assigned by the circuit. 
- pub fn fixed_values(&self, column: Column) -> &[CellValue] { - &self.fixed[column.index()] - } - - /// Returns `Ok(())` if this `MockProver` is satisfied, or a list of errors indicating - /// the reasons that the circuit is not satisfied. - /// Constraints and lookup are checked at `usable_rows`, parallelly. - pub fn verify(&self) -> Result<(), Vec> { - self.verify_at_rows(self.usable_rows.clone(), self.usable_rows.clone()) - } - - /// Returns `Ok(())` if this `MockProver` is satisfied, or a list of errors indicating - /// the reasons that the circuit is not satisfied. - /// Constraints are only checked at `gate_row_ids`, and lookup inputs are only checked at `lookup_input_row_ids`, parallelly. - pub fn verify_at_rows>( - &self, - gate_row_ids: I, - lookup_input_row_ids: I, - ) -> Result<(), Vec> { - let n = self.n as i32; - - let gate_row_ids = gate_row_ids.collect::>(); - let lookup_input_row_ids = lookup_input_row_ids.collect::>(); - - // check all the row ids are valid - gate_row_ids.par_iter().for_each(|row_id| { - if !self.usable_rows.contains(row_id) { - panic!("invalid gate row id {row_id}"); - } - }); - lookup_input_row_ids.par_iter().for_each(|row_id| { - if !self.usable_rows.contains(row_id) { - panic!("invalid gate row id {row_id}"); - } - }); - - // Check that within each region, all cells used in instantiated gates have been - // assigned to. - let selector_errors = self.regions.iter().enumerate().flat_map(|(r_i, r)| { - r.enabled_selectors.iter().flat_map(move |(selector, at)| { - // Find the gates enabled by this selector - self.cs - .gates - .iter() - // Assume that if a queried selector is enabled, the user wants to use the - // corresponding gate in some way. - // - // TODO: This will trip up on the reverse case, where leaving a selector - // un-enabled keeps a gate enabled. We could alternatively require that - // every selector is explicitly enabled or disabled on every row? But that - // seems messy and confusing. 
- .enumerate() - .filter(move |(_, g)| g.queried_selectors().contains(selector)) - .flat_map(move |(gate_index, gate)| { - at.par_iter() - .flat_map(move |selector_row| { - // Selectors are queried with no rotation. - let gate_row = *selector_row as i32; - - gate.queried_cells() - .iter() - .filter_map(move |cell| { - // Determine where this cell should have been assigned. - let cell_row = - ((gate_row + n + cell.rotation.0) % n) as usize; - - match cell.column.column_type() { - Any::Instance => { - // Handle instance cells, which are not in the region. - let instance_value = - &self.instance[cell.column.index()][cell_row]; - match instance_value { - InstanceValue::Assigned(_) => None, - _ => Some( - VerifyFailure::InstanceCellNotAssigned { - gate: (gate_index, gate.name()).into(), - region: (r_i, r.name.clone()).into(), - gate_offset: *selector_row, - column: cell.column.try_into().unwrap(), - row: cell_row, - }, - ), - } - } - _ => { - // Check that it was assigned! - if r.cells.contains_key(&(cell.column, cell_row)) { - None - } else { - Some(VerifyFailure::CellNotAssigned { - gate: (gate_index, gate.name()).into(), - region: ( - r_i, - r.name.clone(), - r.annotations.clone(), - ) - .into(), - gate_offset: *selector_row, - column: cell.column, - offset: cell_row as isize - - r.rows.unwrap().0 as isize, - }) - } - } - } - }) - .collect::>() - }) - .collect::>() - }) - }) - }); - - // Check that all gates are satisfied for all rows. 
- let gate_errors = self - .cs - .gates - .iter() - .enumerate() - .flat_map(|(gate_index, gate)| { - let blinding_rows = - (self.n as usize - (self.cs.blinding_factors() + 1))..(self.n as usize); - (gate_row_ids - .clone() - .into_par_iter() - .chain(blinding_rows.into_par_iter())) - .flat_map(move |row| { - let row = row as i32 + n; - gate.polynomials() - .iter() - .enumerate() - .filter_map(move |(poly_index, poly)| { - match poly.evaluate_lazy( - &|scalar| Value::Real(scalar), - &|_| panic!("virtual selectors are removed during optimization"), - &util::load(n, row, &self.cs.fixed_queries, &self.fixed), - &util::load(n, row, &self.cs.advice_queries, &self.advice), - &util::load_instance( - n, - row, - &self.cs.instance_queries, - &self.instance, - ), - &|challenge| Value::Real(self.challenges[challenge.index()]), - &|a| -a, - &|a, b| a + b, - &|a, b| a * b, - &|a, scalar| a * scalar, - &Value::Real(F::ZERO), - ) { - Value::Real(x) if x.is_zero_vartime() => None, - Value::Real(_) => Some(VerifyFailure::ConstraintNotSatisfied { - constraint: ( - (gate_index, gate.name()).into(), - poly_index, - gate.constraint_name(poly_index), - ) - .into(), - location: FailureLocation::find_expressions( - &self.cs, - &self.regions, - (row - n) as usize, - Some(poly).into_iter(), - ), - cell_values: util::cell_values( - gate, - poly, - &util::load(n, row, &self.cs.fixed_queries, &self.fixed), - &util::load(n, row, &self.cs.advice_queries, &self.advice), - &util::load_instance( - n, - row, - &self.cs.instance_queries, - &self.instance, - ), - ), - }), - Value::Poison => Some(VerifyFailure::ConstraintPoisoned { - constraint: ( - (gate_index, gate.name()).into(), - poly_index, - gate.constraint_name(poly_index), - ) - .into(), - }), - } - }) - .collect::>() - }) - .collect::>() - }); - - let load = |expression: &Expression, row| { - expression.evaluate_lazy( - &|scalar| Value::Real(scalar), - &|_| panic!("virtual selectors are removed during optimization"), - &|query| { - 
self.fixed[query.column_index] - [(row as i32 + n + query.rotation.0) as usize % n as usize] - .into() - }, - &|query| { - self.advice[query.column_index] - [(row as i32 + n + query.rotation.0) as usize % n as usize] - .into() - }, - &|query| { - Value::Real( - self.instance[query.column_index] - [(row as i32 + n + query.rotation.0) as usize % n as usize] - .value(), - ) - }, - &|challenge| Value::Real(self.challenges[challenge.index()]), - &|a| -a, - &|a, b| a + b, - &|a, b| a * b, - &|a, scalar| a * scalar, - &Value::Real(F::ZERO), - ) - }; - - let mut cached_table = Vec::new(); - let mut cached_table_identifier = Vec::new(); - // Check that all lookups exist in their respective tables. - let lookup_errors = - self.cs - .lookups - .iter() - .enumerate() - .flat_map(|(lookup_index, lookup)| { - assert!(lookup.table_expressions.len() == lookup.input_expressions.len()); - assert!(self.usable_rows.end > 0); - - // We optimize on the basis that the table might have been filled so that the last - // usable row now has the fill contents (it doesn't matter if there was no filling). - // Note that this "fill row" necessarily exists in the table, and we use that fact to - // slightly simplify the optimization: we're only trying to check that all input rows - // are contained in the table, and so we can safely just drop input rows that - // match the fill row. - let fill_row: Vec<_> = lookup - .table_expressions - .iter() - .map(move |c| load(c, self.usable_rows.end - 1)) - .collect(); - - let table_identifier = lookup - .table_expressions - .iter() - .map(Expression::identifier) - .collect::>(); - if table_identifier != cached_table_identifier { - cached_table_identifier = table_identifier; - - // In the real prover, the lookup expressions are never enforced on - // unusable rows, due to the (1 - (l_last(X) + l_blind(X))) term. 
- cached_table = self - .usable_rows - .clone() - .into_par_iter() - .filter_map(|table_row| { - let t = lookup - .table_expressions - .iter() - .map(move |c| load(c, table_row)) - .collect(); - - if t != fill_row { - Some(t) - } else { - None - } - }) - .collect(); - cached_table.par_sort_unstable(); - } - let table = &cached_table; - - let mut inputs: Vec<(Vec<_>, usize)> = lookup_input_row_ids - .clone() - .into_par_iter() - .filter_map(|input_row| { - let t = lookup - .input_expressions - .iter() - .map(move |c| load(c, input_row)) - .collect(); - - if t != fill_row { - // Also keep track of the original input row, since we're going to sort. - Some((t, input_row)) - } else { - None - } - }) - .collect(); - inputs.par_sort_unstable(); - - inputs - .par_iter() - .filter_map(move |(input, input_row)| { - if table.binary_search(input).is_err() { - Some(VerifyFailure::Lookup { - name: lookup.name.clone(), - lookup_index, - location: FailureLocation::find_expressions( - &self.cs, - &self.regions, - *input_row, - lookup.input_expressions.iter(), - ), - }) - } else { - None - } - }) - .collect::>() - }); - - let shuffle_errors = - self.cs - .shuffles - .iter() - .enumerate() - .flat_map(|(shuffle_index, shuffle)| { - assert!(shuffle.shuffle_expressions.len() == shuffle.input_expressions.len()); - assert!(self.usable_rows.end > 0); - - let mut shuffle_rows: Vec>> = self - .usable_rows - .clone() - .map(|row| { - let t = shuffle - .shuffle_expressions - .iter() - .map(move |c| load(c, row)) - .collect(); - t - }) - .collect(); - shuffle_rows.sort(); - - let mut input_rows: Vec<(Vec>, usize)> = self - .usable_rows - .clone() - .map(|input_row| { - let t = shuffle - .input_expressions - .iter() - .map(move |c| load(c, input_row)) - .collect(); - - (t, input_row) - }) - .collect(); - input_rows.sort(); - - input_rows - .iter() - .zip(shuffle_rows.iter()) - .filter_map(|((input_value, row), shuffle_value)| { - if shuffle_value != input_value { - Some(VerifyFailure::Shuffle { 
- name: shuffle.name.clone(), - shuffle_index, - location: FailureLocation::find_expressions( - &self.cs, - &self.regions, - *row, - shuffle.input_expressions.iter(), - ), - }) - } else { - None - } - }) - .collect::>() - }); - - let mapping = self.permutation.mapping(); - // Check that permutations preserve the original values of the cells. - let perm_errors = { - // Original values of columns involved in the permutation. - let original = |column, row| { - self.cs - .permutation - .get_columns() - .get(column) - .map(|c: &Column| match c.column_type() { - Any::Advice(_) => self.advice[c.index()][row], - Any::Fixed => self.fixed[c.index()][row], - Any::Instance => { - let cell: &InstanceValue = &self.instance[c.index()][row]; - CellValue::Assigned(cell.value()) - } - }) - .unwrap() - }; - - // Iterate over each column of the permutation - mapping.enumerate().flat_map(move |(column, values)| { - // Iterate over each row of the column to check that the cell's - // value is preserved by the mapping. - values - .enumerate() - .filter_map(move |(row, cell)| { - let original_cell = original(column, row); - let permuted_cell = original(cell.0, cell.1); - if original_cell == permuted_cell { - None - } else { - let columns = self.cs.permutation.get_columns(); - let column = columns.get(column).unwrap(); - Some(VerifyFailure::Permutation { - column: (*column).into(), - location: FailureLocation::find( - &self.regions, - row, - Some(column).into_iter().cloned().collect(), - ), - }) - } - }) - .collect::>() - }) - }; - - let mut errors: Vec<_> = iter::empty() - .chain(selector_errors) - .chain(gate_errors) - .chain(lookup_errors) - .chain(perm_errors) - .chain(shuffle_errors) - .collect(); - if errors.is_empty() { - Ok(()) - } else { - // Remove any duplicate `ConstraintPoisoned` errors (we check all unavailable - // rows in case the trigger is row-specific, but the error message only points - // at the constraint). 
- errors.dedup_by(|a, b| match (a, b) { - ( - a @ VerifyFailure::ConstraintPoisoned { .. }, - b @ VerifyFailure::ConstraintPoisoned { .. }, - ) => a == b, - _ => false, - }); - Err(errors) - } - } - - /// Panics if the circuit being checked by this `MockProver` is not satisfied. - /// - /// Any verification failures will be pretty-printed to stderr before the function - /// panics. - /// - /// Apart from the stderr output, this method is equivalent to: - /// ```ignore - /// assert_eq!(prover.verify(), Ok(())); - /// ``` - pub fn assert_satisfied(&self) { - if let Err(errs) = self.verify() { - for err in errs { - err.emit(self); - eprintln!(); - } - panic!("circuit was not satisfied"); - } - } - - /// Panics if the circuit being checked by this `MockProver` is not satisfied. - /// - /// Any verification failures will be pretty-printed to stderr before the function - /// panics. - /// - /// Constraints are only checked at `gate_row_ids`, and lookup inputs are only checked at `lookup_input_row_ids`, parallelly. - /// - /// Apart from the stderr output, this method is equivalent to: - /// ```ignore - /// assert_eq!(prover.verify_at_rows(), Ok(())); - /// ``` - pub fn assert_satisfied_at_rows>( - &self, - gate_row_ids: I, - lookup_input_row_ids: I, - ) { - if let Err(errs) = self.verify_at_rows(gate_row_ids, lookup_input_row_ids) { - for err in errs { - err.emit(self); - eprintln!(); - } - panic!("circuit was not satisfied"); - } - } - - /// Returns the constraint system - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns the usable rows - pub fn usable_rows(&self) -> &Range { - &self.usable_rows - } - - /// Returns the list of Advice Columns used within a MockProver instance and the associated values contained on each Cell. - pub fn advice(&self) -> &Vec>> { - &self.advice - } - - /// Returns the list of Fixed Columns used within a MockProver instance and the associated values contained on each Cell. 
- pub fn fixed(&self) -> &Vec>> { - &self.fixed - } - - /// Returns the list of Selector Columns used within a MockProver instance and the associated values contained on each Cell. - pub fn selectors(&self) -> &Vec> { - &self.selectors - } - - /// Returns the list of Instance Columns used within a MockProver instance and the associated values contained on each Cell. - pub fn instance(&self) -> &Vec>> { - &self.instance - } - - /// Returns the permutation argument (`Assembly`) used within a MockProver instance. - pub fn permutation(&self) -> &Assembly { - &self.permutation - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::{FailureLocation, MockProver, VerifyFailure}; - use crate::{ - circuit::{Layouter, SimpleFloorPlanner, Value}, - plonk::{ - sealed::SealedPhase, Advice, Any, Circuit, Column, ConstraintSystem, Error, Expression, - FirstPhase, Fixed, Instance, Selector, TableColumn, - }, - poly::Rotation, - }; - - #[test] - fn unassigned_cell() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - a: Column, - b: Column, - q: Selector, - } - - struct FaultyCircuit {} - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let b = meta.advice_column(); - let q = meta.selector(); - - meta.create_gate("Equality check", |cells| { - let a = cells.query_advice(a, Rotation::prev()); - let b = cells.query_advice(b, Rotation::cur()); - let q = cells.query_selector(q); - - // If q is enabled, a and b must be assigned to. 
- vec![q * (a - b)] - }); - - FaultyCircuitConfig { a, b, q } - } - - fn without_witnesses(&self) -> Self { - Self {} - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_region( - || "Faulty synthesis", - |mut region| { - // Enable the equality gate. - config.q.enable(&mut region, 1)?; - - // Assign a = 0. - region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::zero()))?; - - // Name Column a - region.name_column(|| "This is annotated!", config.a); - - // Name Column b - region.name_column(|| "This is also annotated!", config.b); - - // BUG: Forget to assign b = 0! This could go unnoticed during - // development, because cell values default to zero, which in this - // case is fine, but for other assignments would be broken. - Ok(()) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); - assert_eq!( - prover.verify(), - Err(vec![VerifyFailure::CellNotAssigned { - gate: (0, "Equality check").into(), - region: (0, "Faulty synthesis".to_owned()).into(), - gate_offset: 1, - column: Column::new( - 1, - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }) - ), - offset: 1, - }]) - ); - } - - #[test] - fn bad_lookup_any() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - a: Column, - table: Column, - advice_table: Column, - q: Selector, - } - - struct FaultyCircuit {} - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let q = meta.complex_selector(); - let table = meta.instance_column(); - let advice_table = meta.advice_column(); - - meta.annotate_lookup_any_column(table, || "Inst-Table"); - meta.enable_equality(table); - meta.annotate_lookup_any_column(advice_table, || "Adv-Table"); - 
meta.enable_equality(advice_table); - - meta.lookup_any("lookup", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - let q = cells.query_selector(q); - let advice_table = cells.query_advice(advice_table, Rotation::cur()); - let table = cells.query_instance(table, Rotation::cur()); - - // If q is enabled, a must be in the table. - // When q is not enabled, lookup the default value instead. - let not_q = Expression::Constant(Fp::one()) - q.clone(); - let default = Expression::Constant(Fp::from(2)); - vec![ - ( - q.clone() * a.clone() + not_q.clone() * default.clone(), - table, - ), - (q * a + not_q * default, advice_table), - ] - }); - - FaultyCircuitConfig { - a, - q, - table, - advice_table, - } - } - - fn without_witnesses(&self) -> Self { - Self {} - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - // No assignment needed for the table as is an Instance Column. - - layouter.assign_region( - || "Good synthesis", - |mut region| { - // Enable the lookup on rows 0 and 1. - config.q.enable(&mut region, 0)?; - config.q.enable(&mut region, 1)?; - - for i in 0..4 { - // Load Advice lookup table with Instance lookup table values. - region.assign_advice_from_instance( - || "Advice from instance tables", - config.table, - i, - config.advice_table, - i, - )?; - } - - // Assign a = 2 and a = 6. - region.assign_advice( - || "a = 2", - config.a, - 0, - || Value::known(Fp::from(2)), - )?; - region.assign_advice( - || "a = 6", - config.a, - 1, - || Value::known(Fp::from(6)), - )?; - - Ok(()) - }, - )?; - - layouter.assign_region( - || "Faulty synthesis", - |mut region| { - // Enable the lookup on rows 0 and 1. - config.q.enable(&mut region, 0)?; - config.q.enable(&mut region, 1)?; - - for i in 0..4 { - // Load Advice lookup table with Instance lookup table values. 
- region.assign_advice_from_instance( - || "Advice from instance tables", - config.table, - i, - config.advice_table, - i, - )?; - } - - // Assign a = 4. - region.assign_advice( - || "a = 4", - config.a, - 0, - || Value::known(Fp::from(4)), - )?; - - // BUG: Assign a = 5, which doesn't exist in the table! - region.assign_advice( - || "a = 5", - config.a, - 1, - || Value::known(Fp::from(5)), - )?; - - region.name_column(|| "Witness example", config.a); - - Ok(()) - }, - ) - } - } - - let prover = MockProver::run( - K, - &FaultyCircuit {}, - // This is our "lookup table". - vec![vec![ - Fp::from(1u64), - Fp::from(2u64), - Fp::from(4u64), - Fp::from(6u64), - ]], - ) - .unwrap(); - assert_eq!( - prover.verify(), - Err(vec![VerifyFailure::Lookup { - name: "lookup".to_string(), - lookup_index: 0, - location: FailureLocation::InRegion { - region: (1, "Faulty synthesis").into(), - offset: 1, - } - }]) - ); - } - - #[test] - fn bad_fixed_lookup() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - a: Column, - q: Selector, - table: TableColumn, - } - - struct FaultyCircuit {} - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let q = meta.complex_selector(); - let table = meta.lookup_table_column(); - meta.annotate_lookup_column(table, || "Table1"); - - meta.lookup("lookup", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - let q = cells.query_selector(q); - - // If q is enabled, a must be in the table. - // When q is not enabled, lookup the default value instead. 
- let not_q = Expression::Constant(Fp::one()) - q.clone(); - let default = Expression::Constant(Fp::from(2)); - vec![(q * a + not_q * default, table)] - }); - - FaultyCircuitConfig { a, q, table } - } - - fn without_witnesses(&self) -> Self { - Self {} - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "Doubling table", - |mut table| { - (1..(1 << (K - 1))) - .map(|i| { - table.assign_cell( - || format!("table[{}] = {}", i, 2 * i), - config.table, - i - 1, - || Value::known(Fp::from(2 * i as u64)), - ) - }) - .try_fold((), |_, res| res) - }, - )?; - - layouter.assign_region( - || "Good synthesis", - |mut region| { - // Enable the lookup on rows 0 and 1. - config.q.enable(&mut region, 0)?; - config.q.enable(&mut region, 1)?; - - // Assign a = 2 and a = 6. - region.assign_advice( - || "a = 2", - config.a, - 0, - || Value::known(Fp::from(2)), - )?; - region.assign_advice( - || "a = 6", - config.a, - 1, - || Value::known(Fp::from(6)), - )?; - - Ok(()) - }, - )?; - - layouter.assign_region( - || "Faulty synthesis", - |mut region| { - // Enable the lookup on rows 0 and 1. - config.q.enable(&mut region, 0)?; - config.q.enable(&mut region, 1)?; - - // Assign a = 4. - region.assign_advice( - || "a = 4", - config.a, - 0, - || Value::known(Fp::from(4)), - )?; - - // BUG: Assign a = 5, which doesn't exist in the table! 
- region.assign_advice( - || "a = 5", - config.a, - 1, - || Value::known(Fp::from(5)), - )?; - - region.name_column(|| "Witness example", config.a); - - Ok(()) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); - assert_eq!( - prover.verify(), - Err(vec![VerifyFailure::Lookup { - name: "lookup".to_string(), - lookup_index: 0, - location: FailureLocation::InRegion { - region: (2, "Faulty synthesis").into(), - offset: 1, - } - }]) - ); - } - - #[test] - fn contraint_unsatisfied() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - a: Column, - b: Column, - c: Column, - d: Column, - q: Selector, - } - - struct FaultyCircuit {} - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let b = meta.advice_column(); - let c = meta.advice_column(); - let d = meta.fixed_column(); - let q = meta.selector(); - - meta.create_gate("Equality check", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - let b = cells.query_advice(b, Rotation::cur()); - let c = cells.query_advice(c, Rotation::cur()); - let d = cells.query_fixed(d, Rotation::cur()); - let q = cells.query_selector(q); - - // If q is enabled, a and b must be assigned to. - vec![q * (a - b) * (c - d)] - }); - - FaultyCircuitConfig { a, b, c, d, q } - } - - fn without_witnesses(&self) -> Self { - Self {} - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_region( - || "Correct synthesis", - |mut region| { - // Enable the equality gate. - config.q.enable(&mut region, 0)?; - - // Assign a = 1. - region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::one()))?; - - // Assign b = 1. 
- region.assign_advice(|| "b", config.b, 0, || Value::known(Fp::one()))?; - - // Assign c = 5. - region.assign_advice( - || "c", - config.c, - 0, - || Value::known(Fp::from(5u64)), - )?; - // Assign d = 7. - region.assign_fixed( - || "d", - config.d, - 0, - || Value::known(Fp::from(7u64)), - )?; - Ok(()) - }, - )?; - layouter.assign_region( - || "Wrong synthesis", - |mut region| { - // Enable the equality gate. - config.q.enable(&mut region, 0)?; - - // Assign a = 1. - region.assign_advice(|| "a", config.a, 0, || Value::known(Fp::one()))?; - - // Assign b = 0. - region.assign_advice(|| "b", config.b, 0, || Value::known(Fp::zero()))?; - - // Name Column a - region.name_column(|| "This is Advice!", config.a); - // Name Column b - region.name_column(|| "This is Advice too!", config.b); - - // Assign c = 5. - region.assign_advice( - || "c", - config.c, - 0, - || Value::known(Fp::from(5u64)), - )?; - // Assign d = 7. - region.assign_fixed( - || "d", - config.d, - 0, - || Value::known(Fp::from(7u64)), - )?; - - // Name Column c - region.name_column(|| "Another one!", config.c); - // Name Column d - region.name_column(|| "This is a Fixed!", config.d); - - // Note that none of the terms cancel eachother. Therefore we will have a constraint that is non satisfied for - // the `Equalty check` gate. 
- Ok(()) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit {}, vec![]).unwrap(); - assert_eq!( - prover.verify(), - Err(vec![VerifyFailure::ConstraintNotSatisfied { - constraint: ((0, "Equality check").into(), 0, "").into(), - location: FailureLocation::InRegion { - region: (1, "Wrong synthesis").into(), - offset: 0, - }, - cell_values: vec![ - ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 0 - ) - .into(), - 0 - ) - .into(), - "1".to_string() - ), - ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 1 - ) - .into(), - 0 - ) - .into(), - "0".to_string() - ), - ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 2 - ) - .into(), - 0 - ) - .into(), - "0x5".to_string() - ), - (((Any::Fixed, 0).into(), 0).into(), "0x7".to_string()), - ], - },]) - ) - } -} diff --git a/halo2_proofs_rm/src/dev/cost.rs b/halo2_proofs_rm/src/dev/cost.rs deleted file mode 100644 index 735f1f0dc7..0000000000 --- a/halo2_proofs_rm/src/dev/cost.rs +++ /dev/null @@ -1,561 +0,0 @@ -//! Developer tools for investigating the cost of a circuit. - -use std::{ - cmp, - collections::{HashMap, HashSet}, - iter, - marker::PhantomData, - ops::{Add, Mul}, -}; - -use ff::{Field, PrimeField}; -use group::prime::PrimeGroup; - -use crate::{ - circuit::{layouter::RegionColumn, Value}, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, - poly::Rotation, -}; - -/// Measures a circuit to determine its costs, and explain what contributes to them. -#[allow(dead_code)] -#[derive(Debug)] -pub struct CircuitCost> { - /// Power-of-2 bound on the number of rows in the circuit. - k: u32, - /// Maximum degree of the circuit. - max_deg: usize, - /// Number of advice columns. - advice_columns: usize, - /// Number of direct queries for each column type. 
- instance_queries: usize, - advice_queries: usize, - fixed_queries: usize, - /// Number of lookup arguments. - lookups: usize, - /// Number of columns in the global permutation. - permutation_cols: usize, - /// Number of distinct sets of points in the multiopening argument. - point_sets: usize, - /// Maximum rows used over all columns - max_rows: usize, - /// Maximum rows used over all advice columns - max_advice_rows: usize, - /// Maximum rows used over all fixed columns - max_fixed_rows: usize, - num_fixed_columns: usize, - num_advice_columns: usize, - num_instance_columns: usize, - num_total_columns: usize, - - _marker: PhantomData<(G, ConcreteCircuit)>, -} - -/// Region implementation used by Layout -#[allow(dead_code)] -#[derive(Debug)] -pub(crate) struct LayoutRegion { - /// The name of the region. Not required to be unique. - pub(crate) name: String, - /// The columns used by this region. - pub(crate) columns: HashSet, - /// The row that this region starts on, if known. - pub(crate) offset: Option, - /// The number of rows that this region takes up. - pub(crate) rows: usize, - /// The cells assigned in this region. - pub(crate) cells: Vec<(RegionColumn, usize)>, -} - -/// Cost and graphing layouter -#[derive(Default, Debug)] -pub(crate) struct Layout { - /// k = 1 << n - pub(crate) k: u32, - /// Regions of the layout - pub(crate) regions: Vec, - current_region: Option, - /// Total row count - pub(crate) total_rows: usize, - /// Total advice rows - pub(crate) total_advice_rows: usize, - /// Total fixed rows - pub(crate) total_fixed_rows: usize, - /// Any cells assigned outside of a region. - pub(crate) loose_cells: Vec<(RegionColumn, usize)>, - /// Pairs of cells between which we have equality constraints. 
- pub(crate) equality: Vec<(Column, usize, Column, usize)>, - /// Selector assignments used for optimization pass - pub(crate) selectors: Vec>, -} - -impl Layout { - /// Creates a empty layout - pub fn new(k: u32, n: usize, num_selectors: usize) -> Self { - Layout { - k, - regions: vec![], - current_region: None, - total_rows: 0, - total_advice_rows: 0, - total_fixed_rows: 0, - // Any cells assigned outside of a region. - loose_cells: vec![], - // Pairs of cells between which we have equality constraints. - equality: vec![], - // Selector assignments used for optimization pass - selectors: vec![vec![false; n]; num_selectors], - } - } - - /// Update layout metadata - pub fn update(&mut self, column: RegionColumn, row: usize) { - self.total_rows = cmp::max(self.total_rows, row + 1); - - if let RegionColumn::Column(col) = column { - match col.column_type() { - Any::Advice(_) => { - self.total_advice_rows = cmp::max(self.total_advice_rows, row + 1) - } - Any::Fixed => self.total_fixed_rows = cmp::max(self.total_fixed_rows, row + 1), - _ => {} - } - } - - if let Some(region) = self.current_region { - let region = &mut self.regions[region]; - region.columns.insert(column); - - // The region offset is the earliest row assigned to. - let mut offset = region.offset.unwrap_or(row); - if row < offset { - // The first row assigned was not at offset 0 within the region. - region.rows += offset - row; - offset = row; - } - // The number of rows in this region is the gap between the earliest and - // latest rows assigned. 
- region.rows = cmp::max(region.rows, row - offset + 1); - region.offset = Some(offset); - - region.cells.push((column, row)); - } else { - self.loose_cells.push((column, row)); - } - } -} - -impl Assignment for Layout { - fn enter_region(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - assert!(self.current_region.is_none()); - self.current_region = Some(self.regions.len()); - self.regions.push(LayoutRegion { - name: name_fn().into(), - columns: HashSet::default(), - offset: None, - rows: 0, - cells: vec![], - }) - } - - fn annotate_column(&mut self, _: A, _: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - } - - fn exit_region(&mut self) { - assert!(self.current_region.is_some()); - self.current_region = None; - } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if let Some(cell) = self.selectors[selector.0].get_mut(row) { - *cell = true; - } else { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.update((*selector).into(), row); - Ok(()) - } - - fn query_instance(&self, _: Column, _: usize) -> Result, Error> { - Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - self.update(Column::::from(column).into(), row); - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - self.update(Column::::from(column).into(), row); - Ok(()) - } - - fn copy( - &mut self, - l_col: Column, - l_row: usize, - r_col: Column, - r_row: usize, - ) -> Result<(), crate::plonk::Error> { - self.equality.push((l_col, l_row, r_col, r_row)); - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { 
- Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - -impl> CircuitCost { - /// Measures a circuit with parameter constant `k`. - /// - /// Panics if `k` is not large enough for the circuit. - pub fn measure(k: u32, circuit: &ConcreteCircuit) -> Self { - // Collect the layout details. - let mut cs = ConstraintSystem::default(); - let config = ConcreteCircuit::configure(&mut cs); - let mut layout = Layout::new(k, 1 << k, cs.num_selectors); - ConcreteCircuit::FloorPlanner::synthesize( - &mut layout, - circuit, - config, - cs.constants.clone(), - ) - .unwrap(); - let (cs, _) = cs.compress_selectors(layout.selectors); - - assert!((1 << k) >= cs.minimum_rows()); - - // Figure out how many point sets we have due to queried cells. - let mut column_queries: HashMap, HashSet> = HashMap::new(); - for (c, r) in iter::empty() - .chain( - cs.advice_queries - .iter() - .map(|(c, r)| (Column::::from(*c), *r)), - ) - .chain(cs.instance_queries.iter().map(|(c, r)| ((*c).into(), *r))) - .chain(cs.fixed_queries.iter().map(|(c, r)| ((*c).into(), *r))) - .chain( - cs.permutation - .get_columns() - .into_iter() - .map(|c| (c, Rotation::cur())), - ) - { - column_queries.entry(c).or_default().insert(r.0); - } - let mut point_sets: HashSet> = HashSet::new(); - for (_, r) in column_queries { - // Sort the query sets so we merge duplicates. 
- let mut query_set: Vec<_> = r.into_iter().collect(); - query_set.sort_unstable(); - point_sets.insert(query_set); - } - - // Include lookup polynomials in point sets: - point_sets.insert(vec![0, 1]); // product_poly - point_sets.insert(vec![-1, 0]); // permuted_input_poly - point_sets.insert(vec![0]); // permuted_table_poly - - // Include permutation polynomials in point sets. - point_sets.insert(vec![0, 1]); // permutation_product_poly - let max_deg = cs.degree(); - let permutation_cols = cs.permutation.get_columns().len(); - if permutation_cols > max_deg - 2 { - // permutation_product_poly for chaining chunks. - point_sets.insert(vec![-((cs.blinding_factors() + 1) as i32), 0, 1]); - } - - CircuitCost { - k, - max_deg, - advice_columns: cs.num_advice_columns, - instance_queries: cs.instance_queries.len(), - advice_queries: cs.advice_queries.len(), - fixed_queries: cs.fixed_queries.len(), - lookups: cs.lookups.len(), - permutation_cols, - point_sets: point_sets.len(), - max_rows: layout.total_rows, - max_advice_rows: layout.total_advice_rows, - max_fixed_rows: layout.total_fixed_rows, - num_advice_columns: cs.num_advice_columns, - num_fixed_columns: cs.num_fixed_columns, - num_instance_columns: cs.num_instance_columns, - num_total_columns: cs.num_instance_columns - + cs.num_advice_columns - + cs.num_fixed_columns, - _marker: PhantomData, - } - } - - fn permutation_chunks(&self) -> usize { - let chunk_size = self.max_deg - 2; - (self.permutation_cols + chunk_size - 1) / chunk_size - } - - /// Returns the marginal proof size per instance of this circuit. 
- pub fn marginal_proof_size(&self) -> MarginalProofSize { - let chunks = self.permutation_chunks(); - - MarginalProofSize { - // Cells: - // - 1 commitment per advice column per instance - // - 1 eval per instance column query per instance - // - 1 eval per advice column query per instance - instance: ProofContribution::new(0, self.instance_queries), - advice: ProofContribution::new(self.advice_columns, self.advice_queries), - - // Lookup arguments: - // - 3 commitments per lookup argument per instance - // - 5 evals per lookup argument per instance - lookups: ProofContribution::new(3 * self.lookups, 5 * self.lookups), - - // Global permutation argument: - // - chunks commitments per instance - // - 2 * chunks + (chunks - 1) evals per instance - equality: ProofContribution::new( - chunks, - if chunks == 0 { chunks } else { 3 * chunks - 1 }, - ), - - _marker: PhantomData, - } - } - - /// Returns the proof size for the given number of instances of this circuit. - pub fn proof_size(&self, instances: usize) -> ProofSize { - let marginal = self.marginal_proof_size(); - - ProofSize { - // Cells: - // - marginal cost per instance - // - 1 eval per fixed column query - instance: marginal.instance * instances, - advice: marginal.advice * instances, - fixed: ProofContribution::new(0, self.fixed_queries), - - // Lookup arguments: - // - marginal cost per instance - lookups: marginal.lookups * instances, - - // Global permutation argument: - // - marginal cost per instance - // - 1 eval per column - equality: marginal.equality * instances - + ProofContribution::new(0, self.permutation_cols), - - // Vanishing argument: - // - 1 + (max_deg - 1) commitments - // - 1 random_poly eval - vanishing: ProofContribution::new(self.max_deg, 1), - - // Multiopening argument: - // - f_commitment - // - 1 eval per set of points in multiopen argument - multiopen: ProofContribution::new(1, self.point_sets), - - // Polycommit: - // - s_poly commitment - // - inner product argument (2 * k round 
commitments) - // - a - // - xi - polycomm: ProofContribution::new((1 + 2 * self.k).try_into().unwrap(), 2), - - _marker: PhantomData, - } - } -} - -/// (commitments, evaluations) -#[derive(Debug)] -struct ProofContribution { - commitments: usize, - evaluations: usize, -} - -impl ProofContribution { - fn new(commitments: usize, evaluations: usize) -> Self { - ProofContribution { - commitments, - evaluations, - } - } - - fn len(&self, point: usize, scalar: usize) -> usize { - self.commitments * point + self.evaluations * scalar - } -} - -impl Add for ProofContribution { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - Self { - commitments: self.commitments + rhs.commitments, - evaluations: self.evaluations + rhs.evaluations, - } - } -} - -impl Mul for ProofContribution { - type Output = Self; - - fn mul(self, instances: usize) -> Self::Output { - Self { - commitments: self.commitments * instances, - evaluations: self.evaluations * instances, - } - } -} - -/// The marginal size of a Halo 2 proof, broken down into its contributing factors. -#[derive(Debug)] -pub struct MarginalProofSize { - instance: ProofContribution, - advice: ProofContribution, - lookups: ProofContribution, - equality: ProofContribution, - _marker: PhantomData, -} - -impl From> for usize { - fn from(proof: MarginalProofSize) -> Self { - let point = G::Repr::default().as_ref().len(); - let scalar = ::Repr::default().as_ref().len(); - - proof.instance.len(point, scalar) - + proof.advice.len(point, scalar) - + proof.lookups.len(point, scalar) - + proof.equality.len(point, scalar) - } -} - -/// The size of a Halo 2 proof, broken down into its contributing factors. 
-#[derive(Debug)] -pub struct ProofSize { - instance: ProofContribution, - advice: ProofContribution, - fixed: ProofContribution, - lookups: ProofContribution, - equality: ProofContribution, - vanishing: ProofContribution, - multiopen: ProofContribution, - polycomm: ProofContribution, - _marker: PhantomData, -} - -impl From> for usize { - fn from(proof: ProofSize) -> Self { - let point = G::Repr::default().as_ref().len(); - let scalar = ::Repr::default().as_ref().len(); - - proof.instance.len(point, scalar) - + proof.advice.len(point, scalar) - + proof.fixed.len(point, scalar) - + proof.lookups.len(point, scalar) - + proof.equality.len(point, scalar) - + proof.vanishing.len(point, scalar) - + proof.multiopen.len(point, scalar) - + proof.polycomm.len(point, scalar) - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::{Eq, Fp}; - - use crate::circuit::SimpleFloorPlanner; - - use super::*; - - #[test] - fn circuit_cost_without_permutation() { - const K: u32 = 4; - - struct MyCircuit; - impl Circuit for MyCircuit { - type Config = (); - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} - - fn synthesize( - &self, - _config: Self::Config, - _layouter: impl crate::circuit::Layouter, - ) -> Result<(), Error> { - Ok(()) - } - } - CircuitCost::::measure(K, &MyCircuit).proof_size(1); - } -} diff --git a/halo2_proofs_rm/src/dev/cost_model.rs b/halo2_proofs_rm/src/dev/cost_model.rs deleted file mode 100644 index 51b3a1ad76..0000000000 --- a/halo2_proofs_rm/src/dev/cost_model.rs +++ /dev/null @@ -1,323 +0,0 @@ -//! The cost estimator takes high-level parameters for a circuit design, and estimates the -//! verification cost, as well as resulting proof size. 
- -use std::collections::HashSet; -use std::{iter, num::ParseIntError, str::FromStr}; - -use crate::plonk::Circuit; -use ff::{Field, FromUniformBytes}; -use serde::Deserialize; -use serde_derive::Serialize; - -use super::MockProver; - -/// Supported commitment schemes -#[derive(Debug, Eq, PartialEq)] -pub enum CommitmentScheme { - /// Inner Product Argument commitment scheme - IPA, - /// KZG with GWC19 mutli-open strategy - KZGGWC, - /// KZG with BDFG20 mutli-open strategy - KZGSHPLONK, -} - -/// Options to build a circuit specification to measure the cost model of. -#[derive(Debug)] -pub struct CostOptions { - /// An advice column with the given rotations. May be repeated. - pub advice: Vec, - - /// An instance column with the given rotations. May be repeated. - pub instance: Vec, - - /// A fixed column with the given rotations. May be repeated. - pub fixed: Vec, - - /// Maximum degree of the custom gates. - pub gate_degree: usize, - - /// Maximum degree of the constraint system. - pub max_degree: usize, - - /// A lookup over N columns with max input degree I and max table degree T. May be repeated. - pub lookup: Vec, - - /// A permutation over N columns. May be repeated. - pub permutation: Permutation, - - /// A shuffle over N columns with max input degree I and max shuffle degree T. May be repeated. - pub shuffle: Vec, - - /// 2^K bound on the number of rows. - pub k: usize, -} - -/// Structure holding polynomial related data for benchmarks -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct Poly { - /// Rotations for the given polynomial - pub rotations: Vec, -} - -impl FromStr for Poly { - type Err = ParseIntError; - - fn from_str(s: &str) -> Result { - let mut rotations: Vec = - s.split(',').map(|r| r.parse()).collect::>()?; - rotations.sort_unstable(); - Ok(Poly { rotations }) - } -} - -/// Structure holding the Lookup related data for circuit benchmarks. 
-#[derive(Debug, Clone)] -pub struct Lookup; - -impl Lookup { - fn queries(&self) -> impl Iterator { - // - product commitments at x and \omega x - // - input commitments at x and x_inv - // - table commitments at x - let product = "0,1".parse().unwrap(); - let input = "0,-1".parse().unwrap(); - let table = "0".parse().unwrap(); - - iter::empty() - .chain(Some(product)) - .chain(Some(input)) - .chain(Some(table)) - } -} - -/// Number of permutation enabled columns -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct Permutation { - columns: usize, -} - -impl Permutation { - fn queries(&self) -> impl Iterator { - // - product commitments at x and x_inv - // - polynomial commitments at x - let product = "0,-1".parse().unwrap(); - let poly = "0".parse().unwrap(); - - iter::empty() - .chain(Some(product)) - .chain(iter::repeat(poly).take(self.columns)) - } -} - -/// Structure holding the [Shuffle] related data for circuit benchmarks. -#[derive(Debug, Clone)] -pub struct Shuffle; - -impl Shuffle { - fn queries(&self) -> impl Iterator { - // Open shuffle product commitment at x and \omega x - let shuffle = "0, 1".parse().unwrap(); - - iter::empty().chain(Some(shuffle)) - } -} - -/// High-level specifications of an abstract circuit. -#[derive(Debug, Deserialize, Serialize)] -pub struct ModelCircuit { - /// Power-of-2 bound on the number of rows in the circuit. - pub k: usize, - /// Maximum degree of the circuit. - pub max_deg: usize, - /// Number of advice columns. - pub advice_columns: usize, - /// Number of lookup arguments. - pub lookups: usize, - /// Equality constraint enabled columns. - pub permutations: usize, - /// Number of shuffle arguments - pub shuffles: usize, - /// Number of distinct column queries across all gates. - pub column_queries: usize, - /// Number of distinct sets of points in the multiopening argument. 
- pub point_sets: usize, - /// Size of the proof for the circuit - pub size: usize, -} - -impl CostOptions { - /// Convert [CostOptions] to [ModelCircuit]. The proof sizè is computed depending on the base - /// and scalar field size of the curve used, together with the [CommitmentScheme]. - pub fn into_model_circuit( - &self, - comm_scheme: CommitmentScheme, - ) -> ModelCircuit { - let mut queries: Vec<_> = iter::empty() - .chain(self.advice.iter()) - .chain(self.instance.iter()) - .chain(self.fixed.iter()) - .cloned() - .chain(self.lookup.iter().flat_map(|l| l.queries())) - .chain(self.permutation.queries()) - .chain(self.shuffle.iter().flat_map(|s| s.queries())) - .chain(iter::repeat("0".parse().unwrap()).take(self.max_degree - 1)) - .collect(); - - let column_queries = queries.len(); - queries.sort_unstable(); - queries.dedup(); - let point_sets = queries.len(); - - let comp_bytes = |points: usize, scalars: usize| points * COMM + scalars * SCALAR; - - // PLONK: - // - COMM bytes (commitment) per advice column - // - 3 * COMM bytes (commitments) + 5 * SCALAR bytes (evals) per lookup column - // - COMM bytes (commitment) + 2 * SCALAR bytes (evals) per permutation argument - // - COMM bytes (eval) per column per permutation argument - let plonk = comp_bytes(1, 0) * self.advice.len() - + comp_bytes(3, 5) * self.lookup.len() - + comp_bytes(1, 2 + self.permutation.columns); - - // Vanishing argument: - // - (max_deg - 1) * COMM bytes (commitments) + (max_deg - 1) * SCALAR bytes (h_evals) - // for quotient polynomial - // - SCALAR bytes (eval) per column query - let vanishing = - comp_bytes(self.max_degree - 1, self.max_degree - 1) + comp_bytes(0, column_queries); - - // Multiopening argument: - // - f_commitment (COMM bytes) - // - SCALAR bytes (evals) per set of points in multiopen argument - let multiopen = comp_bytes(1, point_sets); - - let polycomm = match comm_scheme { - CommitmentScheme::IPA => { - // Polycommit IPA: - // - s_poly commitment (COMM bytes) - // - 
inner product argument (k rounds * 2 * COMM bytes) - // - a (SCALAR bytes) - // - xi (SCALAR bytes) - comp_bytes(1 + 2 * self.k, 2) - } - CommitmentScheme::KZGGWC => { - let mut nr_rotations = HashSet::new(); - for poly in self.advice.iter() { - nr_rotations.extend(poly.rotations.clone()); - } - for poly in self.fixed.iter() { - nr_rotations.extend(poly.rotations.clone()); - } - for poly in self.instance.iter() { - nr_rotations.extend(poly.rotations.clone()); - } - - // Polycommit GWC: - // - number_rotations * COMM bytes - comp_bytes(nr_rotations.len(), 0) - } - CommitmentScheme::KZGSHPLONK => { - // Polycommit SHPLONK: - // - quotient polynomial commitment (COMM bytes) - comp_bytes(1, 0) - } - }; - - let size = plonk + vanishing + multiopen + polycomm; - - ModelCircuit { - k: self.k, - max_deg: self.max_degree, - advice_columns: self.advice.len(), - lookups: self.lookup.len(), - permutations: self.permutation.columns, - shuffles: self.shuffle.len(), - column_queries, - point_sets, - size, - } - } -} - -/// Given a Plonk circuit, this function returns a [ModelCircuit] -pub fn from_circuit_to_model_circuit< - F: Ord + Field + FromUniformBytes<64>, - C: Circuit, - const COMM: usize, - const SCALAR: usize, ->( - k: u32, - circuit: &C, - instances: Vec>, - comm_scheme: CommitmentScheme, -) -> ModelCircuit { - let options = from_circuit_to_cost_model_options(k, circuit, instances); - options.into_model_circuit::(comm_scheme) -} - -/// Given a Plonk circuit, this function returns [CostOptions] -pub fn from_circuit_to_cost_model_options, C: Circuit>( - k: u32, - circuit: &C, - instances: Vec>, -) -> CostOptions { - let prover = MockProver::run(k, circuit, instances).unwrap(); - let cs = prover.cs; - - let fixed = { - // init the fixed polynomials with no rotations - let mut fixed = vec![Poly { rotations: vec![] }; cs.num_fixed_columns()]; - for (col, rot) in cs.fixed_queries() { - fixed[col.index()].rotations.push(rot.0 as isize); - } - fixed - }; - - let advice = { - // 
init the advice polynomials with no rotations - let mut advice = vec![Poly { rotations: vec![] }; cs.num_advice_columns()]; - for (col, rot) in cs.advice_queries() { - advice[col.index()].rotations.push(rot.0 as isize); - } - advice - }; - - let instance = { - // init the instance polynomials with no rotations - let mut instance = vec![Poly { rotations: vec![] }; cs.num_instance_columns()]; - for (col, rot) in cs.instance_queries() { - instance[col.index()].rotations.push(rot.0 as isize); - } - instance - }; - - let lookup = { cs.lookups().iter().map(|_| Lookup).collect::>() }; - - let permutation = Permutation { - columns: cs.permutation().get_columns().len(), - }; - - let shuffle = { cs.shuffles.iter().map(|_| Shuffle).collect::>() }; - - let gate_degree = cs - .gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0); - - let k = prover.k.try_into().unwrap(); - - CostOptions { - advice, - instance, - fixed, - gate_degree, - max_degree: cs.degree(), - lookup, - permutation, - shuffle, - k, - } -} diff --git a/halo2_proofs_rm/src/dev/failure.rs b/halo2_proofs_rm/src/dev/failure.rs deleted file mode 100644 index f9f5c27ded..0000000000 --- a/halo2_proofs_rm/src/dev/failure.rs +++ /dev/null @@ -1,873 +0,0 @@ -use std::collections::{BTreeMap, HashSet}; -use std::fmt::{self, Debug}; - -use group::ff::Field; - -use super::metadata::{DebugColumn, DebugVirtualCell}; -use super::MockProver; -use super::{ - metadata, - util::{self, AnyQuery}, - Region, -}; -use crate::dev::metadata::Constraint; -use crate::{ - dev::{Instance, Value}, - plonk::{Any, Column, ConstraintSystem, Expression, Gate}, -}; - -mod emitter; - -/// The location within the circuit at which a particular [`VerifyFailure`] occurred. -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum FailureLocation { - /// A location inside a region. - InRegion { - /// The region in which the failure occurred. 
- region: metadata::Region, - /// The offset (relative to the start of the region) at which the failure - /// occurred. - offset: usize, - }, - /// A location outside of a region. - OutsideRegion { - /// The circuit row on which the failure occurred. - row: usize, - }, -} - -impl fmt::Display for FailureLocation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::InRegion { region, offset } => write!(f, "in {region} at offset {offset}"), - Self::OutsideRegion { row } => { - write!(f, "outside any region, on row {row}") - } - } - } -} - -impl FailureLocation { - /// Returns a `DebugColumn` from Column metadata and `&self`. - pub(super) fn get_debug_column(&self, metadata: metadata::Column) -> DebugColumn { - match self { - Self::InRegion { region, .. } => { - DebugColumn::from((metadata, region.column_annotations.as_ref())) - } - _ => DebugColumn::from((metadata, None)), - } - } - - pub(super) fn find_expressions<'a, F: Field>( - cs: &ConstraintSystem, - regions: &[Region], - failure_row: usize, - failure_expressions: impl Iterator>, - ) -> Self { - let failure_columns: HashSet> = failure_expressions - .flat_map(|expression| { - expression.evaluate( - &|_| vec![], - &|_| panic!("virtual selectors are removed during optimization"), - &|query| vec![cs.fixed_queries[query.index.unwrap()].0.into()], - &|query| vec![cs.advice_queries[query.index.unwrap()].0.into()], - &|query| vec![cs.instance_queries[query.index.unwrap()].0.into()], - &|_| vec![], - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ) - }) - .collect(); - - Self::find(regions, failure_row, failure_columns) - } - - /// Figures out whether the given row and columns overlap an assigned region. 
- pub(super) fn find( - regions: &[Region], - failure_row: usize, - failure_columns: HashSet>, - ) -> Self { - regions - .iter() - .enumerate() - .find(|(_, r)| { - if let Some((start, end)) = r.rows { - // We match the region if any input columns overlap, rather than all of - // them, because matching complex selector columns is hard. As long as - // regions are rectangles, and failures occur due to assignments entirely - // within single regions, "any" will be equivalent to "all". If these - // assumptions change, we'll start getting bug reports from users :) - (start..=end).contains(&failure_row) && !failure_columns.is_disjoint(&r.columns) - } else { - // Zero-area region - false - } - }) - .map(|(r_i, r)| FailureLocation::InRegion { - region: (r_i, r.name.clone(), r.annotations.clone()).into(), - offset: failure_row - r.rows.unwrap().0, - }) - .unwrap_or_else(|| FailureLocation::OutsideRegion { row: failure_row }) - } -} - -/// The reasons why a particular circuit is not satisfied. -#[derive(PartialEq, Eq)] -pub enum VerifyFailure { - /// A cell used in an active gate was not assigned to. - CellNotAssigned { - /// The index of the active gate. - gate: metadata::Gate, - /// The region in which this cell should be assigned. - region: metadata::Region, - /// The offset (relative to the start of the region) at which the active gate - /// queries this cell. - gate_offset: usize, - /// The column in which this cell should be assigned. - column: Column, - /// The offset (relative to the start of the region) at which this cell should be - /// assigned. This may be negative (for example, if a selector enables a gate at - /// offset 0, but the gate uses `Rotation::prev()`). - offset: isize, - }, - /// An instance cell used in an active gate was not assigned to. - InstanceCellNotAssigned { - /// The index of the active gate. - gate: metadata::Gate, - /// The region in which this gate was activated. 
- region: metadata::Region, - /// The offset (relative to the start of the region) at which the active gate - /// queries this cell. - gate_offset: usize, - /// The column in which this cell should be assigned. - column: Column, - /// The absolute row at which this cell should be assigned. - row: usize, - }, - /// A constraint was not satisfied for a particular row. - ConstraintNotSatisfied { - /// The polynomial constraint that is not satisfied. - constraint: metadata::Constraint, - /// The location at which this constraint is not satisfied. - /// - /// `FailureLocation::OutsideRegion` is usually caused by a constraint that does - /// not contain a selector, and as a result is active on every row. - location: FailureLocation, - /// The values of the virtual cells used by this constraint. - cell_values: Vec<(metadata::VirtualCell, String)>, - }, - /// A constraint was active on an unusable row, and is likely missing a selector. - ConstraintPoisoned { - /// The polynomial constraint that is not satisfied. - constraint: metadata::Constraint, - }, - /// A lookup input did not exist in its corresponding table. - Lookup { - /// The name of the lookup that is not satisfied. - name: String, - /// The index of the lookup that is not satisfied. These indices are assigned in - /// the order in which `ConstraintSystem::lookup` is called during - /// `Circuit::configure`. - lookup_index: usize, - /// The location at which the lookup is not satisfied. - /// - /// `FailureLocation::InRegion` is most common, and may be due to the intentional - /// use of a lookup (if its inputs are conditional on a complex selector), or an - /// unintentional lookup constraint that overlaps the region (indicating that the - /// lookup's inputs should be made conditional). - /// - /// `FailureLocation::OutsideRegion` is uncommon, and could mean that: - /// - The input expressions do not correctly constrain a default value that exists - /// in the table when the lookup is not being used. 
- /// - The input expressions use a column queried at a non-zero `Rotation`, and the - /// lookup is active on a row adjacent to an unrelated region. - location: FailureLocation, - }, - /// A shuffle input did not exist in its corresponding map. - Shuffle { - /// The name of the lookup that is not satisfied. - name: String, - /// The index of the lookup that is not satisfied. These indices are assigned in - /// the order in which `ConstraintSystem::lookup` is called during - /// `Circuit::configure`. - shuffle_index: usize, - /// The location at which the lookup is not satisfied. - /// - /// `FailureLocation::InRegion` is most common, and may be due to the intentional - /// use of a lookup (if its inputs are conditional on a complex selector), or an - /// unintentional lookup constraint that overlaps the region (indicating that the - /// lookup's inputs should be made conditional). - /// - /// `FailureLocation::OutsideRegion` is uncommon, and could mean that: - /// - The input expressions do not correctly constrain a default value that exists - /// in the table when the lookup is not being used. - /// - The input expressions use a column queried at a non-zero `Rotation`, and the - /// lookup is active on a row adjacent to an unrelated region. - location: FailureLocation, - }, - /// A permutation did not preserve the original value of a cell. - Permutation { - /// The column in which this permutation is not satisfied. - column: metadata::Column, - /// The location at which the permutation is not satisfied. 
- location: FailureLocation, - }, -} - -impl fmt::Display for VerifyFailure { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::CellNotAssigned { - gate, - region, - gate_offset, - column, - offset, - } => { - write!( - f, - "{} uses {} at offset {}, which requires cell in column {:?} at offset {} with annotation {:?} to be assigned.", - region, gate, gate_offset, column, offset, region.get_column_annotation((*column).into()) - ) - } - Self::InstanceCellNotAssigned { - gate, - region, - gate_offset, - column, - row, - } => { - write!( - f, - "{region} uses {gate} at offset {gate_offset}, which requires cell in instance column {column:?} at row {row} to be assigned.", - ) - } - Self::ConstraintNotSatisfied { - constraint, - location, - cell_values, - } => { - writeln!(f, "{constraint} is not satisfied {location}")?; - for (dvc, value) in cell_values.iter().map(|(vc, string)| { - let ann_map = match location { - FailureLocation::InRegion { region, offset: _ } => { - ®ion.column_annotations - } - _ => &None, - }; - - (DebugVirtualCell::from((vc, ann_map.as_ref())), string) - }) { - writeln!(f, "- {dvc} = {value}")?; - } - Ok(()) - } - Self::ConstraintPoisoned { constraint } => { - write!( - f, - "{constraint} is active on an unusable row - missing selector?" 
- ) - } - Self::Lookup { - name, - lookup_index, - location, - } => { - write!( - f, - "Lookup {name}(index: {lookup_index}) is not satisfied {location}", - ) - } - Self::Shuffle { - name, - shuffle_index, - location, - } => { - write!( - f, - "Shuffle {name}(index: {shuffle_index}) is not satisfied {location}" - ) - } - Self::Permutation { column, location } => { - write!( - f, - "Equality constraint not satisfied by cell ({}, {})", - location.get_debug_column(*column), - location - ) - } - } - } -} - -impl Debug for VerifyFailure { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - VerifyFailure::ConstraintNotSatisfied { - constraint, - location, - cell_values, - } => { - #[allow(dead_code)] - #[derive(Debug)] - struct ConstraintCaseDebug { - constraint: Constraint, - location: FailureLocation, - cell_values: Vec<(DebugVirtualCell, String)>, - } - - let ann_map = match location { - FailureLocation::InRegion { region, offset: _ } => { - region.column_annotations.clone() - } - _ => None, - }; - - let debug = ConstraintCaseDebug { - constraint: constraint.clone(), - location: location.clone(), - cell_values: cell_values - .iter() - .map(|(vc, value)| { - ( - DebugVirtualCell::from((vc, ann_map.as_ref())), - value.clone(), - ) - }) - .collect(), - }; - - write!(f, "{debug:#?}") - } - _ => write!(f, "{self:#}"), - } - } -} - -/// Renders `VerifyFailure::CellNotAssigned`. -/// -/// ```text -/// error: cell not assigned -/// Cell layout in region 'Faulty synthesis': -/// | Offset | A0 | A1 | -/// +--------+----+----+ -/// | 0 | x0 | | -/// | 1 | | X | <--{ X marks the spot! 🦜 -/// -/// Gate 'Equality check' (applied at offset 1) queries these cells. -/// ``` -fn render_cell_not_assigned( - gates: &[Gate], - gate: &metadata::Gate, - region: &metadata::Region, - gate_offset: usize, - column: Column, - offset: isize, -) { - // Collect the necessary rendering information: - // - The columns involved in this gate. 
- // - How many cells are in each column. - // - The grid of cell values, indexed by rotation. - let mut columns = BTreeMap::::default(); - let mut layout = BTreeMap::>::default(); - for (i, cell) in gates[gate.index].queried_cells().iter().enumerate() { - let cell_column = cell.column.into(); - *columns.entry(cell_column).or_default() += 1; - layout - .entry(cell.rotation.0) - .or_default() - .entry(cell_column) - .or_insert_with(|| { - if cell.column == column && gate_offset as i32 + cell.rotation.0 == offset as i32 { - "X".to_string() - } else { - format!("x{i}") - } - }); - } - - eprintln!("error: cell not assigned"); - emitter::render_cell_layout( - " ", - &FailureLocation::InRegion { - region: region.clone(), - offset: gate_offset, - }, - &columns, - &layout, - |row_offset, rotation| { - if (row_offset.unwrap() + rotation) as isize == offset { - eprint!(" <--{{ X marks the spot! 🦜"); - } - }, - ); - eprintln!(); - eprintln!( - " Gate '{}' (applied at offset {}) queries these cells.", - gate.name, gate_offset - ); -} - -/// Renders `VerifyFailure::ConstraintNotSatisfied`. -/// -/// ```text -/// error: constraint not satisfied -/// Cell layout in region 'somewhere': -/// | Offset | A0 | -/// +--------+----+ -/// | 0 | x0 | <--{ Gate 'foo' applied here -/// | 1 | x1 | -/// -/// Constraint 'bar': -/// x1 + x1 * 0x100 + x1 * 0x10000 + x1 * 0x100_0000 - x0 = 0 -/// -/// Assigned cell values: -/// x0 = 0x5 -/// x1 = 0x5 -/// ``` -fn render_constraint_not_satisfied( - gates: &[Gate], - constraint: &metadata::Constraint, - location: &FailureLocation, - cell_values: &[(metadata::VirtualCell, String)], -) { - // Collect the necessary rendering information: - // - The columns involved in this constraint. - // - How many cells are in each column. - // - The grid of cell values, indexed by rotation. 
- let mut columns = BTreeMap::::default(); - let mut layout = BTreeMap::>::default(); - for (i, (cell, _)) in cell_values.iter().enumerate() { - *columns.entry(cell.column).or_default() += 1; - layout - .entry(cell.rotation) - .or_default() - .entry(cell.column) - .or_insert(format!("x{i}")); - } - - eprintln!("error: constraint not satisfied"); - emitter::render_cell_layout(" ", location, &columns, &layout, |_, rotation| { - if rotation == 0 { - eprint!(" <--{{ Gate '{}' applied here", constraint.gate.name); - } - }); - - // Print the unsatisfied constraint, in terms of the local variables. - eprintln!(); - eprintln!(" Constraint '{}':", constraint.name); - eprintln!( - " {} = 0", - emitter::expression_to_string( - &gates[constraint.gate.index].polynomials()[constraint.index], - &layout - ) - ); - - // Print the map from local variables to assigned values. - eprintln!(); - eprintln!(" Assigned cell values:"); - for (i, (_, value)) in cell_values.iter().enumerate() { - eprintln!(" x{i} = {value}"); - } -} - -/// Renders `VerifyFailure::Lookup`. -/// -/// ```text -/// error: lookup input does not exist in table -/// (L0) ∉ (F0) -/// -/// Lookup inputs: -/// L0 = x1 * x0 + (1 - x1) * 0x2 -/// ^ -/// | Cell layout in region 'Faulty synthesis': -/// | | Offset | A0 | F1 | -/// | +--------+----+----+ -/// | | 1 | x0 | x1 | <--{ Lookup inputs queried here -/// | -/// | Assigned cell values: -/// | x0 = 0x5 -/// | x1 = 1 -/// ``` -fn render_lookup( - prover: &MockProver, - name: &str, - lookup_index: usize, - location: &FailureLocation, -) { - let n = prover.n as i32; - let cs = &prover.cs; - let lookup = &cs.lookups[lookup_index]; - - // Get the absolute row on which the lookup's inputs are being queried, so we can - // fetch the input values. 
- let row = match location { - FailureLocation::InRegion { region, offset } => { - prover.regions[region.index].rows.unwrap().0 + offset - } - FailureLocation::OutsideRegion { row } => *row, - } as i32; - - // Recover the fixed columns from the table expressions. We don't allow composite - // expressions for the table side of lookups. - let lookup_columns = lookup.table_expressions.iter().map(|expr| { - expr.evaluate( - &|f| format! {"Const: {f:#?}"}, - &|s| format! {"S{}", s.0}, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::Fixed, query.column_index))) - .cloned() - .unwrap_or_else(|| format!("F{}", query.column_index())) - ) - }, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::advice(), query.column_index))) - .cloned() - .unwrap_or_else(|| format!("A{}", query.column_index())) - ) - }, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::Instance, query.column_index))) - .cloned() - .unwrap_or_else(|| format!("I{}", query.column_index())) - ) - }, - &|challenge| format! {"C{}", challenge.index()}, - &|query| format! {"-{query}"}, - &|a, b| format! {"{a} + {b}"}, - &|a, b| format! {"{a} * {b}"}, - &|a, b| format! {"{a} * {b:?}"}, - ) - }); - - fn cell_value<'a, F: Field, Q: Into + Copy>( - load: impl Fn(Q) -> Value + 'a, - ) -> impl Fn(Q) -> BTreeMap + 'a { - move |query| { - let AnyQuery { - column_type, - column_index, - rotation, - .. 
- } = query.into(); - Some(( - ((column_type, column_index).into(), rotation.0).into(), - match load(query) { - Value::Real(v) => util::format_value(v), - Value::Poison => unreachable!(), - }, - )) - .into_iter() - .collect() - } - } - - eprintln!("error: lookup input does not exist in table"); - eprint!(" ("); - for i in 0..lookup.input_expressions.len() { - eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); - } - - eprint!(") ∉ ("); - for (i, column) in lookup_columns.enumerate() { - eprint!("{}{}", if i == 0 { "" } else { ", " }, column); - } - eprintln!(")"); - - eprintln!(); - eprintln!(" Lookup '{name}' inputs:"); - for (i, input) in lookup.input_expressions.iter().enumerate() { - // Fetch the cell values (since we don't store them in VerifyFailure::Lookup). - let cell_values = input.evaluate( - &|_| BTreeMap::default(), - &|_| panic!("virtual selectors are removed during optimization"), - &cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)), - &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), - &cell_value(&util::load_instance( - n, - row, - &cs.instance_queries, - &prover.instance, - )), - &|_| BTreeMap::default(), - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ); - - // Collect the necessary rendering information: - // - The columns involved in this constraint. - // - How many cells are in each column. - // - The grid of cell values, indexed by rotation. 
- let mut columns = BTreeMap::::default(); - let mut layout = BTreeMap::>::default(); - for (i, (cell, _)) in cell_values.iter().enumerate() { - *columns.entry(cell.column).or_default() += 1; - layout - .entry(cell.rotation) - .or_default() - .entry(cell.column) - .or_insert(format!("x{i}")); - } - - if i != 0 { - eprintln!(); - } - eprintln!( - " L{} = {}", - i, - emitter::expression_to_string(input, &layout) - ); - eprintln!(" ^"); - - emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { - if rotation == 0 { - eprint!(" <--{{ Lookup '{name}' inputs queried here"); - } - }); - - // Print the map from local variables to assigned values. - eprintln!(" |"); - eprintln!(" | Assigned cell values:"); - for (i, (_, value)) in cell_values.iter().enumerate() { - eprintln!(" | x{i} = {value}"); - } - } -} - -fn render_shuffle( - prover: &MockProver, - name: &str, - shuffle_index: usize, - location: &FailureLocation, -) { - let n = prover.n as i32; - let cs = &prover.cs; - let shuffle = &cs.shuffles[shuffle_index]; - - // Get the absolute row on which the shuffle's inputs are being queried, so we can - // fetch the input values. - let row = match location { - FailureLocation::InRegion { region, offset } => { - prover.regions[region.index].rows.unwrap().0 + offset - } - FailureLocation::OutsideRegion { row } => *row, - } as i32; - - let shuffle_columns = shuffle.shuffle_expressions.iter().map(|expr| { - expr.evaluate( - &|f| format! {"Const: {f:#?}"}, - &|s| format! 
{"S{}", s.0}, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::Fixed, query.column_index))) - .cloned() - .unwrap_or_else(|| format!("F{}", query.column_index())) - ) - }, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::advice(), query.column_index))) - .cloned() - .unwrap_or_else(|| format!("A{}", query.column_index())) - ) - }, - &|query| { - format!( - "{:?}", - prover - .cs - .general_column_annotations - .get(&metadata::Column::from((Any::Instance, query.column_index))) - .cloned() - .unwrap_or_else(|| format!("I{}", query.column_index())) - ) - }, - &|challenge| format! {"C{}", challenge.index()}, - &|query| format! {"-{query}"}, - &|a, b| format! {"{a} + {b}"}, - &|a, b| format! {"{a} * {b}"}, - &|a, b| format! {"{a} * {b:?}"}, - ) - }); - - fn cell_value<'a, F: Field, Q: Into + Copy>( - load: impl Fn(Q) -> Value + 'a, - ) -> impl Fn(Q) -> BTreeMap + 'a { - move |query| { - let AnyQuery { - column_type, - column_index, - rotation, - .. - } = query.into(); - Some(( - ((column_type, column_index).into(), rotation.0).into(), - match load(query) { - Value::Real(v) => util::format_value(v), - Value::Poison => unreachable!(), - }, - )) - .into_iter() - .collect() - } - } - - eprintln!("error: input does not exist in shuffle"); - eprint!(" ("); - for i in 0..shuffle.input_expressions.len() { - eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); - } - eprint!(") <-> ("); - for (i, column) in shuffle_columns.enumerate() { - eprint!("{}{}", if i == 0 { "" } else { ", " }, column); - } - eprintln!(")"); - - eprintln!(); - eprintln!(" Shuffle '{name}' inputs:"); - for (i, input) in shuffle.input_expressions.iter().enumerate() { - // Fetch the cell values (since we don't store them in VerifyFailure::Shuffle). 
- let cell_values = input.evaluate( - &|_| BTreeMap::default(), - &|_| panic!("virtual selectors are removed during optimization"), - &cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)), - &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), - &cell_value(&util::load_instance( - n, - row, - &cs.instance_queries, - &prover.instance, - )), - &|_| BTreeMap::default(), - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ); - - // Collect the necessary rendering information: - // - The columns involved in this constraint. - // - How many cells are in each column. - // - The grid of cell values, indexed by rotation. - let mut columns = BTreeMap::::default(); - let mut layout = BTreeMap::>::default(); - for (i, (cell, _)) in cell_values.iter().enumerate() { - *columns.entry(cell.column).or_default() += 1; - layout - .entry(cell.rotation) - .or_default() - .entry(cell.column) - .or_insert(format!("x{i}")); - } - - if i != 0 { - eprintln!(); - } - eprintln!( - " Sh{} = {}", - i, - emitter::expression_to_string(input, &layout) - ); - eprintln!(" ^"); - - emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { - if rotation == 0 { - eprint!(" <--{{ Shuffle '{name}' inputs queried here"); - } - }); - - // Print the map from local variables to assigned values. - eprintln!(" |"); - eprintln!(" | Assigned cell values:"); - for (i, (_, value)) in cell_values.iter().enumerate() { - eprintln!(" | x{i} = {value}"); - } - } -} - -impl VerifyFailure { - /// Emits this failure in pretty-printed format to stderr. 
- pub(super) fn emit(&self, prover: &MockProver) { - match self { - Self::CellNotAssigned { - gate, - region, - gate_offset, - column, - offset, - } => render_cell_not_assigned( - &prover.cs.gates, - gate, - region, - *gate_offset, - *column, - *offset, - ), - Self::ConstraintNotSatisfied { - constraint, - location, - cell_values, - } => { - render_constraint_not_satisfied(&prover.cs.gates, constraint, location, cell_values) - } - Self::Lookup { - name, - lookup_index, - location, - } => render_lookup(prover, name, *lookup_index, location), - Self::Shuffle { - name, - shuffle_index, - location, - } => render_shuffle(prover, name, *shuffle_index, location), - _ => eprintln!("{self}"), - } - } -} diff --git a/halo2_proofs_rm/src/dev/failure/emitter.rs b/halo2_proofs_rm/src/dev/failure/emitter.rs deleted file mode 100644 index 24109d599b..0000000000 --- a/halo2_proofs_rm/src/dev/failure/emitter.rs +++ /dev/null @@ -1,214 +0,0 @@ -use std::collections::BTreeMap; -use std::iter; - -use group::ff::Field; - -use super::FailureLocation; -use crate::{ - dev::{metadata, util}, - plonk::{Advice, Any, Expression}, -}; - -fn padded(p: char, width: usize, text: &str) -> String { - let pad = width - text.len(); - - format!( - "{}{}{}", - iter::repeat(p).take(pad - pad / 2).collect::(), - text, - iter::repeat(p).take(pad / 2).collect::(), - ) -} - -fn column_type_and_idx(column: &metadata::Column) -> String { - format!( - "{}{}", - match column.column_type { - Any::Advice(_) => "A", - Any::Fixed => "F", - Any::Instance => "I", - }, - column.index - ) -} - -/// Renders a cell layout around a given failure location. -/// -/// `highlight_row` is called at the end of each row, with the offset of the active row -/// (if `location` is in a region), and the rotation of the current row relative to the -/// active row. 
-pub(super) fn render_cell_layout( - prefix: &str, - location: &FailureLocation, - columns: &BTreeMap, - layout: &BTreeMap>, - highlight_row: impl Fn(Option, i32), -) { - let col_width = |cells: usize| cells.to_string().len() + 3; - let mut col_headers = String::new(); - - // If we are in a region, show rows at offsets relative to it. Otherwise, just show - // the rotations directly. - let offset = match location { - FailureLocation::InRegion { region, offset } => { - col_headers - .push_str(format!("{}Cell layout in region '{}':\n", prefix, region.name).as_str()); - col_headers.push_str(format!("{prefix} | Offset |").as_str()); - Some(*offset as i32) - } - FailureLocation::OutsideRegion { row } => { - col_headers.push_str(format!("{prefix}Cell layout at row {row}:\n").as_str()); - col_headers.push_str(format!("{prefix} |Rotation|").as_str()); - None - } - }; - eprint!("\n{col_headers}"); - - let widths: Vec = columns - .iter() - .map(|(col, _)| { - let size = match location { - FailureLocation::InRegion { region, offset: _ } => { - if let Some(column_ann) = region.column_annotations.as_ref() { - if let Some(ann) = column_ann.get(col) { - ann.len() - } else { - col_width(column_type_and_idx(col).as_str().len()) - } - } else { - col_width(column_type_and_idx(col).as_str().len()) - } - } - FailureLocation::OutsideRegion { row: _ } => { - col_width(column_type_and_idx(col).as_str().len()) - } - }; - size - }) - .collect(); - - // Print the assigned cells, and their region offset or rotation + the column name at which they're assigned to. 
- for ((column, _), &width) in columns.iter().zip(widths.iter()) { - eprint!( - "{}|", - padded( - ' ', - width, - &match location { - FailureLocation::InRegion { region, offset: _ } => { - region - .column_annotations - .as_ref() - .and_then(|column_ann| column_ann.get(column).cloned()) - .unwrap_or_else(|| column_type_and_idx(column)) - } - FailureLocation::OutsideRegion { row: _ } => { - column_type_and_idx(column) - } - } - .to_string() - ) - ); - } - - eprintln!(); - eprint!("{prefix} +--------+"); - for &width in widths.iter() { - eprint!("{}+", padded('-', width, "")); - } - eprintln!(); - for (rotation, row) in layout { - eprint!( - "{} |{}|", - prefix, - padded(' ', 8, &(offset.unwrap_or(0) + rotation).to_string()) - ); - for ((col, _), &width) in columns.iter().zip(widths.iter()) { - eprint!( - "{}|", - padded( - ' ', - width, - row.get(col).map(|s| s.as_str()).unwrap_or_default() - ) - ); - } - highlight_row(offset, *rotation); - eprintln!(); - } -} - -pub(super) fn expression_to_string( - expr: &Expression, - layout: &BTreeMap>, -) -> String { - expr.evaluate( - &util::format_value, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| { - if let Some(label) = layout - .get(&query.rotation.0) - .and_then(|row| row.get(&(Any::Fixed, query.column_index).into())) - { - label.clone() - } else if query.rotation.0 == 0 { - // This is most likely a merged selector - format!("S{}", query.index.unwrap()) - } else { - // No idea how we'd get here... 
- format!("F{}@{}", query.column_index, query.rotation.0) - } - }, - &|query| { - layout - .get(&query.rotation.0) - .and_then(|map| { - map.get( - &( - Any::Advice(Advice { phase: query.phase }), - query.column_index, - ) - .into(), - ) - }) - .cloned() - .unwrap_or_default() - }, - &|query| { - layout - .get(&query.rotation.0) - .unwrap() - .get(&(Any::Instance, query.column_index).into()) - .unwrap() - .clone() - }, - &|challenge| format!("C{}({})", challenge.index(), challenge.phase()), - &|a| { - if a.contains(' ') { - format!("-({a})") - } else { - format!("-{a}") - } - }, - &|a, b| { - if let Some(b) = b.strip_prefix('-') { - format!("{a} - {b}") - } else { - format!("{a} + {b}") - } - }, - &|a, b| match (a.contains(' '), b.contains(' ')) { - (false, false) => format!("{a} * {b}"), - (false, true) => format!("{a} * ({b})"), - (true, false) => format!("({a}) * {b}"), - (true, true) => format!("({a}) * ({b})"), - }, - &|a, s| { - if a.contains(' ') { - format!("({}) * {}", a, util::format_value(s)) - } else { - format!("{} * {}", a, util::format_value(s)) - } - }, - ) -} diff --git a/halo2_proofs_rm/src/dev/gates.rs b/halo2_proofs_rm/src/dev/gates.rs deleted file mode 100644 index 4421c0967f..0000000000 --- a/halo2_proofs_rm/src/dev/gates.rs +++ /dev/null @@ -1,314 +0,0 @@ -use std::{ - collections::BTreeSet, - fmt::{self, Write}, -}; - -use ff::PrimeField; - -use crate::{ - dev::util, - plonk::{sealed::SealedPhase, Circuit, ConstraintSystem, FirstPhase}, -}; - -#[derive(Debug)] -struct Constraint { - name: String, - expression: String, - queries: BTreeSet, -} - -#[derive(Debug)] -struct Gate { - name: String, - constraints: Vec, -} - -/// A struct for collecting and displaying the gates within a circuit. 
-/// -/// # Examples -/// -/// ``` -/// use ff::Field; -/// use halo2_proofs::{ -/// circuit::{Layouter, SimpleFloorPlanner}, -/// dev::CircuitGates, -/// plonk::{Circuit, ConstraintSystem, Error}, -/// poly::Rotation, -/// }; -/// use halo2curves::pasta::pallas; -/// -/// #[derive(Copy, Clone)] -/// struct MyConfig {} -/// -/// #[derive(Clone, Default)] -/// struct MyCircuit {} -/// -/// impl Circuit for MyCircuit { -/// type Config = MyConfig; -/// type FloorPlanner = SimpleFloorPlanner; -/// #[cfg(feature = "circuit-params")] -/// type Params = (); -/// -/// fn without_witnesses(&self) -> Self { -/// Self::default() -/// } -/// -/// fn configure(meta: &mut ConstraintSystem) -> MyConfig { -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("R1CS constraint", |meta| { -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s = meta.query_selector(s); -/// -/// Some(("R1CS", s * (a * b - c))) -/// }); -/// -/// // We aren't using this circuit for anything in this example. -/// MyConfig {} -/// } -/// -/// fn synthesize(&self, _: MyConfig, _: impl Layouter) -> Result<(), Error> { -/// // Gates are known at configure time; it doesn't matter how we use them. 
-/// Ok(()) -/// } -/// } -/// -/// #[cfg(feature = "circuit-params")] -/// let gates = CircuitGates::collect::(()); -/// #[cfg(not(feature = "circuit-params"))] -/// let gates = CircuitGates::collect::(); -/// assert_eq!( -/// format!("{}", gates), -/// r#####"R1CS constraint: -/// - R1CS: -/// S0 * (A0@0 * A1@0 - A2@0) -/// Total gates: 1 -/// Total custom constraint polynomials: 1 -/// Total negations: 1 -/// Total additions: 1 -/// Total multiplications: 2 -/// "#####, -/// ); -/// ``` -#[derive(Debug)] -pub struct CircuitGates { - gates: Vec, - total_negations: usize, - total_additions: usize, - total_multiplications: usize, -} - -impl CircuitGates { - /// Collects the gates from within the circuit. - pub fn collect>( - #[cfg(feature = "circuit-params")] params: C::Params, - ) -> Self { - // Collect the graph details. - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let _ = C::configure_with_params(&mut cs, params); - #[cfg(not(feature = "circuit-params"))] - let _ = C::configure(&mut cs); - - let gates = cs - .gates - .iter() - .map(|gate| Gate { - name: gate.name().to_string(), - constraints: gate - .polynomials() - .iter() - .enumerate() - .map(|(i, constraint)| Constraint { - name: gate.constraint_name(i).to_string(), - expression: constraint.evaluate( - &util::format_value, - &|selector| format!("S{}", selector.0), - &|query| format!("F{}@{}", query.column_index, query.rotation.0), - &|query| { - if query.phase == FirstPhase.to_sealed() { - format!("A{}@{}", query.column_index, query.rotation.0) - } else { - format!( - "A{}({})@{}", - query.column_index, - query.phase(), - query.rotation.0 - ) - } - }, - &|query| format!("I{}@{}", query.column_index, query.rotation.0), - &|challenge| format!("C{}({})", challenge.index(), challenge.phase()), - &|a| { - if a.contains(' ') { - format!("-({a})") - } else { - format!("-{a}") - } - }, - &|a, b| { - if let Some(b) = b.strip_prefix('-') { - format!("{a} - {b}") - } else { - 
format!("{a} + {b}") - } - }, - &|a, b| match (a.contains(' '), b.contains(' ')) { - (false, false) => format!("{a} * {b}"), - (false, true) => format!("{a} * ({b})"), - (true, false) => format!("({a}) * {b}"), - (true, true) => format!("({a}) * ({b})"), - }, - &|a, s| { - if a.contains(' ') { - format!("({}) * {}", a, util::format_value(s)) - } else { - format!("{} * {}", a, util::format_value(s)) - } - }, - ), - queries: constraint.evaluate( - &|_| BTreeSet::default(), - &|selector| vec![format!("S{}", selector.0)].into_iter().collect(), - &|query| { - vec![format!("F{}@{}", query.column_index, query.rotation.0)] - .into_iter() - .collect() - }, - &|query| { - let query = if query.phase == FirstPhase.to_sealed() { - format!("A{}@{}", query.column_index, query.rotation.0) - } else { - format!( - "A{}({})@{}", - query.column_index, - query.phase(), - query.rotation.0 - ) - }; - vec![query].into_iter().collect() - }, - &|query| { - vec![format!("I{}@{}", query.column_index, query.rotation.0)] - .into_iter() - .collect() - }, - &|challenge| { - vec![format!("C{}({})", challenge.index(), challenge.phase())] - .into_iter() - .collect() - }, - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ), - }) - .collect(), - }) - .collect(); - - let (total_negations, total_additions, total_multiplications) = cs - .gates - .iter() - .flat_map(|gate| { - gate.polynomials().iter().map(|poly| { - poly.evaluate( - &|_| (0, 0, 0), - &|_| (0, 0, 0), - &|_| (0, 0, 0), - &|_| (0, 0, 0), - &|_| (0, 0, 0), - &|_| (0, 0, 0), - &|(a_n, a_a, a_m)| (a_n + 1, a_a, a_m), - &|(a_n, a_a, a_m), (b_n, b_a, b_m)| (a_n + b_n, a_a + b_a + 1, a_m + b_m), - &|(a_n, a_a, a_m), (b_n, b_a, b_m)| (a_n + b_n, a_a + b_a, a_m + b_m + 1), - &|(a_n, a_a, a_m), _| (a_n, a_a, a_m + 1), - ) - }) - }) - .fold((0, 0, 0), |(acc_n, acc_a, acc_m), (n, a, m)| { - (acc_n + n, acc_a + a, acc_m + m) - }); - - CircuitGates { - gates, - total_negations, - 
total_additions, - total_multiplications, - } - } - - /// Prints the queries in this circuit to a CSV grid. - pub fn queries_to_csv(&self) -> String { - let mut queries = BTreeSet::new(); - for gate in &self.gates { - for constraint in &gate.constraints { - for query in &constraint.queries { - queries.insert(query); - } - } - } - - let mut ret = String::new(); - let w = &mut ret; - for query in &queries { - write!(w, "{query},").unwrap(); - } - writeln!(w, "Name").unwrap(); - - for gate in &self.gates { - for constraint in &gate.constraints { - for query in &queries { - if constraint.queries.contains(*query) { - write!(w, "1").unwrap(); - } else { - write!(w, "0").unwrap(); - } - write!(w, ",").unwrap(); - } - writeln!(w, "{}/{}", gate.name, constraint.name).unwrap(); - } - } - ret - } -} - -impl fmt::Display for CircuitGates { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - for gate in &self.gates { - writeln!(f, "{}:", gate.name)?; - for constraint in &gate.constraints { - if constraint.name.is_empty() { - writeln!(f, "- {}", constraint.expression)?; - } else { - writeln!(f, "- {}:", constraint.name)?; - writeln!(f, " {}", constraint.expression)?; - } - } - } - writeln!(f, "Total gates: {}", self.gates.len())?; - writeln!( - f, - "Total custom constraint polynomials: {}", - self.gates - .iter() - .map(|gate| gate.constraints.len()) - .sum::() - )?; - writeln!(f, "Total negations: {}", self.total_negations)?; - writeln!(f, "Total additions: {}", self.total_additions)?; - writeln!(f, "Total multiplications: {}", self.total_multiplications) - } -} diff --git a/halo2_proofs_rm/src/dev/graph.rs b/halo2_proofs_rm/src/dev/graph.rs deleted file mode 100644 index 11654fe415..0000000000 --- a/halo2_proofs_rm/src/dev/graph.rs +++ /dev/null @@ -1,204 +0,0 @@ -use ff::Field; -use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; - -use crate::{ - circuit::Value, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, 
Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, -}; - -pub mod layout; - -/// Builds a dot graph string representing the given circuit. -/// -/// The graph is built from calls to [`Layouter::namespace`] both within the circuit, and -/// inside the gadgets and chips that it uses. -/// -/// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace -pub fn circuit_dot_graph>( - circuit: &ConcreteCircuit, -) -> String { - // Collect the graph details. - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let mut graph = Graph::default(); - ConcreteCircuit::FloorPlanner::synthesize(&mut graph, circuit, config, cs.constants).unwrap(); - - // Construct the node labels. We need to store these, because tabbycat operates on - // string references, and we need those references to live long enough. - let node_labels: Vec<_> = graph - .nodes - .into_iter() - .map(|(name, gadget_name)| { - if let Some(gadget_name) = gadget_name { - format!("[{gadget_name}] {name}") - } else { - name - } - }) - .collect(); - - // Construct the dot graph statements. - let mut stmts = StmtList::new(); - for (id, label) in node_labels.iter().enumerate() { - stmts = stmts.add_node( - id.into(), - None, - Some(AttrList::new().add_pair(tabbycat::attributes::label(label))), - ); - } - for (parent, child) in graph.edges { - stmts = - stmts.add_edge(Edge::head_node(parent.into(), None).arrow_to_node(child.into(), None)) - } - - // Build the graph! - GraphBuilder::default() - .graph_type(GraphType::DiGraph) - .strict(false) - .id(Identity::id("circuit").unwrap()) - .stmts(stmts) - .build() - .unwrap() - .to_string() -} - -#[derive(Default)] -struct Graph { - /// Graph nodes in the namespace, structured as `(name, gadget_name)`. 
- nodes: Vec<(String, Option)>, - - /// Directed edges in the graph, as pairs of indices into `nodes`. - edges: Vec<(usize, usize)>, - - /// The current namespace, as indices into `nodes`. - current_namespace: Vec, -} - -impl Assignment for Graph { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing; we don't care about cells in this context. - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, _: Column, _: usize) -> Result, Error> { - Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing; we don't care about cells in this context. - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing; we don't care about cells in this context. - Ok(()) - } - - fn copy( - &mut self, - _: Column, - _: usize, - _: Column, - _: usize, - ) -> Result<(), crate::plonk::Error> { - // Do nothing; we don't care about permutations in this context. - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Store the new node. 
- let new_node = self.nodes.len(); - self.nodes.push((name_fn().into(), None)); - - // Create an edge from the parent, if any. - if let Some(parent) = self.current_namespace.last() { - self.edges.push((*parent, new_node)); - } - - // Push the new namespace. - self.current_namespace.push(new_node); - } - - fn pop_namespace(&mut self, gadget_name: Option) { - // Store the gadget name that was extracted, if any. - let node = self - .current_namespace - .last() - .expect("pop_namespace should never be called on the root"); - self.nodes[*node].1 = gadget_name; - - // Pop the namespace. - self.current_namespace.pop(); - } -} diff --git a/halo2_proofs_rm/src/dev/graph/layout.rs b/halo2_proofs_rm/src/dev/graph/layout.rs deleted file mode 100644 index 94bd7eea14..0000000000 --- a/halo2_proofs_rm/src/dev/graph/layout.rs +++ /dev/null @@ -1,323 +0,0 @@ -use ff::Field; -use plotters::{ - coord::Shift, - prelude::{DrawingArea, DrawingAreaErrorKind, DrawingBackend}, -}; -use std::collections::HashSet; -use std::ops::Range; - -use crate::{ - circuit::layouter::RegionColumn, - dev::cost::Layout, - plonk::{Any, Circuit, Column, ConstraintSystem, FloorPlanner}, -}; - -/// Graphical renderer for circuit layouts. -/// -/// Cells that have been assigned to by the circuit will be shaded. If any cells are -/// assigned to more than once (which is usually a mistake), they will be shaded darker -/// than the surrounding cells. 
-/// -/// # Examples -/// -/// ```ignore -/// use halo2_proofs::dev::CircuitLayout; -/// use plotters::prelude::*; -/// -/// let drawing_area = BitMapBackend::new("example-circuit-layout.png", (1024, 768)) -/// .into_drawing_area(); -/// drawing_area.fill(&WHITE).unwrap(); -/// let drawing_area = drawing_area -/// .titled("Example Circuit Layout", ("sans-serif", 60)) -/// .unwrap(); -/// -/// let circuit = MyCircuit::default(); -/// let k = 5; // Suitable size for MyCircuit -/// CircuitLayout::default().render(k, &circuit, &drawing_area).unwrap(); -/// ``` -#[derive(Debug, Default)] -pub struct CircuitLayout { - hide_labels: bool, - mark_equality_cells: bool, - show_equality_constraints: bool, - view_width: Option>, - view_height: Option>, -} - -impl CircuitLayout { - /// Sets the visibility of region labels. - /// - /// The default is to show labels. - pub fn show_labels(mut self, show: bool) -> Self { - self.hide_labels = !show; - self - } - - /// Marks cells involved in equality constraints, in red. - /// - /// The default is to not mark these cells. - pub fn mark_equality_cells(mut self, show: bool) -> Self { - self.mark_equality_cells = show; - self - } - - /// Draws red lines between equality-constrained cells. - /// - /// The default is to not show these, as they can get _very_ messy. - pub fn show_equality_constraints(mut self, show: bool) -> Self { - self.show_equality_constraints = show; - self - } - - /// Sets the view width for this layout, as a number of columns. - pub fn view_width(mut self, width: Range) -> Self { - self.view_width = Some(width); - self - } - - /// Sets the view height for this layout, as a number of rows. - pub fn view_height(mut self, height: Range) -> Self { - self.view_height = Some(height); - self - } - - /// Renders the given circuit on the given drawing area. 
- pub fn render, DB: DrawingBackend>( - self, - k: u32, - circuit: &ConcreteCircuit, - drawing_area: &DrawingArea, - ) -> Result<(), DrawingAreaErrorKind> { - use plotters::coord::types::RangedCoordusize; - use plotters::prelude::*; - - let n = 1 << k; - // Collect the layout details. - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let mut layout = Layout::new(k, n, cs.num_selectors); - ConcreteCircuit::FloorPlanner::synthesize( - &mut layout, - circuit, - config, - cs.constants.clone(), - ) - .unwrap(); - let (cs, selector_polys) = cs.compress_selectors(layout.selectors); - let non_selector_fixed_columns = cs.num_fixed_columns - selector_polys.len(); - - // Figure out what order to render the columns in. - // TODO: For now, just render them in the order they were configured. - let total_columns = cs.num_instance_columns + cs.num_advice_columns + cs.num_fixed_columns; - let column_index = |cs: &ConstraintSystem, column: RegionColumn| { - let column: Column = match column { - RegionColumn::Column(col) => col, - RegionColumn::Selector(selector) => cs.selector_map[selector.0].into(), - }; - column.index() - + match column.column_type() { - Any::Instance => 0, - Any::Advice(_) => cs.num_instance_columns, - Any::Fixed => cs.num_instance_columns + cs.num_advice_columns, - } - }; - - let view_width = self.view_width.unwrap_or(0..total_columns); - let view_height = self.view_height.unwrap_or(0..n); - let view_bottom = view_height.end; - - // Prepare the grid layout. We render a red background for advice columns, white for - // instance columns, and blue for fixed columns (with a darker blue for selectors). 
- let root = - drawing_area.apply_coord_spec(Cartesian2d::::new( - view_width, - view_height, - drawing_area.get_pixel_range(), - )); - root.draw(&Rectangle::new( - [(0, 0), (total_columns, view_bottom)], - ShapeStyle::from(&WHITE).filled(), - ))?; - root.draw(&Rectangle::new( - [ - (cs.num_instance_columns, 0), - (cs.num_instance_columns + cs.num_advice_columns, view_bottom), - ], - ShapeStyle::from(&RED.mix(0.2)).filled(), - ))?; - root.draw(&Rectangle::new( - [ - (cs.num_instance_columns + cs.num_advice_columns, 0), - (total_columns, view_bottom), - ], - ShapeStyle::from(&BLUE.mix(0.2)).filled(), - ))?; - { - root.draw(&Rectangle::new( - [ - ( - cs.num_instance_columns - + cs.num_advice_columns - + non_selector_fixed_columns, - 0, - ), - (total_columns, view_bottom), - ], - ShapeStyle::from(&BLUE.mix(0.1)).filled(), - ))?; - } - - // Mark the unusable rows of the circuit. - let usable_rows = n - (cs.blinding_factors() + 1); - if view_bottom > usable_rows { - root.draw(&Rectangle::new( - [(0, usable_rows), (total_columns, view_bottom)], - ShapeStyle::from(&RED.mix(0.4)).filled(), - ))?; - } - - root.draw(&Rectangle::new( - [(0, 0), (total_columns, view_bottom)], - BLACK, - ))?; - - let draw_region = |root: &DrawingArea<_, _>, top_left, bottom_right| { - root.draw(&Rectangle::new( - [top_left, bottom_right], - ShapeStyle::from(&WHITE).filled(), - ))?; - root.draw(&Rectangle::new( - [top_left, bottom_right], - ShapeStyle::from(&RED.mix(0.2)).filled(), - ))?; - root.draw(&Rectangle::new( - [top_left, bottom_right], - ShapeStyle::from(&GREEN.mix(0.2)).filled(), - ))?; - root.draw(&Rectangle::new([top_left, bottom_right], BLACK))?; - Ok(()) - }; - - let draw_cell = |root: &DrawingArea<_, _>, column, row| { - root.draw(&Rectangle::new( - [(column, row), (column + 1, row + 1)], - ShapeStyle::from(&BLACK.mix(0.1)).filled(), - )) - }; - - // Render the regions! 
- let mut labels = if self.hide_labels { None } else { Some(vec![]) }; - for region in &layout.regions { - if let Some(offset) = region.offset { - // Sort the region's columns according to the defined ordering. - let mut columns: Vec<_> = region.columns.iter().cloned().collect(); - columns.sort_unstable_by_key(|a| column_index(&cs, *a)); - - // Render contiguous parts of the same region as a single box. - let mut width = None; - for column in columns { - let column = column_index(&cs, column); - match width { - Some((start, end)) if end == column => width = Some((start, end + 1)), - Some((start, end)) => { - draw_region(&root, (start, offset), (end, offset + region.rows))?; - if let Some(labels) = &mut labels { - labels.push((region.name.clone(), (start, offset))); - } - width = Some((column, column + 1)); - } - None => width = Some((column, column + 1)), - } - } - - // Render the last part of the region. - if let Some((start, end)) = width { - draw_region(&root, (start, offset), (end, offset + region.rows))?; - if let Some(labels) = &mut labels { - labels.push((region.name.clone(), (start, offset))); - } - } - } - } - - // Darken the cells of the region that have been assigned to. - for region in layout.regions { - for (column, row) in region.cells { - draw_cell(&root, column_index(&cs, column), row)?; - } - } - - // Darken any loose cells that have been assigned to. - for (column, row) in layout.loose_cells { - draw_cell(&root, column_index(&cs, column), row)?; - } - - // Mark equality-constrained cells. - if self.mark_equality_cells { - let mut cells = HashSet::new(); - for (l_col, l_row, r_col, r_row) in &layout.equality { - let l_col = column_index(&cs, (*l_col).into()); - let r_col = column_index(&cs, (*r_col).into()); - - // Deduplicate cells. 
- cells.insert((l_col, *l_row)); - cells.insert((r_col, *r_row)); - } - - for (col, row) in cells { - root.draw(&Rectangle::new( - [(col, row), (col + 1, row + 1)], - ShapeStyle::from(&RED.mix(0.5)).filled(), - ))?; - } - } - - // Draw lines between equality-constrained cells. - if self.show_equality_constraints { - for (l_col, l_row, r_col, r_row) in &layout.equality { - let l_col = column_index(&cs, (*l_col).into()); - let r_col = column_index(&cs, (*r_col).into()); - root.draw(&PathElement::new( - [(l_col, *l_row), (r_col, *r_row)], - ShapeStyle::from(&RED), - ))?; - } - } - - // Add a line showing the total used rows. - root.draw(&PathElement::new( - [(0, layout.total_rows), (total_columns, layout.total_rows)], - ShapeStyle::from(&BLACK), - ))?; - - // Render labels last, on top of everything else. - if let Some(labels) = labels { - for (label, top_left) in labels { - root.draw( - &(EmptyElement::at(top_left) - + Text::new(label, (10, 10), ("sans-serif", 15.0).into_font())), - )?; - } - root.draw( - &(EmptyElement::at((0, layout.total_rows)) - + Text::new( - format!("{} used rows", layout.total_rows), - (10, 10), - ("sans-serif", 15.0).into_font(), - )), - )?; - root.draw( - &(EmptyElement::at((0, usable_rows)) - + Text::new( - format!("{usable_rows} usable rows"), - (10, 10), - ("sans-serif", 15.0).into_font(), - )), - )?; - } - Ok(()) - } -} diff --git a/halo2_proofs_rm/src/dev/metadata.rs b/halo2_proofs_rm/src/dev/metadata.rs deleted file mode 100644 index f81bfa67a7..0000000000 --- a/halo2_proofs_rm/src/dev/metadata.rs +++ /dev/null @@ -1,313 +0,0 @@ -//! Metadata about circuits. - -use super::metadata::Column as ColumnMetadata; -use crate::plonk::{self, Any}; -use std::{ - collections::HashMap, - fmt::{self, Debug}, -}; -/// Metadata about a column within a circuit. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Column { - /// The type of the column. - pub(super) column_type: Any, - /// The index of the column. 
- pub(super) index: usize, -} - -impl Column { - /// Return the column type. - pub fn column_type(&self) -> Any { - self.column_type - } - /// Return the column index. - pub fn index(&self) -> usize { - self.index - } -} - -impl fmt::Display for Column { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Column('{:?}', {})", self.column_type, self.index) - } -} - -impl From<(Any, usize)> for Column { - fn from((column_type, index): (Any, usize)) -> Self { - Column { column_type, index } - } -} - -impl From> for Column { - fn from(column: plonk::Column) -> Self { - Column { - column_type: *column.column_type(), - index: column.index(), - } - } -} - -/// A helper structure that allows to print a Column with it's annotation as a single structure. -#[derive(Debug, Clone)] -pub(super) struct DebugColumn { - /// The type of the column. - column_type: Any, - /// The index of the column. - index: usize, - /// Annotation of the column - annotation: String, -} - -impl From<(Column, Option<&HashMap>)> for DebugColumn { - fn from(info: (Column, Option<&HashMap>)) -> Self { - DebugColumn { - column_type: info.0.column_type, - index: info.0.index, - annotation: info - .1 - .and_then(|map| map.get(&info.0)) - .cloned() - .unwrap_or_default(), - } - } -} - -impl fmt::Display for DebugColumn { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "Column('{:?}', {} - {})", - self.column_type, self.index, self.annotation - ) - } -} - -/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset -/// within a custom gate. 
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct VirtualCell { - name: String, - pub(super) column: Column, - pub(super) rotation: i32, -} - -impl From<(Column, i32)> for VirtualCell { - fn from((column, rotation): (Column, i32)) -> Self { - VirtualCell { - name: "".to_string(), - column, - rotation, - } - } -} - -impl> From<(S, Column, i32)> for VirtualCell { - fn from((name, column, rotation): (S, Column, i32)) -> Self { - VirtualCell { - name: name.as_ref().to_string(), - column, - rotation, - } - } -} - -impl From for VirtualCell { - fn from(c: plonk::VirtualCell) -> Self { - VirtualCell { - name: "".to_string(), - column: c.column.into(), - rotation: c.rotation.0, - } - } -} - -impl fmt::Display for VirtualCell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}@{}", self.column, self.rotation)?; - if !self.name.is_empty() { - write!(f, "({})", self.name.as_str())?; - } - Ok(()) - } -} - -/// Helper structure used to be able to inject Column annotations inside a `Display` or `Debug` call. -#[derive(Clone, Debug)] -pub(super) struct DebugVirtualCell { - name: String, - column: DebugColumn, - rotation: i32, -} - -impl From<(&VirtualCell, Option<&HashMap>)> for DebugVirtualCell { - fn from(info: (&VirtualCell, Option<&HashMap>)) -> Self { - DebugVirtualCell { - name: info.0.name.clone(), - column: DebugColumn::from((info.0.column, info.1)), - rotation: info.0.rotation, - } - } -} - -impl fmt::Display for DebugVirtualCell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}@{}", self.column, self.rotation)?; - if !self.name.is_empty() { - write!(f, "({})", self.name)?; - } - Ok(()) - } -} - -/// Metadata about a configured gate within a circuit. -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct Gate { - /// The index of the active gate. These indices are assigned in the order in which - /// `ConstraintSystem::create_gate` is called during `Circuit::configure`. 
- pub(super) index: usize, - /// The name of the active gate. These are specified by the gate creator (such as - /// a chip implementation), and is not enforced to be unique. - pub(super) name: String, -} - -impl fmt::Display for Gate { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Gate {} ('{}')", self.index, self.name.as_str()) - } -} - -impl> From<(usize, S)> for Gate { - fn from((index, name): (usize, S)) -> Self { - Gate { - index, - name: name.as_ref().to_string(), - } - } -} - -/// Metadata about a configured constraint within a circuit. -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct Constraint { - /// The gate containing the constraint. - pub(super) gate: Gate, - /// The index of the polynomial constraint within the gate. These indices correspond - /// to the order in which the constraints are returned from the closure passed to - /// `ConstraintSystem::create_gate` during `Circuit::configure`. - pub(super) index: usize, - /// The name of the constraint. This is specified by the gate creator (such as a chip - /// implementation), and is not enforced to be unique. - pub(super) name: String, -} - -impl fmt::Display for Constraint { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "Constraint {}{} in gate {} ('{}')", - self.index, - if self.name.is_empty() { - String::new() - } else { - format!(" ('{}')", self.name.as_str()) - }, - self.gate.index, - self.gate.name, - ) - } -} - -impl> From<(Gate, usize, S)> for Constraint { - fn from((gate, index, name): (Gate, usize, S)) -> Self { - Constraint { - gate, - index, - name: name.as_ref().to_string(), - } - } -} - -/// Metadata about an assigned region within a circuit. -#[derive(Clone)] -pub struct Region { - /// The index of the region. These indices are assigned in the order in which - /// `Layouter::assign_region` is called during `Circuit::synthesize`. - pub(super) index: usize, - /// The name of the region. 
This is specified by the region creator (such as a chip - /// implementation), and is not enforced to be unique. - pub(super) name: String, - /// A reference to the annotations of the Columns that exist within this `Region`. - pub(super) column_annotations: Option>, -} - -impl Region { - /// Fetch the annotation of a `Column` within a `Region` providing it's associated metadata. - /// - /// This function will return `None` if: - /// - There's no annotation map generated for this `Region`. - /// - There's no entry on the annotation map corresponding to the metadata provided. - pub(crate) fn get_column_annotation(&self, metadata: ColumnMetadata) -> Option { - self.column_annotations - .as_ref() - .and_then(|map| map.get(&metadata).cloned()) - } -} - -impl PartialEq for Region { - fn eq(&self, other: &Self) -> bool { - self.index == other.index && self.name == other.name - } -} - -impl Eq for Region {} - -impl Debug for Region { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Region {} ('{}')", self.index, self.name) - } -} - -impl fmt::Display for Region { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Region {} ('{}')", self.index, self.name.as_str()) - } -} - -impl From<(usize, String)> for Region { - fn from((index, name): (usize, String)) -> Self { - Region { - index, - name, - column_annotations: None, - } - } -} - -impl From<(usize, &str)> for Region { - fn from((index, name): (usize, &str)) -> Self { - Region { - index, - name: name.to_owned(), - column_annotations: None, - } - } -} - -impl From<(usize, String, HashMap)> for Region { - fn from((index, name, annotations): (usize, String, HashMap)) -> Self { - Region { - index, - name, - column_annotations: Some(annotations), - } - } -} - -impl From<(usize, &str, HashMap)> for Region { - fn from((index, name, annotations): (usize, &str, HashMap)) -> Self { - Region { - index, - name: name.to_owned(), - column_annotations: Some(annotations), - } - } -} diff 
--git a/halo2_proofs_rm/src/dev/tfp.rs b/halo2_proofs_rm/src/dev/tfp.rs deleted file mode 100644 index 011ba3cac0..0000000000 --- a/halo2_proofs_rm/src/dev/tfp.rs +++ /dev/null @@ -1,508 +0,0 @@ -use std::{fmt, marker::PhantomData}; - -use ff::Field; -use tracing::{debug, debug_span, span::EnteredSpan}; - -use crate::{ - circuit::{ - layouter::{RegionLayouter, SyncDeps}, - AssignedCell, Cell, Layouter, Region, Table, Value, - }, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, -}; - -/// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. -/// -/// `TracingFloorPlanner` can be used to instrument your circuit and determine exactly -/// what is happening during a particular run of keygen or proving. This can be useful for -/// identifying unexpected non-determinism or changes to a circuit. -/// -/// # No stability guarantees -/// -/// The `tracing` output is intended for use during circuit development. It should not be -/// considered production-stable, and the precise format or data exposed may change at any -/// time. -/// -/// # Examples -/// -/// ``` -/// use ff::Field; -/// use halo2_proofs::{ -/// circuit::{floor_planner, Layouter, Value}, -/// dev::TracingFloorPlanner, -/// plonk::{Circuit, ConstraintSystem, Error}, -/// }; -/// -/// # struct MyCircuit { -/// # some_witness: Value, -/// # }; -/// # #[derive(Clone)] -/// # struct MyConfig; -/// impl Circuit for MyCircuit { -/// // Wrap `TracingFloorPlanner` around your existing floor planner of choice. -/// //type FloorPlanner = floor_planner::V1; -/// type FloorPlanner = TracingFloorPlanner; -/// -/// // The rest of your `Circuit` implementation is unchanged. 
-/// type Config = MyConfig; -/// -/// #[cfg(feature = "circuit-params")] -/// type Params = (); -/// -/// fn without_witnesses(&self) -> Self { -/// Self { some_witness: Value::unknown() } -/// } -/// -/// fn configure(meta: &mut ConstraintSystem) -> Self::Config { -/// // .. -/// # todo!() -/// } -/// -/// fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error> { -/// // .. -/// # todo!() -/// } -/// } -/// -/// #[test] -/// fn some_circuit_test() { -/// // At the start of your test, enable tracing. -/// tracing_subscriber::fmt() -/// .with_max_level(tracing::Level::DEBUG) -/// .with_ansi(false) -/// .without_time() -/// .init(); -/// -/// // Now when the rest of the test runs, you will get `tracing` output for every -/// // operation that the circuit performs under the hood! -/// } -/// ``` -#[derive(Debug)] -pub struct TracingFloorPlanner { - _phantom: PhantomData

, -} - -impl FloorPlanner for TracingFloorPlanner

{ - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error> { - P::synthesize( - &mut TracingAssignment::new(cs), - &TracingCircuit::borrowed(circuit), - config, - constants, - ) - } -} - -/// A helper type that augments a [`Circuit`] with [`tracing`] spans and events. -enum TracingCircuit<'c, F: Field, C: Circuit> { - Borrowed(&'c C, PhantomData), - Owned(C, PhantomData), -} - -impl<'c, F: Field, C: Circuit> TracingCircuit<'c, F, C> { - fn borrowed(circuit: &'c C) -> Self { - Self::Borrowed(circuit, PhantomData) - } - - fn owned(circuit: C) -> Self { - Self::Owned(circuit, PhantomData) - } - - fn inner_ref(&self) -> &C { - match self { - TracingCircuit::Borrowed(circuit, ..) => circuit, - TracingCircuit::Owned(circuit, ..) => circuit, - } - } -} - -impl<'c, F: Field, C: Circuit> Circuit for TracingCircuit<'c, F, C> { - type Config = C::Config; - type FloorPlanner = C::FloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self::owned(self.inner_ref().without_witnesses()) - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let _span = debug_span!("configure").entered(); - C::configure(meta) - } - - fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error> { - let _span = debug_span!("synthesize").entered(); - self.inner_ref() - .synthesize(config, TracingLayouter::new(layouter)) - } -} - -/// A helper type that augments a [`Layouter`] with [`tracing`] spans and events. 
-struct TracingLayouter> { - layouter: L, - namespace_spans: Vec, - _phantom: PhantomData, -} - -impl> TracingLayouter { - fn new(layouter: L) -> Self { - Self { - layouter, - namespace_spans: vec![], - _phantom: PhantomData, - } - } -} - -impl> Layouter for TracingLayouter { - type Root = Self; - - fn assign_region(&mut self, name: N, mut assignment: A) -> Result - where - A: FnMut(Region<'_, F>) -> Result, - N: Fn() -> NR, - NR: Into, - { - let _span = debug_span!("region", name = name().into()).entered(); - self.layouter.assign_region(name, |region| { - let mut region = TracingRegion(region); - let region: &mut dyn RegionLayouter = &mut region; - assignment(region.into()) - }) - } - - fn assign_table(&mut self, name: N, assignment: A) -> Result<(), Error> - where - A: FnMut(Table<'_, F>) -> Result<(), Error>, - N: Fn() -> NR, - NR: Into, - { - let _span = debug_span!("table", name = name().into()).entered(); - self.layouter.assign_table(name, assignment) - } - - fn constrain_instance( - &mut self, - cell: Cell, - column: Column, - row: usize, - ) -> Result<(), Error> { - self.layouter.constrain_instance(cell, column, row) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn get_root(&mut self) -> &mut Self::Root { - self - } - - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - let name = name_fn().into(); - self.namespace_spans.push(debug_span!("ns", name).entered()); - self.layouter.push_namespace(|| name); - } - - fn pop_namespace(&mut self, gadget_name: Option) { - self.layouter.pop_namespace(gadget_name); - self.namespace_spans.pop(); - } -} - -fn debug_value_and_return_cell(value: AssignedCell) -> Cell { - if let Some(v) = value.value().into_option() { - debug!(target: "assigned", value = ?v); - } - value.cell() -} - -/// A helper type that augments a [`Region`] with [`tracing`] spans and events. 
-#[derive(Debug)] -struct TracingRegion<'r, F: Field>(Region<'r, F>); - -impl<'r, F: Field> RegionLayouter for TracingRegion<'r, F> { - fn enable_selector<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - selector: &Selector, - offset: usize, - ) -> Result<(), Error> { - let _guard = debug_span!("enable_selector", name = annotation(), offset = offset).entered(); - debug!(target: "layouter", "Entered"); - self.0.enable_selector(annotation, selector, offset) - } - - fn name_column<'v>( - &'v mut self, - _: &'v (dyn std::ops::Fn() -> std::string::String + 'v), - _: Column, - ) { - } - - fn assign_advice<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - let _guard = - debug_span!("assign_advice", name = annotation(), column = ?column, offset = offset) - .entered(); - debug!(target: "layouter", "Entered"); - self.0 - .assign_advice(annotation, column, offset, to) - .map(debug_value_and_return_cell) - } - - fn assign_advice_from_constant<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - constant: Assigned, - ) -> Result { - let _guard = debug_span!("assign_advice_from_constant", - name = annotation(), - column = ?column, - offset = offset, - constant = ?constant, - ) - .entered(); - debug!(target: "layouter", "Entered"); - self.0 - .assign_advice_from_constant(annotation, column, offset, constant) - .map(debug_value_and_return_cell) - } - - fn assign_advice_from_instance<'v>( - &mut self, - annotation: &'v (dyn Fn() -> String + 'v), - instance: Column, - row: usize, - advice: Column, - offset: usize, - ) -> Result<(Cell, Value), Error> { - let _guard = debug_span!("assign_advice_from_instance", - name = annotation(), - instance = ?instance, - row = row, - advice = ?advice, - offset = offset, - ) - .entered(); - debug!(target: "layouter", "Entered"); - self.0 - 
.assign_advice_from_instance(annotation, instance, row, advice, offset) - .map(|value| { - if let Some(v) = value.value().into_option() { - debug!(target: "assigned", value = ?v); - } - (value.cell(), value.value().cloned()) - }) - } - - fn instance_value( - &mut self, - instance: Column, - row: usize, - ) -> Result, Error> { - self.0.instance_value(instance, row) - } - - fn assign_fixed<'v>( - &'v mut self, - annotation: &'v (dyn Fn() -> String + 'v), - column: Column, - offset: usize, - to: &'v mut (dyn FnMut() -> Value> + 'v), - ) -> Result { - let _guard = - debug_span!("assign_fixed", name = annotation(), column = ?column, offset = offset) - .entered(); - debug!(target: "layouter", "Entered"); - self.0 - .assign_fixed(annotation, column, offset, to) - .map(debug_value_and_return_cell) - } - - fn constrain_constant(&mut self, cell: Cell, constant: Assigned) -> Result<(), Error> { - debug!(target: "constrain_constant", cell = ?cell, constant = ?constant); - self.0.constrain_constant(cell, constant) - } - - fn constrain_equal(&mut self, left: Cell, right: Cell) -> Result<(), Error> { - debug!(target: "constrain_equal", left = ?left, right = ?right); - self.0.constrain_equal(left, right) - } -} - -/// A helper type that augments an [`Assignment`] with [`tracing`] spans and events. 
-struct TracingAssignment<'cs, F: Field, CS: Assignment> { - cs: &'cs mut CS, - in_region: bool, - _phantom: PhantomData, -} - -impl<'cs, F: Field, CS: Assignment> TracingAssignment<'cs, F, CS> { - fn new(cs: &'cs mut CS) -> Self { - Self { - cs, - in_region: false, - _phantom: PhantomData, - } - } -} - -impl<'cs, F: Field, CS: Assignment> Assignment for TracingAssignment<'cs, F, CS> { - fn enter_region(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - self.in_region = true; - self.cs.enter_region(name_fn); - } - - fn annotate_column(&mut self, _: A, _: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - } - - fn exit_region(&mut self) { - self.cs.exit_region(); - self.in_region = false; - } - - fn enable_selector( - &mut self, - annotation: A, - selector: &Selector, - row: usize, - ) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - let annotation = annotation().into(); - if self.in_region { - debug!(target: "position", row = row); - } else { - debug!(target: "enable_selector", name = annotation, row = row); - } - self.cs.enable_selector(|| annotation, selector, row) - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - let _guard = debug_span!("positioned").entered(); - debug!(target: "query_instance", column = ?column, row = row); - self.cs.query_instance(column, row) - } - - fn assign_advice( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - let annotation = annotation().into(); - if self.in_region { - debug!(target: "position", row = row); - } else { - debug!(target: "assign_advice", name = annotation, column = ?column, row = row); - } - self.cs.assign_advice(|| annotation, column, row, to) - } - - fn assign_fixed( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: 
FnOnce() -> AR, - AR: Into, - { - let annotation = annotation().into(); - if self.in_region { - debug!(target: "position", row = row); - } else { - debug!(target: "assign_fixed", name = annotation, column = ?column, row = row); - } - self.cs.assign_fixed(|| annotation, column, row, to) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - let _guard = debug_span!("positioned").entered(); - debug!( - target: "copy", - left_column = ?left_column, - left_row = left_row, - right_column = ?right_column, - right_row = right_row, - ); - self.cs.copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - column: Column, - row: usize, - to: Value>, - ) -> Result<(), Error> { - let _guard = debug_span!("positioned").entered(); - debug!(target: "fill_from_row", column = ?column, row = row); - self.cs.fill_from_row(column, row, to) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // We enter namespace spans in TracingLayouter. - self.cs.push_namespace(name_fn) - } - - fn pop_namespace(&mut self, gadget_name: Option) { - self.cs.pop_namespace(gadget_name); - // We exit namespace spans in TracingLayouter. 
- } -} diff --git a/halo2_proofs_rm/src/dev/util.rs b/halo2_proofs_rm/src/dev/util.rs deleted file mode 100644 index a663f9b80b..0000000000 --- a/halo2_proofs_rm/src/dev/util.rs +++ /dev/null @@ -1,161 +0,0 @@ -use group::ff::Field; -use std::collections::BTreeMap; - -use super::{metadata, CellValue, InstanceValue, Value}; -use crate::{ - plonk::{ - Advice, AdviceQuery, Any, Column, ColumnType, Expression, FixedQuery, Gate, InstanceQuery, - VirtualCell, - }, - poly::Rotation, -}; - -pub(crate) struct AnyQuery { - /// Query index - pub index: Option, - /// Column type - pub column_type: Any, - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} - -impl From for AnyQuery { - fn from(query: FixedQuery) -> Self { - Self { - index: query.index, - column_type: Any::Fixed, - column_index: query.column_index, - rotation: query.rotation, - } - } -} - -impl From for AnyQuery { - fn from(query: AdviceQuery) -> Self { - Self { - index: query.index, - column_type: Any::Advice(Advice { phase: query.phase }), - column_index: query.column_index, - rotation: query.rotation, - } - } -} - -impl From for AnyQuery { - fn from(query: InstanceQuery) -> Self { - Self { - index: query.index, - column_type: Any::Instance, - column_index: query.column_index, - rotation: query.rotation, - } - } -} - -pub(super) fn format_value(v: F) -> String { - if v.is_zero_vartime() { - "0".into() - } else if v == F::ONE { - "1".into() - } else if v == -F::ONE { - "-1".into() - } else { - // Format value as hex. - let s = format!("{v:?}"); - // Remove leading zeroes. 
- let s = s.strip_prefix("0x").unwrap(); - let s = s.trim_start_matches('0'); - format!("0x{s}") - } -} - -pub(super) fn load<'a, F: Field, T: ColumnType, Q: Into + Copy>( - n: i32, - row: i32, - queries: &'a [(Column, Rotation)], - cells: &'a [Vec>], -) -> impl Fn(Q) -> Value + 'a { - move |query| { - let (column, at) = &queries[query.into().index.unwrap()]; - let resolved_row = (row + at.0) % n; - cells[column.index()][resolved_row as usize].into() - } -} - -pub(super) fn load_instance<'a, F: Field, T: ColumnType, Q: Into + Copy>( - n: i32, - row: i32, - queries: &'a [(Column, Rotation)], - cells: &'a [Vec>], -) -> impl Fn(Q) -> Value + 'a { - move |query| { - let (column, at) = &queries[query.into().index.unwrap()]; - let resolved_row = (row + at.0) % n; - let cell = &cells[column.index()][resolved_row as usize]; - Value::Real(cell.value()) - } -} - -fn cell_value<'a, F: Field, Q: Into + Copy>( - virtual_cells: &'a [VirtualCell], - load: impl Fn(Q) -> Value + 'a, -) -> impl Fn(Q) -> BTreeMap + 'a { - move |query| { - let AnyQuery { - column_type, - column_index, - rotation, - .. - } = query.into(); - virtual_cells - .iter() - .find(|c| { - c.column.column_type() == &column_type - && c.column.index() == column_index - && c.rotation == rotation - }) - // None indicates a selector, which we don't bother showing. 
- .map(|cell| { - ( - cell.clone().into(), - match load(query) { - Value::Real(v) => format_value(v), - Value::Poison => unreachable!(), - }, - ) - }) - .into_iter() - .collect() - } -} - -pub(super) fn cell_values<'a, F: Field>( - gate: &Gate, - poly: &Expression, - load_fixed: impl Fn(FixedQuery) -> Value + 'a, - load_advice: impl Fn(AdviceQuery) -> Value + 'a, - load_instance: impl Fn(InstanceQuery) -> Value + 'a, -) -> Vec<(metadata::VirtualCell, String)> { - let virtual_cells = gate.queried_cells(); - let cell_values = poly.evaluate( - &|_| BTreeMap::default(), - &|_| panic!("virtual selectors are removed during optimization"), - &cell_value(virtual_cells, load_fixed), - &cell_value(virtual_cells, load_advice), - &cell_value(virtual_cells, load_instance), - &|_| BTreeMap::default(), - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ); - cell_values.into_iter().collect() -} diff --git a/halo2_proofs_rm/src/helpers.rs b/halo2_proofs_rm/src/helpers.rs deleted file mode 100644 index faf7351a3e..0000000000 --- a/halo2_proofs_rm/src/helpers.rs +++ /dev/null @@ -1,154 +0,0 @@ -use crate::poly::Polynomial; -use ff::PrimeField; -use halo2curves::{serde::SerdeObject, CurveAffine}; -use std::io; - -/// This enum specifies how various types are serialized and deserialized. -#[derive(Clone, Copy, Debug)] -pub enum SerdeFormat { - /// Curve elements are serialized in compressed form. - /// Field elements are serialized in standard form, with endianness specified by the - /// `PrimeField` implementation. - Processed, - /// Curve elements are serialized in uncompressed form. Field elements are serialized - /// in their internal Montgomery representation. - /// When deserializing, checks are performed to ensure curve elements indeed lie on the curve and field elements - /// are less than modulus. - RawBytes, - /// Serialization is the same as `RawBytes`, but no checks are performed. 
- RawBytesUnchecked, -} - -// Keep this trait for compatibility with IPA serialization -pub(crate) trait CurveRead: CurveAffine { - /// Reads a compressed element from the buffer and attempts to parse it - /// using `from_bytes`. - fn read(reader: &mut R) -> io::Result { - let mut compressed = Self::Repr::default(); - reader.read_exact(compressed.as_mut())?; - Option::from(Self::from_bytes(&compressed)) - .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Invalid point encoding in proof")) - } -} -impl CurveRead for C {} - -pub trait SerdeCurveAffine: CurveAffine + SerdeObject { - /// Reads an element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompress it - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. - /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - match format { - SerdeFormat::Processed => ::read(reader), - SerdeFormat::RawBytes => ::read_raw(reader), - SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), - } - } - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - match format { - SerdeFormat::Processed => writer.write_all(self.to_bytes().as_ref()), - _ => self.write_raw(writer), - } - } - - /// Byte length of an affine curve element according to `format`. 
- fn byte_length(format: SerdeFormat) -> usize { - match format { - SerdeFormat::Processed => Self::default().to_bytes().as_ref().len(), - _ => Self::Repr::default().as_ref().len() * 2, - } - } -} -impl SerdeCurveAffine for C {} - -pub trait SerdePrimeField: PrimeField + SerdeObject { - /// Reads a field element as bytes from the buffer according to the `format`: - /// - `Processed`: Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations, - /// and checks that the element is less than the modulus. - /// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks. - fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - match format { - SerdeFormat::Processed => { - let mut compressed = Self::Repr::default(); - reader.read_exact(compressed.as_mut())?; - Option::from(Self::from_repr(compressed)).ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "Invalid prime field point encoding") - }) - } - SerdeFormat::RawBytes => ::read_raw(reader), - SerdeFormat::RawBytesUnchecked => Ok(::read_raw_unchecked(reader)), - } - } - - /// Writes a field element as bytes to the buffer according to the `format`: - /// - `Processed`: Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - match format { - SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()), - _ => self.write_raw(writer), - } - } -} -impl SerdePrimeField for F {} - -/// Convert a slice of `bool` into a `u8`. -/// -/// Panics if the slice has length greater than 8. 
-pub fn pack(bits: &[bool]) -> u8 { - let mut value = 0u8; - assert!(bits.len() <= 8); - for (bit_index, bit) in bits.iter().enumerate() { - value |= (*bit as u8) << bit_index; - } - value -} - -/// Writes the first `bits.len()` bits of a `u8` into `bits`. -pub fn unpack(byte: u8, bits: &mut [bool]) { - for (bit_index, bit) in bits.iter_mut().enumerate() { - *bit = (byte >> bit_index) & 1 == 1; - } -} - -/// Reads a vector of polynomials from buffer -pub(crate) fn read_polynomial_vec( - reader: &mut R, - format: SerdeFormat, -) -> io::Result>> { - let mut len = [0u8; 4]; - reader.read_exact(&mut len)?; - let len = u32::from_be_bytes(len); - - (0..len) - .map(|_| Polynomial::::read(reader, format)) - .collect::>>() -} - -/// Writes a slice of polynomials to buffer -pub(crate) fn write_polynomial_slice( - slice: &[Polynomial], - writer: &mut W, - format: SerdeFormat, -) -> io::Result<()> { - writer.write_all(&(slice.len() as u32).to_be_bytes())?; - for poly in slice.iter() { - poly.write(writer, format)?; - } - Ok(()) -} - -/// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length -pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { - let field_len = F::default().to_repr().as_ref().len(); - 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) -} diff --git a/halo2_proofs_rm/src/lib.rs b/halo2_proofs_rm/src/lib.rs index acc26aff15..e69de29bb2 100644 --- a/halo2_proofs_rm/src/lib.rs +++ b/halo2_proofs_rm/src/lib.rs @@ -1,21 +0,0 @@ -//! # halo2_proofs - -#![cfg_attr(docsrs, feature(doc_cfg))] -// The actual lints we want to disable. 
-#![allow(clippy::op_ref, clippy::many_single_char_names)] -#![deny(rustdoc::broken_intra_doc_links)] -#![deny(missing_debug_implementations)] -#![deny(missing_docs)] -#![deny(unsafe_code)] - -pub mod arithmetic; -pub mod circuit; -pub use halo2curves; -mod multicore; -pub mod plonk; -pub mod poly; -pub mod transcript; - -pub mod dev; -mod helpers; -pub use helpers::SerdeFormat; diff --git a/halo2_proofs_rm/src/multicore.rs b/halo2_proofs_rm/src/multicore.rs deleted file mode 100644 index 4d30b91a8b..0000000000 --- a/halo2_proofs_rm/src/multicore.rs +++ /dev/null @@ -1,38 +0,0 @@ -pub use rayon::{ - current_num_threads, - iter::{IndexedParallelIterator, IntoParallelRefIterator}, - iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, - join, scope, - slice::ParallelSliceMut, - Scope, -}; - -pub trait TryFoldAndReduce { - /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, - /// falling back on `Iterator::try_fold` when the `multicore` feature flag is - /// disabled. - /// The `try_fold_and_reduce` function can only be called by a iter with - /// `Result` item type because the `fold_op` must meet the trait - /// bounds of both `try_fold` and `try_reduce` from rayon. - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result; -} - -impl TryFoldAndReduce for I -where - T: Send + Sync, - E: Send + Sync, - I: rayon::iter::ParallelIterator>, -{ - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result { - self.try_fold(&identity, &fold_op) - .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) - } -} diff --git a/halo2_proofs_rm/src/plonk.rs b/halo2_proofs_rm/src/plonk.rs deleted file mode 100644 index eade0e5a74..0000000000 --- a/halo2_proofs_rm/src/plonk.rs +++ /dev/null @@ -1,549 +0,0 @@ -//! 
This module provides an implementation of a variant of (Turbo)[PLONK][plonk] -//! that is designed specifically for the polynomial commitment scheme described -//! in the [Halo][halo] paper. -//! -//! [halo]: https://eprint.iacr.org/2019/1021 -//! [plonk]: https://eprint.iacr.org/2019/953 - -use blake2b_simd::Params as Blake2bParams; -use group::ff::{Field, FromUniformBytes, PrimeField}; - -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, - SerdePrimeField, -}; -use crate::poly::{ - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, Rotation, -}; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; - -mod assigned; -mod circuit; -mod error; -mod evaluation; -mod keygen; -mod lookup; -pub mod permutation; -mod shuffle; -mod vanishing; - -mod prover; -mod verifier; - -pub use assigned::*; -pub use circuit::*; -pub use error::*; -pub use keygen::*; -pub use prover::*; -pub use verifier::*; - -use evaluation::Evaluator; -use std::io; - -/// List of queries (columns and rotations) used by a circuit -#[derive(Debug, Clone)] -pub struct Queries { - /// List of unique advice queries - pub advice: Vec<(Column, Rotation)>, - /// List of unique instance queries - pub instance: Vec<(Column, Rotation)>, - /// List of unique fixed queries - pub fixed: Vec<(Column, Rotation)>, - /// Contains an integer for each advice column - /// identifying how many distinct queries it has - /// so far; should be same length as cs.num_advice_columns. - pub num_advice_queries: Vec, -} - -impl Queries { - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. 
- pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) - + 1 // for at least one row - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } -} - -/// This is a verifying key which allows for the verification of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct VerifyingKey { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - /// Cached maximum degree of `cs` (which doesn't change after construction). 
- cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, - selectors: Vec>, - /// Whether selector compression is turned on or not. - compress_selectors: bool, -} - -// Current version of the VK -const VERSION: u8 = 0x03; - -impl VerifyingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a verifying key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - // Version byte that will be checked on read. - writer.write_all(&[VERSION])?; - let k = &self.domain.k(); - assert!(*k <= C::Scalar::S); - // k value fits in 1 byte - writer.write_all(&[*k as u8])?; - writer.write_all(&[self.compress_selectors as u8])?; - writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; - for commitment in &self.fixed_commitments { - commitment.write(writer, format)?; - } - self.permutation.write(writer, format)?; - - if !self.compress_selectors { - assert!(self.selectors.is_empty()); - } - // write self.selectors - for selector in &self.selectors { - // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write - for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; - } - } - Ok(()) - } - - /// Reads a verification key from a buffer. 
- /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. - /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let mut version_byte = [0u8; 1]; - reader.read_exact(&mut version_byte)?; - if VERSION != version_byte[0] { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected version byte", - )); - } - - let mut k = [0u8; 1]; - reader.read_exact(&mut k)?; - let k = u8::from_le_bytes(k); - if k as u32 > C::Scalar::S { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - format!( - "circuit size value (k): {} exceeds maxium: {}", - k, - C::Scalar::S - ), - )); - } - let mut compress_selectors = [0u8; 1]; - reader.read_exact(&mut compress_selectors)?; - if compress_selectors[0] != 0 && compress_selectors[0] != 1 { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected compress_selectors not boolean", - )); - } - let compress_selectors = compress_selectors[0] == 1; - let (domain, cs, _) = keygen::create_domain::( - k as u32, - #[cfg(feature = "circuit-params")] - params, - ); - let mut num_fixed_columns = [0u8; 4]; - reader.read_exact(&mut num_fixed_columns)?; - let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - - let fixed_commitments: Vec<_> = (0..num_fixed_columns) - .map(|_| C::read(reader, format)) - .collect::>()?; - 
- let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - - let (cs, selectors) = if compress_selectors { - // read selectors - let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] - .into_iter() - .map(|mut selector| { - let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; - reader.read_exact(&mut selector_bytes)?; - for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); - } - Ok(selector) - }) - .collect::>()?; - let (cs, _) = cs.compress_selectors(selectors.clone()); - (cs, selectors) - } else { - // we still need to replace selectors with fixed Expressions in `cs` - let fake_selectors = vec![vec![]; cs.num_selectors]; - let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); - (cs, vec![]) - }; - - Ok(Self::from_parts( - domain, - fixed_commitments, - permutation, - cs, - selectors, - compress_selectors, - )) - } - - /// Writes a verifying key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a verification key from a slice of bytes using [`Self::read`]. 
- pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - 10 + (self.fixed_commitments.len() * C::byte_length(format)) - + self.permutation.bytes_length(format) - + self.selectors.len() - * (self - .selectors - .get(0) - .map(|selector| (selector.len() + 7) / 8) - .unwrap_or(0)) - } - - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - selectors: Vec>, - compress_selectors: bool, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. - let cs_degree = cs.degree(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - cs_degree, - // Temporary, this is not pinned. - transcript_repr: C::Scalar::ZERO, - selectors, - compress_selectors, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - let s = format!("{:?}", vk.pinned()); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } - - /// Obtains a pinned representation of this verification key that contains - /// the minimal information necessary to reconstruct the verification key. 
- pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { - PinnedVerificationKey { - base_modulus: C::Base::MODULUS, - scalar_modulus: C::Scalar::MODULUS, - domain: self.domain.pinned(), - fixed_commitments: &self.fixed_commitments, - permutation: &self.permutation, - cs: self.cs.pinned(), - } - } - - /// Returns commitments of fixed polynomials - pub fn fixed_commitments(&self) -> &Vec { - &self.fixed_commitments - } - - /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> &permutation::VerifyingKey { - &self.permutation - } - - /// Returns `ConstraintSystem` - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns representative of this `VerifyingKey` in transcripts - pub fn transcript_repr(&self) -> C::Scalar { - self.transcript_repr - } -} - -/// Minimal representation of a verification key that can be used to identify -/// its active contents. -#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedVerificationKey<'a, C: CurveAffine> { - base_modulus: &'static str, - scalar_modulus: &'static str, - domain: PinnedEvaluationDomain<'a, C::Scalar>, - cs: PinnedConstraintSystem<'a, C::Scalar>, - fixed_commitments: &'a Vec, - permutation: &'a permutation::VerifyingKey, -} - -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKey { - vk: VerifyingKey, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -impl ProvingKey -where - C::Scalar: FromUniformBytes<64>, -{ - /// Get the underlying [`VerifyingKey`]. 
- pub fn get_vk(&self) -> &VerifyingKey { - &self.vk - } - - /// Gets the total number of bytes in the serialization of `self` - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - let scalar_len = C::Scalar::default().to_repr().as_ref().len(); - self.vk.bytes_length(format) - + 12 - + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) - + polynomial_slice_byte_length(&self.fixed_values) - + polynomial_slice_byte_length(&self.fixed_polys) - + polynomial_slice_byte_length(&self.fixed_cosets) - + self.permutation.bytes_length() - } -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField + FromUniformBytes<64>, -{ - /// Writes a proving key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - self.vk.write(writer, format)?; - self.l0.write(writer, format)?; - self.l_last.write(writer, format)?; - self.l_active_row.write(writer, format)?; - write_polynomial_slice(&self.fixed_values, writer, format)?; - write_polynomial_slice(&self.fixed_polys, writer, format)?; - write_polynomial_slice(&self.fixed_cosets, writer, format)?; - self.permutation.write(writer, format)?; - Ok(()) - } - - /// Reads a proving key from a buffer. - /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. 
- /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. - /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let vk = VerifyingKey::::read::( - reader, - format, - #[cfg(feature = "circuit-params")] - params, - )?; - let l0 = Polynomial::read(reader, format)?; - let l_last = Polynomial::read(reader, format)?; - let l_active_row = Polynomial::read(reader, format)?; - let fixed_values = read_polynomial_vec(reader, format)?; - let fixed_polys = read_polynomial_vec(reader, format)?; - let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = permutation::ProvingKey::read(reader, format)?; - let ev = Evaluator::new(vk.cs()); - Ok(Self { - vk, - l0, - l_last, - l_active_row, - fixed_values, - fixed_polys, - fixed_cosets, - permutation, - ev, - }) - } - - /// Writes a proving key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a proving key from a slice of bytes using [`Self::read`]. 
- pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - /// Get the underlying [`EvaluationDomain`]. - pub fn get_domain(&self) -> &EvaluationDomain { - &self.domain - } -} - -#[derive(Clone, Copy, Debug)] -struct Theta; -type ChallengeTheta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Beta; -type ChallengeBeta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Gamma; -type ChallengeGamma = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Y; -type ChallengeY = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X; -type ChallengeX = ChallengeScalar; diff --git a/halo2_proofs_rm/src/plonk/assigned.rs b/halo2_proofs_rm/src/plonk/assigned.rs deleted file mode 100644 index 07de325678..0000000000 --- a/halo2_proofs_rm/src/plonk/assigned.rs +++ /dev/null @@ -1,665 +0,0 @@ -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; - -use group::ff::Field; - -/// A value assigned to a cell within a circuit. -/// -/// Stored as a fraction, so the backend can use batch inversion. -/// -/// A denominator of zero maps to an assigned value of zero. -#[derive(Clone, Copy, Debug)] -pub enum Assigned { - /// The field element zero. - Zero, - /// A value that does not require inversion to evaluate. - Trivial(F), - /// A value stored as a fraction to enable batch inversion. 
- Rational(F, F), -} - -impl From<&Assigned> for Assigned { - fn from(val: &Assigned) -> Self { - *val - } -} - -impl From<&F> for Assigned { - fn from(numerator: &F) -> Self { - Assigned::Trivial(*numerator) - } -} - -impl From for Assigned { - fn from(numerator: F) -> Self { - Assigned::Trivial(numerator) - } -} - -impl From<(F, F)> for Assigned { - fn from((numerator, denominator): (F, F)) -> Self { - Assigned::Rational(numerator, denominator) - } -} - -impl PartialEq for Assigned { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - // At least one side is directly zero. - (Self::Zero, Self::Zero) => true, - (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - x.is_zero_vartime() - } - - // Okay, we need to do some actual math... - (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, - (Self::Trivial(x), Self::Rational(numerator, denominator)) - | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { - &(*x * denominator) == numerator - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, - } - } -} - -impl Eq for Assigned {} - -impl Neg for Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - match self { - Self::Zero => Self::Zero, - Self::Trivial(numerator) => Self::Trivial(-numerator), - Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), - } - } -} - -impl Neg for &Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - -*self - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - // One side is directly zero. 
- (Self::Zero, _) => rhs, - (_, Self::Zero) => self, - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - other - } - - // Okay, we need to do some actual math... - (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator + denominator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - self + Self::Trivial(rhs) - } -} - -impl Add for &Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for Assigned { - type Output = Assigned; - fn add(self, rhs: &Self) -> Assigned { - self + *rhs - } -} - -impl Add> for &Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for &Assigned { - type Output = Assigned; - fn add(self, rhs: &Assigned) -> Assigned { - *self + *rhs - } -} - -impl AddAssign for Assigned { - fn add_assign(&mut self, rhs: Self) { - *self = *self + rhs; - } -} - -impl AddAssign<&Assigned> for Assigned { - fn add_assign(&mut self, rhs: &Self) { - *self = *self + rhs; - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - self + (-rhs) - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - self + (-rhs) - } -} - -impl Sub for &Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - *self - rhs - } -} - -impl 
Sub<&Assigned> for Assigned { - type Output = Assigned; - fn sub(self, rhs: &Self) -> Assigned { - self - *rhs - } -} - -impl Sub> for &Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for &Assigned { - type Output = Assigned; - fn sub(self, rhs: &Assigned) -> Assigned { - *self - *rhs - } -} - -impl SubAssign for Assigned { - fn sub_assign(&mut self, rhs: Self) { - *self = *self - rhs; - } -} - -impl SubAssign<&Assigned> for Assigned { - fn sub_assign(&mut self, rhs: &Self) { - *self = *self - rhs; - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - (Self::Zero, _) | (_, Self::Zero) => Self::Zero, - (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - self * Self::Trivial(rhs) - } -} - -impl Mul for &Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - *self * rhs - } -} - -impl Mul<&Assigned> for Assigned { - type Output = Assigned; - fn mul(self, rhs: &Assigned) -> Assigned { - self * *rhs - } -} - -impl MulAssign for Assigned { - fn mul_assign(&mut self, rhs: Self) { - *self = *self * rhs; - } -} - -impl MulAssign<&Assigned> for Assigned { - fn mul_assign(&mut self, rhs: &Self) { - *self = *self * rhs; - } -} - -impl Assigned { - /// Returns the numerator. 
- pub fn numerator(&self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => *x, - Self::Rational(numerator, _) => *numerator, - } - } - - /// Returns the denominator, if non-trivial. - pub fn denominator(&self) -> Option { - match self { - Self::Zero => None, - Self::Trivial(_) => None, - Self::Rational(_, denominator) => Some(*denominator), - } - } - - /// Returns true iff this element is zero. - pub fn is_zero_vartime(&self) -> bool { - match self { - Self::Zero => true, - Self::Trivial(x) => x.is_zero_vartime(), - // Assigned maps x/0 -> 0. - Self::Rational(numerator, denominator) => { - numerator.is_zero_vartime() || denominator.is_zero_vartime() - } - } - } - - /// Doubles this element. - #[must_use] - pub fn double(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.double()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.double(), *denominator) - } - } - } - - /// Squares this element. - #[must_use] - pub fn square(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.square()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.square(), denominator.square()) - } - } - } - - /// Cubes this element. - #[must_use] - pub fn cube(&self) -> Self { - self.square() * self - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Rational(F::ONE, *x), - Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), - } - } - - /// Evaluates this assigned value directly, performing an unbatched inversion if - /// necessary. - /// - /// If the denominator is zero, this returns zero. 
- pub fn evaluate(self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => x, - Self::Rational(numerator, denominator) => { - if denominator == F::ONE { - numerator - } else { - numerator * denominator.invert().unwrap_or(F::ZERO) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::Assigned; - // We use (numerator, denominator) in the comments below to denote a rational. - #[test] - fn add_trivial_to_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // 2 + (1,0) = 2 + 0 = 2 - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn add_rational_to_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) + (1,0) = (1,2) + 0 = (1,2) - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn sub_trivial_from_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - 2 = 0 - 2 = -2 - // This fails if subtraction is implemented using normal rules for rationals. - assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // 2 - (1,0) = 2 - 0 = 2 - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn sub_rational_from_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - (1,2) = 0 - (1,2) = -(1,2) - // This fails if subtraction is implemented using normal rules for rationals. 
- assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // (1,2) - (1,0) = (1,2) - 0 = (1,2) - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn mul_rational_by_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) * (1,0) = (1,2) * 0 = 0 - assert_eq!((a * b).evaluate(), Fp::zero()); - - // (1,0) * (1,2) = 0 * (1,2) = 0 - assert_eq!((b * a).evaluate(), Fp::zero()); - } -} - -#[cfg(test)] -mod proptests { - use std::{ - cmp, - ops::{Add, Mul, Neg, Sub}, - }; - - use group::ff::Field; - use halo2curves::pasta::Fp; - use proptest::{collection::vec, prelude::*, sample::select}; - - use super::Assigned; - - trait UnaryOperand: Neg { - fn double(&self) -> Self; - fn square(&self) -> Self; - fn cube(&self) -> Self; - fn inv0(&self) -> Self; - } - - impl UnaryOperand for F { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert().unwrap_or(F::ZERO) - } - } - - impl UnaryOperand for Assigned { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert() - } - } - - #[derive(Clone, Debug)] - enum UnaryOperator { - Neg, - Double, - Square, - Cube, - Inv0, - } - - const UNARY_OPERATORS: &[UnaryOperator] = &[ - UnaryOperator::Neg, - UnaryOperator::Double, - UnaryOperator::Square, - UnaryOperator::Cube, - UnaryOperator::Inv0, - ]; - - impl UnaryOperator { - fn apply(&self, a: F) -> F { - match self { - Self::Neg => -a, - Self::Double => a.double(), - Self::Square => a.square(), - Self::Cube => a.cube(), - Self::Inv0 => a.inv0(), - } - } - } - - trait BinaryOperand: Sized + Add + Sub + Mul {} - impl BinaryOperand for F {} - impl BinaryOperand for Assigned {} - - #[derive(Clone, Debug)] - 
enum BinaryOperator { - Add, - Sub, - Mul, - } - - const BINARY_OPERATORS: &[BinaryOperator] = &[ - BinaryOperator::Add, - BinaryOperator::Sub, - BinaryOperator::Mul, - ]; - - impl BinaryOperator { - fn apply(&self, a: F, b: F) -> F { - match self { - Self::Add => a + b, - Self::Sub => a - b, - Self::Mul => a * b, - } - } - } - - #[derive(Clone, Debug)] - enum Operator { - Unary(UnaryOperator), - Binary(BinaryOperator), - } - - prop_compose! { - /// Use narrow that can be easily reduced. - fn arb_element()(val in any::()) -> Fp { - Fp::from(val) - } - } - - prop_compose! { - fn arb_trivial()(element in arb_element()) -> Assigned { - Assigned::Trivial(element) - } - } - - prop_compose! { - /// Generates half of the denominators as zero to represent a deferred inversion. - fn arb_rational()( - numerator in arb_element(), - denominator in prop_oneof![ - 1 => Just(Fp::zero()), - 2 => arb_element(), - ], - ) -> Assigned { - Assigned::Rational(numerator, denominator) - } - } - - prop_compose! { - fn arb_operators(num_unary: usize, num_binary: usize)( - unary in vec(select(UNARY_OPERATORS), num_unary), - binary in vec(select(BINARY_OPERATORS), num_binary), - ) -> Vec { - unary.into_iter() - .map(Operator::Unary) - .chain(binary.into_iter().map(Operator::Binary)) - .collect() - } - } - - prop_compose! { - fn arb_testcase()( - num_unary in 0usize..5, - num_binary in 0usize..5, - )( - values in vec( - prop_oneof![ - 1 => Just(Assigned::Zero), - 2 => arb_trivial(), - 2 => arb_rational(), - ], - // Ensure that: - // - we have at least one value to apply unary operators to. - // - we can apply every binary operator pairwise sequentially. - cmp::max(usize::from(num_unary > 0), num_binary + 1)), - operations in arb_operators(num_unary, num_binary).prop_shuffle(), - ) -> (Vec>, Vec) { - (values, operations) - } - } - - proptest! { - #[test] - fn operation_commutativity((values, operations) in arb_testcase()) { - // Evaluate the values at the start. 
- let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); - - // Apply the operations to both the deferred and evaluated values. - fn evaluate( - items: Vec, - operators: &[Operator], - ) -> F { - let mut ops = operators.iter(); - - // Process all binary operators. We are guaranteed to have exactly as many - // binary operators as we need calls to the reduction closure. - let mut res = items.into_iter().reduce(|mut a, b| loop { - match ops.next() { - Some(Operator::Unary(op)) => a = op.apply(a), - Some(Operator::Binary(op)) => break op.apply(a, b), - None => unreachable!(), - } - }).unwrap(); - - // Process any unary operators that weren't handled in the reduce() call - // above (either if we only had one item, or there were unary operators - // after the last binary operator). We are guaranteed to have no binary - // operators remaining at this point. - loop { - match ops.next() { - Some(Operator::Unary(op)) => res = op.apply(res), - Some(Operator::Binary(_)) => unreachable!(), - None => break res, - } - } - } - let deferred_result = evaluate(values, &operations); - let evaluated_result = evaluate(elements, &operations); - - // The two should be equal, i.e. deferred inversion should commute with the - // list of operations. 
- assert_eq!(deferred_result.evaluate(), evaluated_result); - } - } -} diff --git a/halo2_proofs_rm/src/plonk/circuit.rs b/halo2_proofs_rm/src/plonk/circuit.rs deleted file mode 100644 index 1ecf84b69c..0000000000 --- a/halo2_proofs_rm/src/plonk/circuit.rs +++ /dev/null @@ -1,3246 +0,0 @@ -use super::{lookup, permutation, shuffle, Assigned, Error, Queries}; -use crate::circuit::layouter::SyncDeps; -use crate::dev::metadata; -use crate::plonk::WitnessCollection; -use crate::{ - circuit::{Layouter, Region, Value}, - poly::{batch_invert_assigned, Polynomial, Rotation}, -}; -use core::cmp::max; -use core::ops::{Add, Mul}; -use ff::Field; -use sealed::SealedPhase; -use std::collections::BTreeSet; -use std::collections::HashMap; -use std::fmt::Debug; -use std::iter::{Product, Sum}; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, -}; - -mod compress_selectors; - -/// A column type -pub trait ColumnType: - 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into -{ - /// Return expression from cell - fn query_cell(&self, index: usize, at: Rotation) -> Expression; -} - -/// A column with an index and type -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Column { - index: usize, - column_type: C, -} - -impl Column { - pub(crate) fn new(index: usize, column_type: C) -> Self { - Column { index, column_type } - } - - /// Index of this column. - pub fn index(&self) -> usize { - self.index - } - - /// Type of this column. 
- pub fn column_type(&self) -> &C { - &self.column_type - } - - /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> Expression { - self.column_type.query_cell(self.index, at) - } - - /// Return expression from column at the current row - pub fn cur(&self) -> Expression { - self.query_cell(Rotation::cur()) - } - - /// Return expression from column at the next row - pub fn next(&self) -> Expression { - self.query_cell(Rotation::next()) - } - - /// Return expression from column at the previous row - pub fn prev(&self) -> Expression { - self.query_cell(Rotation::prev()) - } - - /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> Expression { - self.query_cell(Rotation(rotation)) - } -} - -impl Ord for Column { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match self.column_type.into().cmp(&other.column_type.into()) { - // Indices are assigned within column types. - std::cmp::Ordering::Equal => self.index.cmp(&other.index), - order => order, - } - } -} - -impl PartialOrd for Column { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -pub(crate) mod sealed { - /// Phase of advice column - #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(crate) u8); - - impl Phase { - pub fn prev(&self) -> Option { - self.0.checked_sub(1).map(Phase) - } - } - - impl SealedPhase for Phase { - fn to_sealed(self) -> Phase { - self - } - } - - /// Sealed trait to help keep `Phase` private. 
- pub trait SealedPhase { - fn to_sealed(self) -> Phase; - } -} - -/// Phase of advice column -pub trait Phase: SealedPhase {} - -impl Phase for P {} - -/// First phase -#[derive(Debug)] -pub struct FirstPhase; - -impl SealedPhase for super::FirstPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(0) - } -} - -/// Second phase -#[derive(Debug)] -pub struct SecondPhase; - -impl SealedPhase for super::SecondPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(1) - } -} - -/// Third phase -#[derive(Debug)] -pub struct ThirdPhase; - -impl SealedPhase for super::ThirdPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(2) - } -} - -/// An advice column -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Advice { - pub(crate) phase: sealed::Phase, -} - -impl Default for Advice { - fn default() -> Advice { - Advice { - phase: FirstPhase.to_sealed(), - } - } -} - -impl Advice { - /// Returns `Advice` in given `Phase` - pub fn new(phase: P) -> Advice { - Advice { - phase: phase.to_sealed(), - } - } - - /// Phase of this column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -impl std::fmt::Debug for Advice { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. 
- if self.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &self.phase); - } - debug_struct.finish() - } -} - -/// A fixed column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Fixed; - -/// An instance column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Instance; - -/// An enum over the Advice, Fixed, Instance structs -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub enum Any { - /// An Advice variant - Advice(Advice), - /// A Fixed variant - Fixed, - /// An Instance variant - Instance, -} - -impl Any { - /// Returns Advice variant in `FirstPhase` - pub fn advice() -> Any { - Any::Advice(Advice::default()) - } - - /// Returns Advice variant in given `Phase` - pub fn advice_in(phase: P) -> Any { - Any::Advice(Advice::new(phase)) - } -} - -impl std::fmt::Debug for Any { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Any::Advice(advice) => { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. - if advice.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &advice.phase); - } - debug_struct.finish() - } - Any::Fixed => f.debug_struct("Fixed").finish(), - Any::Instance => f.debug_struct("Instance").finish(), - } - } -} - -impl Ord for Any { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match (self, other) { - (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, - (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), - // Across column types, sort Instance < Advice < Fixed. 
- (Any::Instance, Any::Advice(_)) - | (Any::Advice(_), Any::Fixed) - | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, - (Any::Fixed, Any::Instance) - | (Any::Fixed, Any::Advice(_)) - | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, - } - } -} - -impl PartialOrd for Any { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl ColumnType for Advice { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: self.phase, - }) - } -} -impl ColumnType for Fixed { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Instance { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Any { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - match self { - Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: *phase, - }), - Any::Fixed => Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }), - Any::Instance => Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }), - } - } -} - -impl From for Any { - fn from(advice: Advice) -> Any { - Any::Advice(advice) - } -} - -impl From for Any { - fn from(_: Fixed) -> Any { - Any::Fixed - } -} - -impl From for Any { - fn from(_: Instance) -> Any { - Any::Instance - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Advice(advice.column_type), - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - 
column_type: Any::Fixed, - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Instance, - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Advice(advice) => Ok(Column { - index: any.index(), - column_type: *advice, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Fixed => Ok(Column { - index: any.index(), - column_type: Fixed, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Instance => Ok(Column { - index: any.index(), - column_type: Instance, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -/// A selector, representing a fixed boolean value per row of the circuit. -/// -/// Selectors can be used to conditionally enable (portions of) gates: -/// ``` -/// use halo2_proofs::poly::Rotation; -/// # use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let a = meta.query_advice(a, Rotation::prev()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let s = meta.query_selector(s); -/// -/// // On rows where the selector is enabled, a is constrained to equal b. -/// // On rows where the selector is disabled, a and b can take any value. 
-/// vec![s * (a - b)] -/// }); -/// ``` -/// -/// Selectors are disabled on all rows by default, and must be explicitly enabled on each -/// row when required: -/// ``` -/// use halo2_proofs::{ -/// circuit::{Chip, Layouter, Value}, -/// plonk::{Advice, Column, Error, Selector}, -/// }; -/// use ff::Field; -/// # use halo2_proofs::plonk::Fixed; -/// -/// struct Config { -/// a: Column, -/// b: Column, -/// s: Selector, -/// } -/// -/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { -/// let config = chip.config(); -/// # let config: Config = todo!(); -/// layouter.assign_region(|| "bar", |mut region| { -/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; -/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; -/// config.s.enable(&mut region, 1) -/// })?; -/// Ok(()) -/// } -/// ``` -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Selector(pub(crate) usize, bool); - -impl Selector { - /// Enable this selector at the given offset within the given region. - pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { - region.enable_selector(|| "", self, offset) - } - - /// Is this selector "simple"? Simple selectors can only be multiplied - /// by expressions that contain no other simple selectors. 
- pub fn is_simple(&self) -> bool { - self.1 - } - - /// Returns index of this selector - pub fn index(&self) -> usize { - self.0 - } - - /// Return expression from selector - pub fn expr(&self) -> Expression { - Expression::Selector(*self) - } -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl FixedQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, - /// Phase of this advice column - pub phase: sealed::Phase, -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, - /// Phase of this advice column - pub(crate) phase: sealed::Phase, -} - -impl AdviceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } - - /// Phase of this advice column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -/// Query of instance column at a certain relative location 
-#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQueryMid { - /// Column index - pub column_index: usize, - /// Rotation of this query - pub rotation: Rotation, -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl InstanceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// A fixed column of a lookup table. -/// -/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns -/// can currently only contain a single table, but they may be used in multiple lookup -/// arguments via [`ConstraintSystem::lookup`]. -/// -/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; -/// they cannot simultaneously be used as general fixed columns. -/// -/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct TableColumn { - /// The fixed column that this table column is stored in. - /// - /// # Security - /// - /// This inner column MUST NOT be exposed in the public API, or else chip developers - /// can load lookup tables into their circuits without default-value-filling the - /// columns, which can cause soundness bugs. - inner: Column, -} - -impl TableColumn { - /// Returns inner column - pub fn inner(&self) -> Column { - self.inner - } -} - -/// A challenge squeezed from transcript after advice columns at the phase have been committed. -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Challenge { - index: usize, - pub(crate) phase: sealed::Phase, -} - -impl Challenge { - /// Index of this challenge. 
- pub fn index(&self) -> usize { - self.index - } - - /// Phase of this challenge. - pub fn phase(&self) -> u8 { - self.phase.0 - } - - /// Return Expression - pub fn expr(&self) -> Expression { - Expression::Challenge(*self) - } -} - -/// This trait allows a [`Circuit`] to direct some backend to assign a witness -/// for a constraint system. -pub trait Assignment { - /// Creates a new region and enters into it. - /// - /// Panics if we are currently in a region (if `exit_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn enter_region(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Allows the developer to include an annotation for an specific column within a `Region`. - /// - /// This is usually useful for debugging circuit failures. - fn annotate_column(&mut self, annotation: A, column: Column) - where - A: FnOnce() -> AR, - AR: Into; - - /// Exits the current region. - /// - /// Panics if we are not currently in a region (if `enter_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn exit_region(&mut self); - - /// Enables a selector at the given row. - fn enable_selector( - &mut self, - annotation: A, - selector: &Selector, - row: usize, - ) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into; - - /// Queries the cell of an instance column at a particular absolute row. - /// - /// Returns the cell's value, if known. 
- fn query_instance(&self, column: Column, row: usize) -> Result, Error>; - - /// Assign an advice column value (witness) - fn assign_advice( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign a fixed value - fn assign_fixed( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign two cells to have the same value - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error>; - - /// Fills a fixed `column` starting from the given `row` with value `to`. - fn fill_from_row( - &mut self, - column: Column, - row: usize, - to: Value>, - ) -> Result<(), Error>; - - /// Queries the value of the given challenge. - /// - /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. - fn get_challenge(&self, challenge: Challenge) -> Value; - - /// Creates a new (sub)namespace and enters into it. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Exits out of the existing namespace. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn pop_namespace(&mut self, gadget_name: Option); -} - -/// A floor planning strategy for a circuit. -/// -/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used -/// within. -pub trait FloorPlanner { - /// Given the provided `cs`, synthesize the given circuit. 
- /// - /// `constants` is the list of fixed columns that the layouter may use to assign - /// global constant values. These columns will all have been equality-enabled. - /// - /// Internally, a floor planner will perform the following operations: - /// - Instantiate a [`Layouter`] for this floor planner. - /// - Perform any necessary setup or measurement tasks, which may involve one or more - /// calls to `Circuit::default().synthesize(config, &mut layouter)`. - /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error>; -} - -/// This is a trait that circuits provide implementations for so that the -/// backend prover can ask the circuit to synthesize using some given -/// [`ConstraintSystem`] implementation. -pub trait Circuit { - /// This is a configuration object that stores things like columns. - type Config: Clone; - /// The floor planner used for this circuit. This is an associated type of the - /// `Circuit` trait because its behaviour is circuit-critical. - type FloorPlanner: FloorPlanner; - /// Optional circuit configuration parameters. Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - type Params: Default; - - /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to - /// `None`). For most circuits, this will be equal to `Self::default()`. - fn without_witnesses(&self) -> Self; - - /// Returns a reference to the parameters that should be used to configure the circuit. - /// Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - fn params(&self) -> Self::Params { - Self::Params::default() - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. Takes a runtime parameter. 
The default - /// implementation calls `configure` ignoring the `_params` argument in order to easily support - /// circuits that don't use configuration parameters. - #[cfg(feature = "circuit-params")] - fn configure_with_params( - meta: &mut ConstraintSystem, - _params: Self::Params, - ) -> Self::Config { - Self::configure(meta) - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. - fn configure(meta: &mut ConstraintSystem) -> Self::Config; - - /// Given the provided `cs`, synthesize the circuit. The concrete type of - /// the caller will be different depending on the context, and they may or - /// may not expect to have a witness present. - fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum ExpressionMid { - /// This is a constant polynomial - Constant(F), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQueryMid), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQueryMid), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQueryMid), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl ExpressionMid { - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - use ExpressionMid::*; - match self { - Constant(_) => 0, - Fixed(_) => 1, - Advice(_) => 1, - Instance(_) => 1, - Challenge(_) => 0, - Negated(poly) => poly.degree(), - Sum(a, b) => max(a.degree(), b.degree()), - Product(a, b) => a.degree() + b.degree(), - Scaled(poly, _) 
=> poly.degree(), - } - } -} - -/// Low-degree expression representing an identity that must hold over the committed columns. -#[derive(Clone, PartialEq, Eq)] -pub enum Expression { - /// This is a constant polynomial - Constant(F), - /// This is a virtual selector - Selector(Selector), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQuery), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQuery), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQuery), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl Into> for Expression { - fn into(self) -> ExpressionMid { - match self { - Expression::Constant(c) => ExpressionMid::Constant(c), - Expression::Selector(_) => unreachable!(), - Expression::Fixed(FixedQuery { - column_index, - rotation, - .. - }) => ExpressionMid::Fixed(FixedQueryMid { - column_index, - rotation, - }), - Expression::Advice(AdviceQuery { - column_index, - rotation, - phase, - .. - }) => ExpressionMid::Advice(AdviceQueryMid { - column_index, - rotation, - phase, - }), - Expression::Instance(InstanceQuery { - column_index, - rotation, - .. 
- }) => ExpressionMid::Instance(InstanceQueryMid { - column_index, - rotation, - }), - Expression::Challenge(c) => ExpressionMid::Challenge(c), - Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), - Expression::Sum(lhs, rhs) => { - ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Product(lhs, rhs) => { - ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) - } - Expression::Scaled(e, c) => ExpressionMid::Scaled(Box::new((*e).into()), c), - } - } -} - -impl Expression { - /// Make side effects - pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { - match self { - Expression::Constant(_) => (), - Expression::Selector(selector) => { - if !cells.queried_selectors.contains(selector) { - cells.queried_selectors.push(*selector); - } - } - Expression::Fixed(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Fixed, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); - } - } - Expression::Advice(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Advice { phase: query.phase }, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_advice_index(col, query.rotation)); - } - } - Expression::Instance(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Instance, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_instance_index(col, query.rotation)); - } - } - Expression::Challenge(_) => (), - Expression::Negated(a) => a.query_cells(cells), - Expression::Sum(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Product(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Scaled(a, _) => 
a.query_cells(cells), - }; - } - - /// Evaluate the polynomial using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - product(a, b) - } - Expression::Scaled(a, f) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - 
instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - scaled(a, *f) - } - } - } - - /// Evaluate the polynomial lazily using the provided closures to perform the - /// operations. - #[allow(clippy::too_many_arguments)] - pub fn evaluate_lazy( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - zero: &T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - let b = b.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let (a, b) = if a.complexity() <= b.complexity() { - (a, b) - } else { - (b, a) - }; - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - - if a == *zero { - a - } else { - let b = b.evaluate_lazy( - 
constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - product(a, b) - } - } - Expression::Scaled(a, f) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - scaled(a, *f) - } - } - } - - fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { - match self { - Expression::Constant(scalar) => write!(writer, "{scalar:?}"), - Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), - Expression::Fixed(query) => { - write!( - writer, - "fixed[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Advice(query) => { - write!( - writer, - "advice[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Instance(query) => { - write!( - writer, - "instance[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Challenge(challenge) => { - write!(writer, "challenge[{}]", challenge.index()) - } - Expression::Negated(a) => { - writer.write_all(b"(-")?; - a.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Sum(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"+")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Product(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"*")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Scaled(a, f) => { - a.write_identifier(writer)?; - write!(writer, "*{f:?}") - } - } - } - - /// Identifier for this expression. Expressions with identical identifiers - /// do the same calculation (but the expressions don't need to be exactly equal - /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). 
- pub fn identifier(&self) -> String { - let mut cursor = std::io::Cursor::new(Vec::new()); - self.write_identifier(&mut cursor).unwrap(); - String::from_utf8(cursor.into_inner()).unwrap() - } - - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.degree(), - Expression::Sum(a, b) => max(a.degree(), b.degree()), - Expression::Product(a, b) => a.degree() + b.degree(), - Expression::Scaled(poly, _) => poly.degree(), - } - } - - /// Approximate the computational complexity of this expression. - pub fn complexity(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.complexity() + 5, - Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, - Expression::Product(a, b) => a.complexity() + b.complexity() + 30, - Expression::Scaled(poly, _) => poly.complexity() + 30, - } - } - - /// Square this expression. - pub fn square(self) -> Self { - self.clone() * self - } - - /// Returns whether or not this expression contains a simple `Selector`. 
- fn contains_simple_selector(&self) -> bool { - self.evaluate( - &|_| false, - &|selector| selector.is_simple(), - &|_| false, - &|_| false, - &|_| false, - &|_| false, - &|a| a, - &|a, b| a || b, - &|a, b| a || b, - &|a, _| a, - ) - } - - /// Extracts a simple selector from this gate, if present - fn extract_simple_selector(&self) -> Option { - let op = |a, b| match (a, b) { - (Some(a), None) | (None, Some(a)) => Some(a), - (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), - _ => None, - }; - - self.evaluate( - &|_| None, - &|selector| { - if selector.is_simple() { - Some(selector) - } else { - None - } - }, - &|_| None, - &|_| None, - &|_| None, - &|_| None, - &|a| a, - &op, - &op, - &|a, _| a, - ) - } -} - -impl std::fmt::Debug for Expression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), - Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), - // Skip enum variant and print query struct directly to maintain backwards compatibility. - Expression::Fixed(query) => { - let mut debug_struct = f.debug_struct("Fixed"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Advice(query) => { - let mut debug_struct = f.debug_struct("Advice"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation); - // Only show advice's phase if it's not in first phase. 
- if query.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &query.phase); - } - debug_struct.finish() - } - Expression::Instance(query) => { - let mut debug_struct = f.debug_struct("Instance"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Challenge(challenge) => { - f.debug_tuple("Challenge").field(challenge).finish() - } - Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), - Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), - Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), - Expression::Scaled(poly, scalar) => { - f.debug_tuple("Scaled").field(poly).field(scalar).finish() - } - } - } -} - -impl Neg for Expression { - type Output = Expression; - fn neg(self) -> Self::Output { - Expression::Negated(Box::new(self)) - } -} - -impl Add for Expression { - type Output = Expression; - fn add(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in an addition"); - } - Expression::Sum(Box::new(self), Box::new(rhs)) - } -} - -impl Sub for Expression { - type Output = Expression; - fn sub(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in a subtraction"); - } - Expression::Sum(Box::new(self), Box::new(-rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() && rhs.contains_simple_selector() { - panic!("attempted to multiply two expressions containing simple selectors"); - } - Expression::Product(Box::new(self), Box::new(rhs)) - } -} - -impl Mul for 
Expression { - type Output = Expression; - fn mul(self, rhs: F) -> Expression { - Expression::Scaled(Box::new(self), rhs) - } -} - -impl Sum for Expression { - fn sum>(iter: I) -> Self { - iter.reduce(|acc, x| acc + x) - .unwrap_or(Expression::Constant(F::ZERO)) - } -} - -impl Product for Expression { - fn product>(iter: I) -> Self { - iter.reduce(|acc, x| acc * x) - .unwrap_or(Expression::Constant(F::ONE)) - } -} - -/// Represents an index into a vector where each entry corresponds to a distinct -/// point that polynomials are queried at. -#[derive(Copy, Clone, Debug)] -pub(crate) struct PointIndex(pub usize); - -/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset -/// within a custom gate. -#[derive(Clone, Debug)] -pub struct VirtualCell { - pub(crate) column: Column, - pub(crate) rotation: Rotation, -} - -impl>> From<(Col, Rotation)> for VirtualCell { - fn from((column, rotation): (Col, Rotation)) -> Self { - VirtualCell { - column: column.into(), - rotation, - } - } -} - -/// An individual polynomial constraint. -/// -/// These are returned by the closures passed to `ConstraintSystem::create_gate`. -#[derive(Debug)] -pub struct Constraint { - name: String, - poly: Expression, -} - -impl From> for Constraint { - fn from(poly: Expression) -> Self { - Constraint { - name: "".to_string(), - poly, - } - } -} - -impl> From<(S, Expression)> for Constraint { - fn from((name, poly): (S, Expression)) -> Self { - Constraint { - name: name.as_ref().to_string(), - poly, - } - } -} - -impl From> for Vec> { - fn from(poly: Expression) -> Self { - vec![Constraint { - name: "".to_string(), - poly, - }] - } -} - -/// A set of polynomial constraints with a common selector. 
-/// -/// ``` -/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; -/// use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let next = meta.query_advice(a, Rotation::next()); -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s_ternary = meta.query_selector(s); -/// -/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); -/// -/// Constraints::with_selector( -/// s_ternary, -/// std::array::IntoIter::new([ -/// ("a is boolean", a.clone() * one_minus_a.clone()), -/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), -/// ]), -/// ) -/// }); -/// ``` -/// -/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to -/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, -/// you can pass an array directly. -#[derive(Debug)] -pub struct Constraints>, Iter: IntoIterator> { - selector: Expression, - constraints: Iter, -} - -impl>, Iter: IntoIterator> Constraints { - /// Constructs a set of constraints that are controlled by the given selector. - /// - /// Each constraint `c` in `iterator` will be converted into the constraint - /// `selector * c`. 
- pub fn with_selector(selector: Expression, constraints: Iter) -> Self { - Constraints { - selector, - constraints, - } - } -} - -fn apply_selector_to_constraint>>( - (selector, c): (Expression, C), -) -> Constraint { - let constraint: Constraint = c.into(); - Constraint { - name: constraint.name, - poly: selector * constraint.poly, - } -} - -type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; -type ConstraintsIterator = std::iter::Map< - std::iter::Zip>, I>, - ApplySelectorToConstraint, ->; - -impl>, Iter: IntoIterator> IntoIterator - for Constraints -{ - type Item = Constraint; - type IntoIter = ConstraintsIterator; - - fn into_iter(self) -> Self::IntoIter { - std::iter::repeat(self.selector) - .zip(self.constraints) - .map(apply_selector_to_constraint) - } -} - -/// A Gate contains a single polynomial identity with a name as metadata. -#[derive(Clone, Debug)] -pub struct GateV2Backend { - name: String, - poly: ExpressionMid, -} - -impl GateV2Backend { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the polynomial identity of this gate - pub fn polynomial(&self) -> &ExpressionMid { - &self.poly - } -} - -/// Gate -#[derive(Clone, Debug)] -pub struct Gate { - name: String, - constraint_names: Vec, - polys: Vec>, - /// We track queried selectors separately from other cells, so that we can use them to - /// trigger debug checks on gates. - queried_selectors: Vec, - queried_cells: Vec, -} - -impl Gate { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the name of the constraint at index `constraint_index`. 
- pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { - &self.polys - } - - pub(crate) fn queried_selectors(&self) -> &[Selector] { - &self.queried_selectors - } - - pub(crate) fn queried_cells(&self) -> &[VirtualCell] { - &self.queried_cells - } -} - -/// Data that needs to be preprocessed from a circuit -#[derive(Debug, Clone)] -pub struct PreprocessingV2 { - // TODO(Edu): Can we replace this by a simpler structure? - pub(crate) permutation: permutation::keygen::AssemblyMid, - pub(crate) fixed: Vec>, -} - -/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System -/// as well as the fixed columns and copy constraints information. -#[derive(Debug, Clone)] -pub struct CompiledCircuitV2 { - pub(crate) preprocessing: PreprocessingV2, - pub(crate) cs: ConstraintSystemV2Backend, -} - -struct QueriesMap { - advice_map: HashMap<(Column, Rotation), usize>, - instance_map: HashMap<(Column, Rotation), usize>, - fixed_map: HashMap<(Column, Rotation), usize>, - advice: Vec<(Column, Rotation)>, - instance: Vec<(Column, Rotation)>, - fixed: Vec<(Column, Rotation)>, -} - -impl QueriesMap { - fn add_advice(&mut self, col: Column, rot: Rotation) -> usize { - *self.advice_map.entry((col, rot)).or_insert_with(|| { - self.advice.push((col, rot)); - self.advice.len() - 1 - }) - } - fn add_instance(&mut self, col: Column, rot: Rotation) -> usize { - *self.instance_map.entry((col, rot)).or_insert_with(|| { - self.instance.push((col, rot)); - self.instance.len() - 1 - }) - } - fn add_fixed(&mut self, col: Column, rot: Rotation) -> usize { - *self.fixed_map.entry((col, rot)).or_insert_with(|| { - self.fixed.push((col, rot)); - self.fixed.len() - 1 - }) - } -} - -impl QueriesMap { - fn as_expression(&mut self, expr: &ExpressionMid) -> Expression { - match expr { - ExpressionMid::Constant(c) => 
Expression::Constant(*c), - ExpressionMid::Fixed(query) => { - let (col, rot) = (Column::new(query.column_index, Fixed), query.rotation); - let index = self.add_fixed(col, rot); - Expression::Fixed(FixedQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Advice(query) => { - let (col, rot) = ( - Column::new(query.column_index, Advice { phase: query.phase }), - query.rotation, - ); - let index = self.add_advice(col, rot); - Expression::Advice(AdviceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - phase: query.phase, - }) - } - ExpressionMid::Instance(query) => { - let (col, rot) = (Column::new(query.column_index, Instance), query.rotation); - let index = self.add_instance(col, rot); - Expression::Instance(InstanceQuery { - index: Some(index), - column_index: query.column_index, - rotation: query.rotation, - }) - } - ExpressionMid::Challenge(c) => Expression::Challenge(*c), - ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), - ExpressionMid::Sum(lhs, rhs) => Expression::Sum( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Product(lhs, rhs) => Expression::Product( - Box::new(self.as_expression(lhs)), - Box::new(self.as_expression(rhs)), - ), - ExpressionMid::Scaled(e, c) => Expression::Scaled(Box::new(self.as_expression(e)), *c), - } - } -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystemV2Backend { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. 
Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - pub(crate) gates: Vec>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, -} - -impl Into> for ConstraintSystem { - fn into(self) -> ConstraintSystemV2Backend { - ConstraintSystemV2Backend { - num_fixed_columns: self.num_fixed_columns, - num_advice_columns: self.num_advice_columns, - num_instance_columns: self.num_instance_columns, - num_challenges: self.num_challenges, - unblinded_advice_columns: self.unblinded_advice_columns.clone(), - advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), - challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), - gates: self - .gates - .iter() - .map(|g| { - g.polys.clone().into_iter().enumerate().map(|(i, e)| { - let name = match g.constraint_name(i) { - "" => g.name.clone(), - constraint_name => format!("{}:{}", g.name, constraint_name), - }; - GateV2Backend { - name, - poly: e.into(), - } - }) - }) - .flatten() - .collect(), - permutation: self.permutation.clone(), - lookups: self - .lookups - .iter() - .map(|l| lookup::ArgumentV2 { - name: l.name.clone(), - input_expressions: l - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - table_expressions: l - 
.table_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - shuffles: self - .shuffles - .iter() - .map(|s| shuffle::ArgumentV2 { - name: s.name.clone(), - input_expressions: s - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - shuffle_expressions: s - .shuffle_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - general_column_annotations: self.general_column_annotations.clone(), - } - } -} - -/// Witness calculator. Frontend function -#[derive(Debug)] -pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { - k: u32, - n: usize, - unusable_rows_start: usize, - circuit: &'a ConcreteCircuit, - config: &'a ConcreteCircuit::Config, - cs: &'a ConstraintSystem, - instances: &'a [&'a [F]], - next_phase: u8, -} - -impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { - /// Create a new WitnessCalculator - pub fn new( - k: u32, - circuit: &'a ConcreteCircuit, - config: &'a ConcreteCircuit::Config, - cs: &'a ConstraintSystem, - instances: &'a [&'a [F]], - ) -> Self { - let n = 2usize.pow(k); - let unusable_rows_start = n - (cs.blinding_factors() + 1); - Self { - k, - n, - unusable_rows_start, - circuit, - config, - cs, - instances, - next_phase: 0, - } - } - - /// Calculate witness at phase - pub fn calc( - &mut self, - phase: u8, - challenges: &HashMap, - ) -> Result>>>, Error> { - if phase != self.next_phase { - return Err(Error::Other(format!( - "Expected phase {}, got {}", - self.next_phase, phase - ))); - } - let current_phase = match phase { - 0 => FirstPhase.to_sealed(), - 1 => SecondPhase.to_sealed(), - 2 => ThirdPhase.to_sealed(), - _ => unreachable!("only phase [0,2] supported"), - }; - - let mut witness = WitnessCollection { - k: self.k, - current_phase, - advice: vec![vec![Assigned::Zero; self.n]; self.cs.num_advice_columns], - instances: self.instances, - challenges, - // The prover will not be 
allowed to assign values to advice - // cells that exist within inactive rows, which include some - // number of blinding factors and an extra row for use in the - // permutation argument. - usable_rows: ..self.unusable_rows_start, - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain the witness and other information. - ConcreteCircuit::FloorPlanner::synthesize( - &mut witness, - self.circuit, - self.config.clone(), - self.cs.constants.clone(), - ) - .expect("todo"); - - let column_indices = self - .cs - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == *phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - self.next_phase += 1; - Ok(witness - .advice - .into_iter() - .enumerate() - .map(|(column_index, advice)| { - if column_indices.contains(&column_index) { - Some(advice) - } else { - None - } - }) - .collect()) - } -} - -/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the -/// circuit into its columns and the column configuration; as well as doing the fixed column and -/// copy constraints assignments. The output of this function can then be used for the key -/// generation, and proof generation. -/// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
-pub fn compile_circuit>( - k: u32, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result< - ( - CompiledCircuitV2, - ConcreteCircuit::Config, - ConstraintSystem, - ), - Error, -> { - let n = 2usize.pow(k); - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - let cs = cs; - - if n < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(k)); - } - - let mut assembly = crate::plonk::keygen::Assembly { - k, - fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::keygen::AssemblyFront::new(n, &cs.permutation), - selectors: vec![vec![false; n]; cs.num_selectors], - usable_rows: 0..n - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config.clone(), - cs.constants.clone(), - )?; - - let fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if compress_selectors { - cs.compress_selectors(assembly.selectors.clone()) - } else { - // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. 
- let selectors = std::mem::take(&mut assembly.selectors); - cs.directly_convert_selectors_to_fixed(selectors) - }; - let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); - fixed.extend(selector_polys.into_iter()); - - let preprocessing = PreprocessingV2 { - permutation: permutation::keygen::AssemblyMid { - copies: assembly.permutation.copies, - }, - fixed, - }; - - Ok(( - CompiledCircuitV2 { - cs: cs.clone().into(), - preprocessing, - }, - config, - cs, - )) -} - -impl ConstraintSystemV2Backend { - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_gates(&self, queries: &mut QueriesMap) -> Vec> { - self.gates - .iter() - .map(|gate| Gate { - name: gate.name.clone(), - constraint_names: Vec::new(), - polys: vec![queries.as_expression(gate.polynomial())], - queried_selectors: Vec::new(), // Unused? - queried_cells: Vec::new(), // Unused? - }) - .collect() - } - - /// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. - fn collect_queries_lookups(&self, queries: &mut QueriesMap) -> Vec> { - self.lookups - .iter() - .map(|lookup| lookup::Argument { - name: lookup.name.clone(), - input_expressions: lookup - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - table_expressions: lookup - .table_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() - } - - /// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed - /// query references in the expressions. 
- fn collect_queries_shuffles(&self, queries: &mut QueriesMap) -> Vec> { - self.shuffles - .iter() - .map(|shuffle| shuffle::Argument { - name: shuffle.name.clone(), - input_expressions: shuffle - .input_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - shuffle_expressions: shuffle - .shuffle_expressions - .iter() - .map(|e| queries.as_expression(e)) - .collect(), - }) - .collect() - } - - /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the - /// expressions of gates, lookups and shuffles into equivalent ones with indexed query - /// references. - pub(crate) fn collect_queries( - &self, - ) -> ( - Queries, - Vec>, - Vec>, - Vec>, - ) { - let mut queries = QueriesMap { - advice_map: HashMap::new(), - instance_map: HashMap::new(), - fixed_map: HashMap::new(), - advice: Vec::new(), - instance: Vec::new(), - fixed: Vec::new(), - }; - - let gates = self.collect_queries_gates(&mut queries); - let lookups = self.collect_queries_lookups(&mut queries); - let shuffles = self.collect_queries_shuffles(&mut queries); - - // Each column used in a copy constraint involves a query at rotation current. 
- for column in self.permutation.get_columns() { - match column.column_type { - Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) - } - Any::Fixed => { - queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()) - } - Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) - } - }; - } - - let mut num_advice_queries = vec![0; self.num_advice_columns]; - for (column, _) in queries.advice.iter() { - num_advice_queries[column.index()] += 1; - } - - let queries = Queries { - advice: queries.advice, - instance: queries.instance, - fixed: queries.fixed, - num_advice_queries, - }; - (queries, gates, lookups, shuffles) - } -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystem { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - /// This is a cached vector that maps virtual selectors to the concrete - /// fixed column that they were compressed into. This is just used by dev - /// tooling right now. - pub(crate) selector_map: Vec>, - - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. 
- pub(crate) num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, - - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. - pub(crate) constants: Vec>, - - pub(crate) minimum_degree: Option, -} - -impl From> for ConstraintSystem { - fn from(cs2: ConstraintSystemV2Backend) -> Self { - let (queries, gates, lookups, shuffles) = cs2.collect_queries(); - ConstraintSystem { - num_fixed_columns: cs2.num_fixed_columns, - num_advice_columns: cs2.num_advice_columns, - num_instance_columns: cs2.num_instance_columns, - num_selectors: 0, - num_challenges: cs2.num_challenges, - unblinded_advice_columns: cs2.unblinded_advice_columns, - advice_column_phase: cs2 - .advice_column_phase - .into_iter() - .map(sealed::Phase) - .collect(), - challenge_phase: cs2.challenge_phase.into_iter().map(sealed::Phase).collect(), - selector_map: Vec::new(), - gates, - advice_queries: queries.advice, - num_advice_queries: queries.num_advice_queries, - instance_queries: queries.instance, - fixed_queries: queries.fixed, - permutation: cs2.permutation, - lookups, - shuffles, - general_column_annotations: cs2.general_column_annotations, - constants: Vec::new(), - minimum_degree: None, - } - } -} - -/// Represents the 
minimal parameters that determine a `ConstraintSystem`. -#[allow(dead_code)] -pub struct PinnedConstraintSystem<'a, F: Field> { - num_fixed_columns: &'a usize, - num_advice_columns: &'a usize, - num_instance_columns: &'a usize, - num_selectors: &'a usize, - num_challenges: &'a usize, - advice_column_phase: &'a Vec, - challenge_phase: &'a Vec, - gates: PinnedGates<'a, F>, - advice_queries: &'a Vec<(Column, Rotation)>, - instance_queries: &'a Vec<(Column, Rotation)>, - fixed_queries: &'a Vec<(Column, Rotation)>, - permutation: &'a permutation::Argument, - lookups: &'a Vec>, - shuffles: &'a Vec>, - constants: &'a Vec>, - minimum_degree: &'a Option, -} - -impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); - debug_struct - .field("num_fixed_columns", self.num_fixed_columns) - .field("num_advice_columns", self.num_advice_columns) - .field("num_instance_columns", self.num_instance_columns) - .field("num_selectors", self.num_selectors); - // Only show multi-phase related fields if it's used. 
- if *self.num_challenges > 0 { - debug_struct - .field("num_challenges", self.num_challenges) - .field("advice_column_phase", self.advice_column_phase) - .field("challenge_phase", self.challenge_phase); - } - debug_struct - .field("gates", &self.gates) - .field("advice_queries", self.advice_queries) - .field("instance_queries", self.instance_queries) - .field("fixed_queries", self.fixed_queries) - .field("permutation", self.permutation) - .field("lookups", self.lookups); - if !self.shuffles.is_empty() { - debug_struct.field("shuffles", self.shuffles); - } - debug_struct - .field("constants", self.constants) - .field("minimum_degree", self.minimum_degree); - debug_struct.finish() - } -} - -struct PinnedGates<'a, F: Field>(&'a Vec>); - -impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_list() - .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) - .finish() - } -} - -impl Default for ConstraintSystem { - fn default() -> ConstraintSystem { - ConstraintSystem { - num_fixed_columns: 0, - num_advice_columns: 0, - num_instance_columns: 0, - num_selectors: 0, - num_challenges: 0, - unblinded_advice_columns: Vec::new(), - advice_column_phase: Vec::new(), - challenge_phase: Vec::new(), - selector_map: vec![], - gates: vec![], - fixed_queries: Vec::new(), - advice_queries: Vec::new(), - num_advice_queries: Vec::new(), - instance_queries: Vec::new(), - permutation: permutation::Argument::new(), - lookups: Vec::new(), - shuffles: Vec::new(), - general_column_annotations: HashMap::new(), - constants: vec![], - minimum_degree: None, - } - } -} - -impl ConstraintSystem { - /// Obtain a pinned version of this constraint system; a structure with the - /// minimal parameters needed to determine the rest of the constraint - /// system. 
- pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { - PinnedConstraintSystem { - num_fixed_columns: &self.num_fixed_columns, - num_advice_columns: &self.num_advice_columns, - num_instance_columns: &self.num_instance_columns, - num_selectors: &self.num_selectors, - num_challenges: &self.num_challenges, - advice_column_phase: &self.advice_column_phase, - challenge_phase: &self.challenge_phase, - gates: PinnedGates(&self.gates), - fixed_queries: &self.fixed_queries, - advice_queries: &self.advice_queries, - instance_queries: &self.instance_queries, - permutation: &self.permutation, - lookups: &self.lookups, - shuffles: &self.shuffles, - constants: &self.constants, - minimum_degree: &self.minimum_degree, - } - } - - /// Enables this fixed column to be used for global constant assignments. - /// - /// # Side-effects - /// - /// The column will be equality-enabled. - pub fn enable_constant(&mut self, column: Column) { - if !self.constants.contains(&column) { - self.constants.push(column); - self.enable_equality(column); - } - } - - /// Enable the ability to enforce equality over cells in this column - pub fn enable_equality>>(&mut self, column: C) { - let column = column.into(); - self.query_any_index(column, Rotation::cur()); - self.permutation.add_column(column); - } - - /// Add a lookup argument for some input expressions and table columns. - /// - /// `table_map` returns a map between input expressions and the table columns - /// they need to match. 
- pub fn lookup>( - &mut self, - name: S, - table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, TableColumn)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) - .into_iter() - .map(|(mut input, table)| { - if input.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - let mut table = cells.query_fixed(table.inner(), Rotation::cur()); - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.lookups.len(); - - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); - - index - } - - /// Add a lookup argument for some input expressions and table expressions. - /// - /// `table_map` returns a map between input expressions and the table expressions - /// they need to match. - pub fn lookup_any>( - &mut self, - name: S, - table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) - .into_iter() - .map(|(mut input, mut table)| { - if input.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - if table.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.lookups.len(); - - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); - - index - } - - /// Add a shuffle argument for some input expressions and table expressions. 
- pub fn shuffle>( - &mut self, - name: S, - shuffle_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let shuffle_map = shuffle_map(&mut cells) - .into_iter() - .map(|(mut input, mut table)| { - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.shuffles.len(); - - self.shuffles - .push(shuffle::Argument::new(name.as_ref(), shuffle_map)); - - index - } - - fn query_fixed_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.fixed_queries.len(); - self.fixed_queries.push((column, at)); - - index - } - - pub(crate) fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.advice_queries.len(); - self.advice_queries.push((column, at)); - self.num_advice_queries[column.index] += 1; - - index - } - - fn query_instance_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.instance_queries.len(); - self.instance_queries.push((column, at)); - - index - } - - fn query_any_index(&mut self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.query_advice_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => self.query_fixed_index(Column::::try_from(column).unwrap(), at), - Any::Instance => { - 
self.query_instance_index(Column::::try_from(column).unwrap(), at) - } - } - } - - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - panic!("get_advice_query_index called for non-existent query"); - } - - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - panic!("get_fixed_query_index called for non-existent query"); - } - - pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - panic!("get_instance_query_index called for non-existent query"); - } - - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.get_advice_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => { - self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Instance => { - self.get_instance_query_index(Column::::try_from(column).unwrap(), at) - } - } - } - - /// Sets the minimum degree required by the circuit, which can be set to a - /// larger amount than actually needed. This can be used, for example, to - /// force the permutation argument to involve more columns in the same set. - pub fn set_minimum_degree(&mut self, degree: usize) { - self.minimum_degree = Some(degree); - } - - /// Creates a new gate. - /// - /// # Panics - /// - /// A gate is required to contain polynomial constraints. This method will panic if - /// `constraints` returns an empty iterator. 
- pub fn create_gate>, Iter: IntoIterator, S: AsRef>( - &mut self, - name: S, - constraints: impl FnOnce(&mut VirtualCells<'_, F>) -> Iter, - ) { - let mut cells = VirtualCells::new(self); - let constraints = constraints(&mut cells); - let (constraint_names, polys): (_, Vec<_>) = constraints - .into_iter() - .map(|c| c.into()) - .map(|mut c: Constraint| { - c.poly.query_cells(&mut cells); - (c.name, c.poly) - }) - .unzip(); - - let queried_selectors = cells.queried_selectors; - let queried_cells = cells.queried_cells; - - assert!( - !polys.is_empty(), - "Gates must contain at least one constraint." - ); - - self.gates.push(Gate { - name: name.as_ref().to_string(), - constraint_names, - polys, - queried_selectors, - queried_cells, - }); - } - - /// This will compress selectors together depending on their provided - /// assignments. This `ConstraintSystem` will then be modified to add new - /// fixed columns (representing the actual selectors) and will return the - /// polynomials for those columns. Finally, an internal map is updated to - /// find which fixed column corresponds with a given `Selector`. - /// - /// Do not call this twice. Yes, this should be a builder pattern instead. - pub fn compress_selectors(mut self, selectors: Vec>) -> (Self, Vec>) { - // The number of provided selector assignments must be the number we - // counted for this constraint system. - assert_eq!(selectors.len(), self.num_selectors); - - // Compute the maximal degree of every selector. We only consider the - // expressions in gates, as lookup arguments cannot support simple - // selectors. Selectors that are complex or do not appear in any gates - // will have degree zero. 
- let mut degrees = vec![0; selectors.len()]; - for expr in self.gates.iter().flat_map(|gate| gate.polys.iter()) { - if let Some(selector) = expr.extract_simple_selector() { - degrees[selector.0] = max(degrees[selector.0], expr.degree()); - } - } - - // We will not increase the degree of the constraint system, so we limit - // ourselves to the largest existing degree constraint. - let max_degree = self.degree(); - - let mut new_columns = vec![]; - let (polys, selector_assignment) = compress_selectors::process( - selectors - .into_iter() - .zip(degrees) - .enumerate() - .map( - |(i, (activations, max_degree))| compress_selectors::SelectorDescription { - selector: i, - activations, - max_degree, - }, - ) - .collect(), - max_degree, - || { - let column = self.fixed_column(); - new_columns.push(column); - Expression::Fixed(FixedQuery { - index: Some(self.query_fixed_index(column, Rotation::cur())), - column_index: column.index, - rotation: Rotation::cur(), - }) - }, - ); - - let mut selector_map = vec![None; selector_assignment.len()]; - let mut selector_replacements = vec![None; selector_assignment.len()]; - for assignment in selector_assignment { - selector_replacements[assignment.selector] = Some(assignment.expression); - selector_map[assignment.selector] = Some(new_columns[assignment.combination_index]); - } - - self.selector_map = selector_map - .into_iter() - .map(|a| a.unwrap()) - .collect::>(); - let selector_replacements = selector_replacements - .into_iter() - .map(|a| a.unwrap()) - .collect::>(); - self.replace_selectors_with_fixed(&selector_replacements); - - (self, polys) - } - - /// Does not combine selectors and directly replaces them everywhere with fixed columns. - pub fn directly_convert_selectors_to_fixed( - mut self, - selectors: Vec>, - ) -> (Self, Vec>) { - // The number of provided selector assignments must be the number we - // counted for this constraint system. 
- assert_eq!(selectors.len(), self.num_selectors); - - let (polys, selector_replacements): (Vec<_>, Vec<_>) = selectors - .into_iter() - .map(|selector| { - let poly = selector - .iter() - .map(|b| if *b { F::ONE } else { F::ZERO }) - .collect::>(); - let column = self.fixed_column(); - let rotation = Rotation::cur(); - let expr = Expression::Fixed(FixedQuery { - index: Some(self.query_fixed_index(column, rotation)), - column_index: column.index, - rotation, - }); - (poly, expr) - }) - .unzip(); - - self.replace_selectors_with_fixed(&selector_replacements); - self.num_selectors = 0; - - (self, polys) - } - - fn replace_selectors_with_fixed(&mut self, selector_replacements: &[Expression]) { - fn replace_selectors( - expr: &mut Expression, - selector_replacements: &[Expression], - must_be_nonsimple: bool, - ) { - *expr = expr.evaluate( - &|constant| Expression::Constant(constant), - &|selector| { - if must_be_nonsimple { - // Simple selectors are prohibited from appearing in - // expressions in the lookup argument by - // `ConstraintSystem`. 
- assert!(!selector.is_simple()); - } - - selector_replacements[selector.0].clone() - }, - &|query| Expression::Fixed(query), - &|query| Expression::Advice(query), - &|query| Expression::Instance(query), - &|challenge| Expression::Challenge(challenge), - &|a| -a, - &|a, b| a + b, - &|a, b| a * b, - &|a, f| a * f, - ); - } - - // Substitute selectors for the real fixed columns in all gates - for expr in self.gates.iter_mut().flat_map(|gate| gate.polys.iter_mut()) { - replace_selectors(expr, selector_replacements, false); - } - - // Substitute non-simple selectors for the real fixed columns in all - // lookup expressions - for expr in self.lookups.iter_mut().flat_map(|lookup| { - lookup - .input_expressions - .iter_mut() - .chain(lookup.table_expressions.iter_mut()) - }) { - replace_selectors(expr, selector_replacements, true); - } - - for expr in self.shuffles.iter_mut().flat_map(|shuffle| { - shuffle - .input_expressions - .iter_mut() - .chain(shuffle.shuffle_expressions.iter_mut()) - }) { - replace_selectors(expr, selector_replacements, true); - } - } - - /// Allocate a new (simple) selector. Simple selectors cannot be added to - /// expressions nor multiplied by other expressions containing simple - /// selectors. Also, simple selectors may not appear in lookup argument - /// inputs. - pub fn selector(&mut self) -> Selector { - let index = self.num_selectors; - self.num_selectors += 1; - Selector(index, true) - } - - /// Allocate a new complex selector that can appear anywhere - /// within expressions. - pub fn complex_selector(&mut self) -> Selector { - let index = self.num_selectors; - self.num_selectors += 1; - Selector(index, false) - } - - /// Allocates a new fixed column that can be used in a lookup table. - pub fn lookup_table_column(&mut self) -> TableColumn { - TableColumn { - inner: self.fixed_column(), - } - } - - /// Annotate a Lookup column. 
- pub fn annotate_lookup_column(&mut self, column: TableColumn, annotation: A) - where - A: Fn() -> AR, - AR: Into, - { - // We don't care if the table has already an annotation. If it's the case we keep the new one. - self.general_column_annotations.insert( - metadata::Column::from((Any::Fixed, column.inner().index)), - annotation().into(), - ); - } - - /// Annotate an Instance column. - pub fn annotate_lookup_any_column(&mut self, column: T, annotation: A) - where - A: Fn() -> AR, - AR: Into, - T: Into>, - { - let col_any = column.into(); - // We don't care if the table has already an annotation. If it's the case we keep the new one. - self.general_column_annotations.insert( - metadata::Column::from((col_any.column_type, col_any.index)), - annotation().into(), - ); - } - - /// Allocate a new fixed column - pub fn fixed_column(&mut self) -> Column { - let tmp = Column { - index: self.num_fixed_columns, - column_type: Fixed, - }; - self.num_fixed_columns += 1; - tmp - } - - /// Allocate a new unblinded advice column at `FirstPhase` - pub fn unblinded_advice_column(&mut self) -> Column { - self.unblinded_advice_column_in(FirstPhase) - } - - /// Allocate a new advice column at `FirstPhase` - pub fn advice_column(&mut self) -> Column { - self.advice_column_in(FirstPhase) - } - - /// Allocate a new unblinded advice column in given phase. This allows for the generation of deterministic commitments to advice columns - /// which can be used to split large circuits into smaller ones, whose proofs can then be "joined" together by their common witness commitments. 
- pub fn unblinded_advice_column_in(&mut self, phase: P) -> Column { - let phase = phase.to_sealed(); - if let Some(previous_phase) = phase.prev() { - self.assert_phase_exists( - previous_phase, - format!("Column in later phase {phase:?}").as_str(), - ); - } - - let tmp = Column { - index: self.num_advice_columns, - column_type: Advice { phase }, - }; - self.unblinded_advice_columns.push(tmp.index); - self.num_advice_columns += 1; - self.num_advice_queries.push(0); - self.advice_column_phase.push(phase); - tmp - } - - /// Allocate a new advice column in given phase - /// - /// # Panics - /// - /// It panics if previous phase before the given one doesn't have advice column allocated. - pub fn advice_column_in(&mut self, phase: P) -> Column { - let phase = phase.to_sealed(); - if let Some(previous_phase) = phase.prev() { - self.assert_phase_exists( - previous_phase, - format!("Column in later phase {phase:?}").as_str(), - ); - } - - let tmp = Column { - index: self.num_advice_columns, - column_type: Advice { phase }, - }; - self.num_advice_columns += 1; - self.num_advice_queries.push(0); - self.advice_column_phase.push(phase); - tmp - } - - /// Allocate a new instance column - pub fn instance_column(&mut self) -> Column { - let tmp = Column { - index: self.num_instance_columns, - column_type: Instance, - }; - self.num_instance_columns += 1; - tmp - } - - /// Requests a challenge that is usable after the given phase. - /// - /// # Panics - /// - /// It panics if the given phase doesn't have advice column allocated. 
- pub fn challenge_usable_after(&mut self, phase: P) -> Challenge { - let phase = phase.to_sealed(); - self.assert_phase_exists( - phase, - format!("Challenge usable after phase {phase:?}").as_str(), - ); - - let tmp = Challenge { - index: self.num_challenges, - phase, - }; - self.num_challenges += 1; - self.challenge_phase.push(phase); - tmp - } - - /// Helper funciotn to assert phase exists, to make sure phase-aware resources - /// are allocated in order, and to avoid any phase to be skipped accidentally - /// to cause unexpected issue in the future. - fn assert_phase_exists(&self, phase: sealed::Phase, resource: &str) { - self.advice_column_phase - .iter() - .find(|advice_column_phase| **advice_column_phase == phase) - .unwrap_or_else(|| { - panic!( - "No Column is used in phase {phase:?} while allocating a new {resource:?}" - ) - }); - } - - /// Returns the list of phases - pub fn phases(&self) -> impl Iterator { - let max_phase = self - .advice_column_phase - .iter() - .max() - .map(|phase| phase.0) - .unwrap_or_default(); - (0..=max_phase).map(sealed::Phase) - } - - /// Compute the degree of the constraint system (the maximum degree of all - /// constraints). - pub fn degree(&self) -> usize { - // The permutation argument will serve alongside the gates, so must be - // accounted for. - let mut degree = self.permutation.required_degree(); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.lookups - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.shuffles - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // Account for each gate to ensure our quotient polynomial is the - // correct degree and that our extended domain is the right size. 
- degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); - - std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } - - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) 
- + 1 // for at least one row - } - - /// Returns number of fixed columns - pub fn num_fixed_columns(&self) -> usize { - self.num_fixed_columns - } - - /// Returns number of advice columns - pub fn num_advice_columns(&self) -> usize { - self.num_advice_columns - } - - /// Returns number of instance columns - pub fn num_instance_columns(&self) -> usize { - self.num_instance_columns - } - - /// Returns number of selectors - pub fn num_selectors(&self) -> usize { - self.num_selectors - } - - /// Returns number of challenges - pub fn num_challenges(&self) -> usize { - self.num_challenges - } - - /// Returns phase of advice columns - pub fn advice_column_phase(&self) -> Vec { - self.advice_column_phase - .iter() - .map(|phase| phase.0) - .collect() - } - - /// Returns phase of challenges - pub fn challenge_phase(&self) -> Vec { - self.challenge_phase.iter().map(|phase| phase.0).collect() - } - - /// Returns gates - pub fn gates(&self) -> &Vec> { - &self.gates - } - - /// Returns general column annotations - pub fn general_column_annotations(&self) -> &HashMap { - &self.general_column_annotations - } - - /// Returns advice queries - pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { - &self.advice_queries - } - - /// Returns instance queries - pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { - &self.instance_queries - } - - /// Returns fixed queries - pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { - &self.fixed_queries - } - - /// Returns permutation argument - pub fn permutation(&self) -> &permutation::Argument { - &self.permutation - } - - /// Returns lookup arguments - pub fn lookups(&self) -> &Vec> { - &self.lookups - } - - /// Returns shuffle arguments - pub fn shuffles(&self) -> &Vec> { - &self.shuffles - } - - /// Returns constants - pub fn constants(&self) -> &Vec> { - &self.constants - } -} - -/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup -/// table. 
-#[derive(Debug)] -pub struct VirtualCells<'a, F: Field> { - meta: &'a mut ConstraintSystem, - queried_selectors: Vec, - queried_cells: Vec, -} - -impl<'a, F: Field> VirtualCells<'a, F> { - fn new(meta: &'a mut ConstraintSystem) -> Self { - VirtualCells { - meta, - queried_selectors: vec![], - queried_cells: vec![], - } - } - - /// Query a selector at the current position. - pub fn query_selector(&mut self, selector: Selector) -> Expression { - self.queried_selectors.push(selector); - Expression::Selector(selector) - } - - /// Query a fixed column at a relative position - pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Fixed(FixedQuery { - index: Some(self.meta.query_fixed_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an advice column at a relative position - pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Advice(AdviceQuery { - index: Some(self.meta.query_advice_index(column, at)), - column_index: column.index, - rotation: at, - phase: column.column_type().phase, - }) - } - - /// Query an instance column at a relative position - pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Instance(InstanceQuery { - index: Some(self.meta.query_instance_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an Any column at a relative position - pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { - let column = column.into(); - match column.column_type() { - Any::Advice(_) => self.query_advice(Column::::try_from(column).unwrap(), at), - Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), - Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), - } - } - - /// Query a 
challenge - pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { - Expression::Challenge(challenge) - } -} - -#[cfg(test)] -mod tests { - use super::Expression; - use halo2curves::bn256::Fr; - - #[test] - fn iter_sum() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().sum(); - let expected: Expression = Expression::Sum( - Box::new(Expression::Sum( - Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } - - #[test] - fn iter_product() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().product(); - let expected: Expression = Expression::Product( - Box::new(Expression::Product( - Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } -} diff --git a/halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs b/halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs deleted file mode 100644 index 053ebe3178..0000000000 --- a/halo2_proofs_rm/src/plonk/circuit/compress_selectors.rs +++ /dev/null @@ -1,352 +0,0 @@ -use super::Expression; -use ff::Field; - -/// This describes a selector and where it is activated. -#[derive(Debug, Clone)] -pub struct SelectorDescription { - /// The selector that this description references, by index. - pub selector: usize, - - /// The vector of booleans defining which rows are active for this selector. - pub activations: Vec, - - /// The maximum degree of a gate involving this selector, including the - /// virtual selector itself. 
This means this will be at least 1 for any - /// expression containing a simple selector, even if that selector is not - /// multiplied by anything. - pub max_degree: usize, -} - -/// This describes the assigned combination of a particular selector as well as -/// the expression it should be substituted with. -#[derive(Debug, Clone)] -pub struct SelectorAssignment { - /// The selector that this structure references, by index. - pub selector: usize, - - /// The combination this selector was assigned to - pub combination_index: usize, - - /// The expression we wish to substitute with - pub expression: Expression, -} - -/// This function takes a vector that defines each selector as well as a closure -/// used to allocate new fixed columns, and returns the assignment of each -/// combination as well as details about each selector assignment. -/// -/// This function takes -/// * `selectors`, a vector of `SelectorDescription`s that describe each -/// selector -/// * `max_degree`, the maximum allowed degree of any gate -/// * `allocate_fixed_columns`, a closure that constructs a new fixed column and -/// queries it at Rotation::cur(), returning the expression -/// -/// and returns `Vec>` containing the assignment of each new fixed column -/// (which each correspond to a combination) as well as a vector of -/// `SelectorAssignment` that the caller can use to perform the necessary -/// substitutions to the constraint system. -/// -/// This function is completely deterministic. -pub fn process( - mut selectors: Vec, - max_degree: usize, - mut allocate_fixed_column: E, -) -> (Vec>, Vec>) -where - E: FnMut() -> Expression, -{ - if selectors.is_empty() { - // There is nothing to optimize. - return (vec![], vec![]); - } - - // The length of all provided selectors must be the same. 
- let n = selectors[0].activations.len(); - assert!(selectors.iter().all(|a| a.activations.len() == n)); - - let mut combination_assignments = vec![]; - let mut selector_assignments = vec![]; - - // All provided selectors of degree 0 are assumed to be either concrete - // selectors or do not appear in a gate. Let's address these first. - selectors.retain(|selector| { - if selector.max_degree == 0 { - // This is a complex selector, or a selector that does not appear in any - // gate constraint. - let expression = allocate_fixed_column(); - - let combination_assignment = selector - .activations - .iter() - .map(|b| if *b { F::ONE } else { F::ZERO }) - .collect::>(); - let combination_index = combination_assignments.len(); - combination_assignments.push(combination_assignment); - selector_assignments.push(SelectorAssignment { - selector: selector.selector, - combination_index, - expression, - }); - - false - } else { - true - } - }); - - // All of the remaining `selectors` are simple. Let's try to combine them. - // First, we compute the exclusion matrix that has (j, k) = true if selector - // j and selector k conflict -- that is, they are both enabled on the same - // row. This matrix is symmetric and the diagonal entries are false, so we - // only need to store the lower triangular entries. - let mut exclusion_matrix = (0..selectors.len()) - .map(|i| vec![false; i]) - .collect::>(); - - for (i, rows) in selectors - .iter() - .map(|selector| &selector.activations) - .enumerate() - { - // Loop over the selectors previous to this one - for (j, other_selector) in selectors.iter().enumerate().take(i) { - // Look at what selectors are active at the same row - if rows - .iter() - .zip(other_selector.activations.iter()) - .any(|(l, r)| l & r) - { - // Mark them as incompatible - exclusion_matrix[i][j] = true; - } - } - } - - // Simple selectors that we've added to combinations already. 
- let mut added = vec![false; selectors.len()]; - - for (i, selector) in selectors.iter().enumerate() { - if added[i] { - continue; - } - added[i] = true; - assert!(selector.max_degree <= max_degree); - // This is used to keep track of the largest degree gate involved in the - // combination so far. We subtract by one to omit the virtual selector - // which will be substituted by the caller with the expression we give - // them. - let mut d = selector.max_degree - 1; - let mut combination = vec![selector]; - let mut combination_added = vec![i]; - - // Try to find other selectors that can join this one. - 'try_selectors: for (j, selector) in selectors.iter().enumerate().skip(i + 1) { - if d + combination.len() == max_degree { - // Short circuit; nothing can be added to this - // combination. - break 'try_selectors; - } - - // Skip selectors that have been added to previous combinations - if added[j] { - continue 'try_selectors; - } - - // Is this selector excluded from co-existing in the same - // combination with any of the other selectors so far? - for &i in combination_added.iter() { - if exclusion_matrix[j][i] { - continue 'try_selectors; - } - } - - // Can the new selector join the combination? Reminder: we use - // selector.max_degree - 1 to omit the influence of the virtual - // selector on the degree, as it will be substituted. - let new_d = std::cmp::max(d, selector.max_degree - 1); - if new_d + combination.len() + 1 > max_degree { - // Guess not. - continue 'try_selectors; - } - - d = new_d; - combination.push(selector); - combination_added.push(j); - added[j] = true; - } - - // Now, compute the selector and combination assignments. 
- let mut combination_assignment = vec![F::ZERO; n]; - let combination_len = combination.len(); - let combination_index = combination_assignments.len(); - let query = allocate_fixed_column(); - - let mut assigned_root = F::ONE; - selector_assignments.extend(combination.into_iter().map(|selector| { - // Compute the expression for substitution. This produces an expression of the - // form - // q * Prod[i = 1..=combination_len, i != assigned_root](i - q) - // - // which is non-zero only on rows where `combination_assignment` is set to - // `assigned_root`. In particular, rows set to 0 correspond to all selectors - // being disabled. - let mut expression = query.clone(); - let mut root = F::ONE; - for _ in 0..combination_len { - if root != assigned_root { - expression = expression * (Expression::Constant(root) - query.clone()); - } - root += F::ONE; - } - - // Update the combination assignment - for (combination, selector) in combination_assignment - .iter_mut() - .zip(selector.activations.iter()) - { - // This will not overwrite another selector's activations because - // we have ensured that selectors are disjoint. - if *selector { - *combination = assigned_root; - } - } - - assigned_root += F::ONE; - - SelectorAssignment { - selector: selector.selector, - combination_index, - expression, - } - })); - combination_assignments.push(combination_assignment); - } - - (combination_assignments, selector_assignments) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{plonk::FixedQuery, poly::Rotation}; - use halo2curves::pasta::Fp; - use proptest::collection::{vec, SizeRange}; - use proptest::prelude::*; - - prop_compose! { - fn arb_selector(assignment_size: usize, max_degree: usize) - (degree in 0..max_degree, - assignment in vec(any::(), assignment_size)) - -> (usize, Vec) { - (degree, assignment) - } - } - - prop_compose! 
{ - fn arb_selector_list(assignment_size: usize, max_degree: usize, num_selectors: impl Into) - (list in vec(arb_selector(assignment_size, max_degree), num_selectors)) - -> Vec - { - list.into_iter().enumerate().map(|(i, (max_degree, activations))| { - SelectorDescription { - selector: i, - activations, - max_degree, - } - }).collect() - } - } - - prop_compose! { - fn arb_instance(max_assignment_size: usize, - max_degree: usize, - max_selectors: usize) - (assignment_size in 1..max_assignment_size, - degree in 1..max_degree, - num_selectors in 1..max_selectors) - (list in arb_selector_list(assignment_size, degree, num_selectors), - degree in Just(degree)) - -> (Vec, usize) - { - (list, degree) - } - } - - proptest! { - #![proptest_config(ProptestConfig::with_cases(10000))] - #[test] - fn test_selector_combination((selectors, max_degree) in arb_instance(10, 10, 15)) { - let mut query = 0; - let (combination_assignments, selector_assignments) = - process::(selectors.clone(), max_degree, || { - let tmp = Expression::Fixed(FixedQuery { - index: Some(query), - column_index: query, - rotation: Rotation::cur(), - }); - query += 1; - tmp - }); - - { - let mut selectors_seen = vec![]; - assert_eq!(selectors.len(), selector_assignments.len()); - for selector in &selector_assignments { - // Every selector should be assigned to a combination - assert!(selector.combination_index < combination_assignments.len()); - assert!(!selectors_seen.contains(&selector.selector)); - selectors_seen.push(selector.selector); - } - } - - // Test that, for each selector, the provided expression - // 1. evaluates to zero on rows where the selector's activation is off - // 2. evaluates to nonzero on rows where the selector's activation is on - // 3. 
is of degree d such that d + (selector.max_degree - 1) <= max_degree - // OR selector.max_degree is zero - for selector in selector_assignments { - assert_eq!( - selectors[selector.selector].activations.len(), - combination_assignments[selector.combination_index].len() - ); - for (&activation, &assignment) in selectors[selector.selector] - .activations - .iter() - .zip(combination_assignments[selector.combination_index].iter()) - { - let eval = selector.expression.evaluate( - &|c| c, - &|_| panic!("should not occur in returned expressions"), - &|query| { - // Should be the correct combination in the expression - assert_eq!(selector.combination_index, query.index.unwrap()); - assignment - }, - &|_| panic!("should not occur in returned expressions"), - &|_| panic!("should not occur in returned expressions"), - &|_| panic!("should not occur in returned expressions"), - &|a| -a, - &|a, b| a + b, - &|a, b| a * b, - &|a, f| a * f, - ); - - if activation { - assert!(!eval.is_zero_vartime()); - } else { - assert!(eval.is_zero_vartime()); - } - } - - let expr_degree = selector.expression.degree(); - assert!(expr_degree <= max_degree); - if selectors[selector.selector].max_degree > 0 { - assert!( - (selectors[selector.selector].max_degree - 1) + expr_degree <= max_degree - ); - } - } - } - } -} diff --git a/halo2_proofs_rm/src/plonk/error.rs b/halo2_proofs_rm/src/plonk/error.rs deleted file mode 100644 index 14d7339503..0000000000 --- a/halo2_proofs_rm/src/plonk/error.rs +++ /dev/null @@ -1,136 +0,0 @@ -use std::error; -use std::fmt; -use std::io; - -use super::TableColumn; -use super::{Any, Column}; - -/// This is an error that could occur during proving or circuit synthesis. -// TODO: these errors need to be cleaned up -#[derive(Debug)] -pub enum Error { - /// This is an error that can occur during synthesis of the circuit, for - /// example, when the witness is not present. - Synthesis, - /// The provided instances do not match the circuit parameters. 
- InvalidInstances, - /// The constraint system is not satisfied. - ConstraintSystemFailure, - /// Out of bounds index passed to a backend - BoundsFailure, - /// Opening error - Opening, - /// Transcript error - Transcript(io::Error), - /// `k` is too small for the given circuit. - NotEnoughRowsAvailable { - /// The current value of `k` being used. - current_k: u32, - }, - /// Instance provided exceeds number of available rows - InstanceTooLarge, - /// Circuit synthesis requires global constants, but circuit configuration did not - /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. - /// - /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant - NotEnoughColumnsForConstants, - /// The instance sets up a copy constraint involving a column that has not been - /// included in the permutation. - ColumnNotInPermutation(Column), - /// An error relating to a lookup table. - TableError(TableError), - /// Generic error not covered by previous cases - Other(String), -} - -impl From for Error { - fn from(error: io::Error) -> Self { - // The only place we can get io::Error from is the transcript. - Error::Transcript(error) - } -} - -impl Error { - /// Constructs an `Error::NotEnoughRowsAvailable`. 
- pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { - Error::NotEnoughRowsAvailable { current_k } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Error::Synthesis => write!(f, "General synthesis error"), - Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), - Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), - Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), - Error::Opening => write!(f, "Multi-opening proof was invalid"), - Error::Transcript(e) => write!(f, "Transcript error: {e}"), - Error::NotEnoughRowsAvailable { current_k } => write!( - f, - "k = {current_k} is too small for the given circuit. Try using a larger value of k", - ), - Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), - Error::NotEnoughColumnsForConstants => { - write!( - f, - "Too few fixed columns are enabled for global constants usage" - ) - } - Error::ColumnNotInPermutation(column) => write!( - f, - "Column {column:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", - ), - Error::TableError(error) => write!(f, "{error}"), - Error::Other(error) => write!(f, "Other: {error}"), - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match self { - Error::Transcript(e) => Some(e), - _ => None, - } - } -} - -/// This is an error that could occur during table synthesis. -#[derive(Debug)] -pub enum TableError { - /// A `TableColumn` has not been assigned. - ColumnNotAssigned(TableColumn), - /// A Table has columns of uneven lengths. 
- UnevenColumnLengths((TableColumn, usize), (TableColumn, usize)), - /// Attempt to assign a used `TableColumn` - UsedColumn(TableColumn), - /// Attempt to overwrite a default value - OverwriteDefault(TableColumn, String, String), -} - -impl fmt::Display for TableError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableError::ColumnNotAssigned(col) => { - write!( - f, - "{col:?} not fully assigned. Help: assign a value at offset 0.", - ) - } - TableError::UnevenColumnLengths((col, col_len), (table, table_len)) => write!( - f, - "{col:?} has length {col_len} while {table:?} has length {table_len}", - ), - TableError::UsedColumn(col) => { - write!(f, "{col:?} has already been used") - } - TableError::OverwriteDefault(col, default, val) => { - write!( - f, - "Attempted to overwrite default value {default} with {val} in {col:?}", - ) - } - } - } -} diff --git a/halo2_proofs_rm/src/plonk/evaluation.rs b/halo2_proofs_rm/src/plonk/evaluation.rs deleted file mode 100644 index 431c487c7e..0000000000 --- a/halo2_proofs_rm/src/plonk/evaluation.rs +++ /dev/null @@ -1,871 +0,0 @@ -use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; -use crate::poly::Basis; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, -}; -use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; - -use super::{shuffle, ConstraintSystem, Expression}; - -/// Return the index in the polynomial of size `isize` after rotation `rot`. 
-fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { - (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize -} - -/// Value used in a calculation -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] -pub enum ValueSource { - /// This is a constant value - Constant(usize), - /// This is an intermediate value - Intermediate(usize), - /// This is a fixed column - Fixed(usize, usize), - /// This is an advice (witness) column - Advice(usize, usize), - /// This is an instance (external) column - Instance(usize, usize), - /// This is a challenge - Challenge(usize), - /// beta - Beta(), - /// gamma - Gamma(), - /// theta - Theta(), - /// y - Y(), - /// Previous value - PreviousValue(), -} - -impl Default for ValueSource { - fn default() -> Self { - ValueSource::Constant(0) - } -} - -impl ValueSource { - /// Get the value for this source - #[allow(clippy::too_many_arguments)] - pub fn get( - &self, - rotations: &[usize], - constants: &[F], - intermediates: &[F], - fixed_values: &[Polynomial], - advice_values: &[Polynomial], - instance_values: &[Polynomial], - challenges: &[F], - beta: &F, - gamma: &F, - theta: &F, - y: &F, - previous_value: &F, - ) -> F { - match self { - ValueSource::Constant(idx) => constants[*idx], - ValueSource::Intermediate(idx) => intermediates[*idx], - ValueSource::Fixed(column_index, rotation) => { - fixed_values[*column_index][rotations[*rotation]] - } - ValueSource::Advice(column_index, rotation) => { - advice_values[*column_index][rotations[*rotation]] - } - ValueSource::Instance(column_index, rotation) => { - instance_values[*column_index][rotations[*rotation]] - } - ValueSource::Challenge(index) => challenges[*index], - ValueSource::Beta() => *beta, - ValueSource::Gamma() => *gamma, - ValueSource::Theta() => *theta, - ValueSource::Y() => *y, - ValueSource::PreviousValue() => *previous_value, - } - } -} - -/// Calculation -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum Calculation { - /// This is an 
addition - Add(ValueSource, ValueSource), - /// This is a subtraction - Sub(ValueSource, ValueSource), - /// This is a product - Mul(ValueSource, ValueSource), - /// This is a square - Square(ValueSource), - /// This is a double - Double(ValueSource), - /// This is a negation - Negate(ValueSource), - /// This is Horner's rule: `val = a; val = val * c + b[]` - Horner(ValueSource, Vec, ValueSource), - /// This is a simple assignment - Store(ValueSource), -} - -impl Calculation { - /// Get the resulting value of this calculation - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - rotations: &[usize], - constants: &[F], - intermediates: &[F], - fixed_values: &[Polynomial], - advice_values: &[Polynomial], - instance_values: &[Polynomial], - challenges: &[F], - beta: &F, - gamma: &F, - theta: &F, - y: &F, - previous_value: &F, - ) -> F { - let get_value = |value: &ValueSource| { - value.get( - rotations, - constants, - intermediates, - fixed_values, - advice_values, - instance_values, - challenges, - beta, - gamma, - theta, - y, - previous_value, - ) - }; - match self { - Calculation::Add(a, b) => get_value(a) + get_value(b), - Calculation::Sub(a, b) => get_value(a) - get_value(b), - Calculation::Mul(a, b) => get_value(a) * get_value(b), - Calculation::Square(v) => get_value(v).square(), - Calculation::Double(v) => get_value(v).double(), - Calculation::Negate(v) => -get_value(v), - Calculation::Horner(start_value, parts, factor) => { - let factor = get_value(factor); - let mut value = get_value(start_value); - for part in parts.iter() { - value = value * factor + get_value(part); - } - value - } - Calculation::Store(v) => get_value(v), - } - } -} - -/// Evaluator -#[derive(Clone, Default, Debug)] -pub struct Evaluator { - /// Custom gates evalution - pub custom_gates: GraphEvaluator, - /// Lookups evalution - pub lookups: Vec>, - /// Shuffle evalution - pub shuffles: Vec>, -} - -/// GraphEvaluator -#[derive(Clone, Debug)] -pub struct GraphEvaluator { - 
/// Constants - pub constants: Vec, - /// Rotations - pub rotations: Vec, - /// Calculations - pub calculations: Vec, - /// Number of intermediates - pub num_intermediates: usize, -} - -/// EvaluationData -#[derive(Default, Debug)] -pub struct EvaluationData { - /// Intermediates - pub intermediates: Vec, - /// Rotations - pub rotations: Vec, -} - -/// CaluclationInfo -#[derive(Clone, Debug)] -pub struct CalculationInfo { - /// Calculation - pub calculation: Calculation, - /// Target - pub target: usize, -} - -impl Evaluator { - /// Creates a new evaluation structure - pub fn new(cs: &ConstraintSystem) -> Self { - let mut ev = Evaluator::default(); - - // Custom gates - let mut parts = Vec::new(); - for gate in cs.gates.iter() { - parts.extend( - gate.polynomials() - .iter() - .map(|poly| ev.custom_gates.add_expression(poly)), - ); - } - ev.custom_gates.add_calculation(Calculation::Horner( - ValueSource::PreviousValue(), - parts, - ValueSource::Y(), - )); - - // Lookups - for lookup in cs.lookups.iter() { - let mut graph = GraphEvaluator::default(); - - let mut evaluate_lc = |expressions: &Vec>| { - let parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - // Input coset - let compressed_input_coset = evaluate_lc(&lookup.input_expressions); - // table coset - let compressed_table_coset = evaluate_lc(&lookup.table_expressions); - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - let right_gamma = graph.add_calculation(Calculation::Add( - compressed_table_coset, - ValueSource::Gamma(), - )); - let lc = graph.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Beta(), - )); - graph.add_calculation(Calculation::Mul(lc, right_gamma)); - - ev.lookups.push(graph); - } - - // Shuffles - for shuffle in cs.shuffles.iter() { - let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { - let 
parts = expressions - .iter() - .map(|expr| graph.add_expression(expr)) - .collect(); - graph.add_calculation(Calculation::Horner( - ValueSource::Constant(0), - parts, - ValueSource::Theta(), - )) - }; - - let mut graph_input = GraphEvaluator::default(); - let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input); - let _ = graph_input.add_calculation(Calculation::Add( - compressed_input_coset, - ValueSource::Gamma(), - )); - - let mut graph_shuffle = GraphEvaluator::default(); - let compressed_shuffle_coset = - evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle); - let _ = graph_shuffle.add_calculation(Calculation::Add( - compressed_shuffle_coset, - ValueSource::Gamma(), - )); - - ev.shuffles.push(graph_input); - ev.shuffles.push(graph_shuffle); - } - - ev - } - - /// Evaluate h poly - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn evaluate_h( - &self, - pk: &ProvingKey, - advice_polys: &[&[Polynomial]], - instance_polys: &[&[Polynomial]], - challenges: &[C::ScalarExt], - y: C::ScalarExt, - beta: C::ScalarExt, - gamma: C::ScalarExt, - theta: C::ScalarExt, - lookups: &[Vec>], - shuffles: &[Vec>], - permutations: &[permutation::prover::Committed], - ) -> Polynomial { - let domain = &pk.vk.domain; - let size = domain.extended_len(); - let rot_scale = 1 << (domain.extended_k() - domain.k()); - let fixed = &pk.fixed_cosets[..]; - let extended_omega = domain.get_extended_omega(); - let isize = size as i32; - let one = C::ScalarExt::ONE; - let l0 = &pk.l0; - let l_last = &pk.l_last; - let l_active_row = &pk.l_active_row; - let p = &pk.vk.cs.permutation; - - // Calculate the advice and instance cosets - let advice: Vec>> = advice_polys - .iter() - .map(|advice_polys| { - advice_polys - .iter() - .map(|poly| domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - let instance: Vec>> = instance_polys - .iter() - .map(|instance_polys| { - instance_polys - .iter() - .map(|poly| 
domain.coeff_to_extended(poly.clone())) - .collect() - }) - .collect(); - - let mut values = domain.empty_extended(); - - // Core expression evaluations - let num_threads = multicore::current_num_threads(); - for ((((advice, instance), lookups), shuffles), permutation) in advice - .iter() - .zip(instance.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .zip(permutations.iter()) - { - // Custom gates - multicore::scope(|scope| { - let chunk_size = (size + num_threads - 1) / num_threads; - for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() { - let start = thread_idx * chunk_size; - scope.spawn(move |_| { - let mut eval_data = self.custom_gates.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - *value = self.custom_gates.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - value, - idx, - rot_scale, - isize, - ); - } - }); - } - }); - - // Permutations - let sets = &permutation.sets; - if !sets.is_empty() { - let blinding_factors = pk.vk.cs.blinding_factors(); - let last_rotation = Rotation(-((blinding_factors + 1) as i32)); - let chunk_len = pk.vk.cs.degree() - 2; - let delta_start = beta * &C::Scalar::ZETA; - - let first_set = sets.first().unwrap(); - let last_set = sets.last().unwrap(); - - // Permutation constraints - parallelize(&mut values, |values, start| { - let mut beta_term = extended_omega.pow_vartime([start as u64, 0, 0, 0]); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_last = get_rotation_idx(idx, last_rotation.0, rot_scale, isize); - - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - *value = *value * y - + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); - // Enforce only for the last set. 
- // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - *value = *value * y - + ((last_set.permutation_product_coset[idx] - * last_set.permutation_product_coset[idx] - - last_set.permutation_product_coset[idx]) - * l_last[idx]); - // Except for the first set, enforce. - // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - for (set_idx, set) in sets.iter().enumerate() { - if set_idx != 0 { - *value = *value * y - + ((set.permutation_product_coset[idx] - - permutation.sets[set_idx - 1].permutation_product_coset - [r_last]) - * l0[idx]); - } - } - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod_j (p(X) + \beta s_j(X) + \gamma) - // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) - // ) - let mut current_delta = delta_start * beta_term; - for ((set, columns), cosets) in sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(pk.permutation.cosets.chunks(chunk_len)) - { - let mut left = set.permutation_product_coset[r_next]; - for (values, permutation) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) - .zip(cosets.iter()) - { - left *= values[idx] + beta * permutation[idx] + gamma; - } - - let mut right = set.permutation_product_coset[idx]; - for values in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], - }) { - right *= values[idx] + current_delta + gamma; - current_delta *= &C::Scalar::DELTA; - } - - *value = *value * y + ((left - right) * l_active_row[idx]); - } - beta_term *= &extended_omega; - } - }); - } - - // Lookups - for (n, lookup) in lookups.iter().enumerate() { - // Polynomials required for this lookup. - // Calculated here so these only have to be kept in memory for the short time - // they are actually needed. 
- let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); - let permuted_input_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_input_poly.clone()); - let permuted_table_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_table_poly.clone()); - - // Lookup constraints - parallelize(&mut values, |values, start| { - let lookup_evaluator = &self.lookups[n]; - let mut eval_data = lookup_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let table_value = lookup_evaluator.evaluate( - &mut eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); - - let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) - // (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - *value = *value * y - + ((product_coset[r_next] - * (permuted_input_coset[idx] + beta) - * (permuted_table_coset[idx] + gamma) - - product_coset[idx] * table_value) - * l_active_row[idx]); - // Check that the first values in the permuted input expression and permuted - // fixed expression are the same. - // l_0(X) * (a'(X) - s'(X)) = 0 - *value = *value * y + (a_minus_s * l0[idx]); - // Check that each value in the permuted lookup input expression is either - // equal to the value above it, or the value at the same index in the - // permuted table expression. 
- // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - *value = *value * y - + (a_minus_s - * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) - * l_active_row[idx]); - } - }); - } - - // Shuffle constraints - for (n, shuffle) in shuffles.iter().enumerate() { - let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone()); - - // Shuffle constraints - parallelize(&mut values, |values, start| { - let input_evaluator = &self.shuffles[2 * n]; - let shuffle_evaluator = &self.shuffles[2 * n + 1]; - let mut eval_data_input = shuffle_evaluator.instance(); - let mut eval_data_shuffle = shuffle_evaluator.instance(); - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - - let input_value = input_evaluator.evaluate( - &mut eval_data_input, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let shuffle_value = shuffle_evaluator.evaluate( - &mut eval_data_shuffle, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ); - - let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0 - *value = *value * y - + l_active_row[idx] - * (product_coset[r_next] * shuffle_value - - product_coset[idx] * input_value) - } - }); - } - } - values - } -} - -impl Default for GraphEvaluator { - fn default() -> Self { - Self { - // Fixed positions to allow easy access - constants: vec![ - C::ScalarExt::ZERO, - C::ScalarExt::ONE, - C::ScalarExt::from(2u64), - ], - rotations: Vec::new(), - calculations: Vec::new(), - 
num_intermediates: 0, - } - } -} - -impl GraphEvaluator { - /// Adds a rotation - fn add_rotation(&mut self, rotation: &Rotation) -> usize { - let position = self.rotations.iter().position(|&c| c == rotation.0); - match position { - Some(pos) => pos, - None => { - self.rotations.push(rotation.0); - self.rotations.len() - 1 - } - } - } - - /// Adds a constant - fn add_constant(&mut self, constant: &C::ScalarExt) -> ValueSource { - let position = self.constants.iter().position(|&c| c == *constant); - ValueSource::Constant(match position { - Some(pos) => pos, - None => { - self.constants.push(*constant); - self.constants.len() - 1 - } - }) - } - - /// Adds a calculation. - /// Currently does the simplest thing possible: just stores the - /// resulting value so the result can be reused when that calculation - /// is done multiple times. - fn add_calculation(&mut self, calculation: Calculation) -> ValueSource { - let existing_calculation = self - .calculations - .iter() - .find(|c| c.calculation == calculation); - match existing_calculation { - Some(existing_calculation) => ValueSource::Intermediate(existing_calculation.target), - None => { - let target = self.num_intermediates; - self.calculations.push(CalculationInfo { - calculation, - target, - }); - self.num_intermediates += 1; - ValueSource::Intermediate(target) - } - } - } - - /// Generates an optimized evaluation for the expression - fn add_expression(&mut self, expr: &Expression) -> ValueSource { - match expr { - Expression::Constant(scalar) => self.add_constant(scalar), - Expression::Selector(_selector) => unreachable!(), - Expression::Fixed(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Fixed( - query.column_index, - rot_idx, - ))) - } - Expression::Advice(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Advice( - query.column_index, - rot_idx, - ))) - } - 
Expression::Instance(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Instance( - query.column_index, - rot_idx, - ))) - } - Expression::Challenge(challenge) => self.add_calculation(Calculation::Store( - ValueSource::Challenge(challenge.index()), - )), - Expression::Negated(a) => match **a { - Expression::Constant(scalar) => self.add_constant(&-scalar), - _ => { - let result_a = self.add_expression(a); - match result_a { - ValueSource::Constant(0) => result_a, - _ => self.add_calculation(Calculation::Negate(result_a)), - } - } - }, - Expression::Sum(a, b) => { - // Undo subtraction stored as a + (-b) in expressions - match &**b { - Expression::Negated(b_int) => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b_int); - if result_a == ValueSource::Constant(0) { - self.add_calculation(Calculation::Negate(result_b)) - } else if result_b == ValueSource::Constant(0) { - result_a - } else { - self.add_calculation(Calculation::Sub(result_a, result_b)) - } - } - _ => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b); - if result_a == ValueSource::Constant(0) { - result_b - } else if result_b == ValueSource::Constant(0) { - result_a - } else if result_a <= result_b { - self.add_calculation(Calculation::Add(result_a, result_b)) - } else { - self.add_calculation(Calculation::Add(result_b, result_a)) - } - } - } - } - Expression::Product(a, b) => { - let result_a = self.add_expression(a); - let result_b = self.add_expression(b); - if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) { - ValueSource::Constant(0) - } else if result_a == ValueSource::Constant(1) { - result_b - } else if result_b == ValueSource::Constant(1) { - result_a - } else if result_a == ValueSource::Constant(2) { - self.add_calculation(Calculation::Double(result_b)) - } else if result_b == ValueSource::Constant(2) { - 
self.add_calculation(Calculation::Double(result_a)) - } else if result_a == result_b { - self.add_calculation(Calculation::Square(result_a)) - } else if result_a <= result_b { - self.add_calculation(Calculation::Mul(result_a, result_b)) - } else { - self.add_calculation(Calculation::Mul(result_b, result_a)) - } - } - Expression::Scaled(a, f) => { - if *f == C::ScalarExt::ZERO { - ValueSource::Constant(0) - } else if *f == C::ScalarExt::ONE { - self.add_expression(a) - } else { - let cst = self.add_constant(f); - let result_a = self.add_expression(a); - self.add_calculation(Calculation::Mul(result_a, cst)) - } - } - } - } - - /// Creates a new evaluation structure - pub fn instance(&self) -> EvaluationData { - EvaluationData { - intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates], - rotations: vec![0usize; self.rotations.len()], - } - } - - #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - data: &mut EvaluationData, - fixed: &[Polynomial], - advice: &[Polynomial], - instance: &[Polynomial], - challenges: &[C::ScalarExt], - beta: &C::ScalarExt, - gamma: &C::ScalarExt, - theta: &C::ScalarExt, - y: &C::ScalarExt, - previous_value: &C::ScalarExt, - idx: usize, - rot_scale: i32, - isize: i32, - ) -> C::ScalarExt { - // All rotation index values - for (rot_idx, rot) in self.rotations.iter().enumerate() { - data.rotations[rot_idx] = get_rotation_idx(idx, *rot, rot_scale, isize); - } - - // All calculations, with cached intermediate results - for calc in self.calculations.iter() { - data.intermediates[calc.target] = calc.calculation.evaluate( - &data.rotations, - &self.constants, - &data.intermediates, - fixed, - advice, - instance, - challenges, - beta, - gamma, - theta, - y, - previous_value, - ); - } - - // Return the result of the last calculation (if any) - if let Some(calc) = self.calculations.last() { - data.intermediates[calc.target] - } else { - C::ScalarExt::ZERO - } - } -} - -/// Simple evaluation of an expression -pub fn evaluate( - 
expression: &Expression, - size: usize, - rot_scale: i32, - fixed: &[Polynomial], - advice: &[Polynomial], - instance: &[Polynomial], - challenges: &[F], -) -> Vec { - let mut values = vec![F::ZERO; size]; - let isize = size as i32; - parallelize(&mut values, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = start + i; - *value = expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| { - fixed[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - advice[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - instance[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * b, - &|a, scalar| a * scalar, - ); - } - }); - values -} diff --git a/halo2_proofs_rm/src/plonk/keygen.rs b/halo2_proofs_rm/src/plonk/keygen.rs deleted file mode 100644 index 81b890cf65..0000000000 --- a/halo2_proofs_rm/src/plonk/keygen.rs +++ /dev/null @@ -1,394 +0,0 @@ -#![allow(clippy::int_plus_one)] - -use std::ops::Range; - -use ff::{Field, FromUniformBytes}; -use group::Curve; - -use super::{ - circuit::{ - compile_circuit, Advice, Any, Assignment, Circuit, Column, CompiledCircuitV2, - ConstraintSystem, Fixed, Instance, Selector, - }, - evaluation::Evaluator, - permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, -}; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - circuit::Value, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; - -pub(crate) fn create_domain( - k: u32, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, -) -> ( - EvaluationDomain, - ConstraintSystem, - ConcreteCircuit::Config, -) -where - C: CurveAffine, - ConcreteCircuit: Circuit, -{ - let mut cs = ConstraintSystem::default(); - 
#[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, params); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - - let degree = cs.degree(); - - let domain = EvaluationDomain::new(degree as u32, k); - - (domain, cs, config) -} - -/// Assembly to be used in circuit synthesis. -#[derive(Debug)] -pub(crate) struct Assembly { - pub(crate) k: u32, - pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: permutation::keygen::AssemblyFront, - pub(crate) selectors: Vec>, - // A range of available rows for assignment and copies. - pub(crate) usable_rows: Range, - pub(crate) _marker: std::marker::PhantomData, -} - -impl Assignment for Assembly { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.selectors[selector.0][row] = true; - - Ok(()) - } - - fn query_instance(&self, _: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - // There is no instance in this context. 
- Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about fixed columns here - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .fixed - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? = to().into_field().assign()?; - - Ok(()) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.permutation - .copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - column: Column, - from_row: usize, - to: Value>, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&from_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - let col = self - .fixed - .get_mut(column.index()) - .ok_or(Error::BoundsFailure)?; - - let filler = to.assign()?; - for row in self.usable_rows.clone().skip(from_row) { - col[row] = filler; - } - - Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. 
- } -} - -/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. -pub fn keygen_vk_v2<'params, C, P>( - params: &P, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - C::Scalar: FromUniformBytes<64>, -{ - let cs2 = &circuit.cs; - let cs: ConstraintSystem = cs2.clone().into(); - let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? - .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - params - .commit_lagrange( - &Polynomial::new_lagrange_from_vec(poly.clone()), - Blind::default(), - ) - .to_affine() - }) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - Vec::new(), - false, - )) -} - -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// By default, selector compression is turned **off**. -pub fn keygen_vk<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - keygen_vk_custom(params, circuit, true) -} - -/// Generate a `VerifyingKey` from an instance of `Circuit`. -/// -/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
-pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( - params: &P, - circuit: &ConcreteCircuit, - compress_selectors: bool, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, - C::Scalar: FromUniformBytes<64>, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; - let mut vk = keygen_vk_v2(params, &compiled_circuit)?; - vk.compress_selectors = compress_selectors; - Ok(vk) -} - -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. -pub fn keygen_pk_v2<'params, C, P>( - params: &P, - vk: VerifyingKey, - circuit: &CompiledCircuitV2, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, -{ - let cs = &circuit.cs; - - if (params.n() as usize) < vk.cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let fixed_polys: Vec<_> = circuit - .preprocessing - .fixed - .iter() - .map(|poly| { - vk.domain - .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) - }) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( - params.n() as usize, - &cs.permutation, - &circuit.preprocessing.permutation, - )? - .build_pk(params, &vk.domain, &cs.permutation); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. 
- let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); - - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: circuit - .preprocessing - .fixed - .clone() - .into_iter() - .map(Polynomial::new_lagrange_from_vec) - .collect(), - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) -} - -/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. 
-pub fn keygen_pk<'params, C, P, ConcreteCircuit>( - params: &P, - vk: VerifyingKey, - circuit: &ConcreteCircuit, -) -> Result, Error> -where - C: CurveAffine, - P: Params<'params, C>, - ConcreteCircuit: Circuit, -{ - let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, vk.compress_selectors)?; - keygen_pk_v2(params, vk, &compiled_circuit) -} diff --git a/halo2_proofs_rm/src/plonk/lookup.rs b/halo2_proofs_rm/src/plonk/lookup.rs deleted file mode 100644 index 97be4b36e0..0000000000 --- a/halo2_proofs_rm/src/plonk/lookup.rs +++ /dev/null @@ -1,108 +0,0 @@ -use super::circuit::{Expression, ExpressionMid}; -use ff::Field; -use std::fmt::{self, Debug}; - -pub(crate) mod prover; -pub(crate) mod verifier; - -/// Expressions involved in a lookup argument, with a name as metadata. -#[derive(Clone, Debug)] -pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, -} - -/// Expressions involved in a lookup argument, with a name as metadata. -#[derive(Clone)] -pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, -} - -impl Debug for Argument { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Argument") - .field("input_expressions", &self.input_expressions) - .field("table_expressions", &self.table_expressions) - .finish() - } -} - -impl Argument { - /// Constructs a new lookup argument. - /// - /// `table_map` is a sequence of `(input, table)` tuples. - pub fn new>(name: S, table_map: Vec<(Expression, Expression)>) -> Self { - let (input_expressions, table_expressions) = table_map.into_iter().unzip(); - Argument { - name: name.as_ref().to_string(), - input_expressions, - table_expressions, - } - } - - pub(crate) fn required_degree(&self) -> usize { - assert_eq!(self.input_expressions.len(), self.table_expressions.len()); - - // The first value in the permutation poly should be one. 
- // degree 2: - // l_0(X) * (1 - z(X)) = 0 - // - // The "last" value in the permutation poly should be a boolean, for - // completeness and soundness. - // degree 3: - // l_last(X) * (z(X)^2 - z(X)) = 0 - // - // Enable the permutation argument for only the rows involved. - // degree (2 + input_degree + table_degree) or 4, whichever is larger: - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - // - // The first two values of a' and s' should be the same. - // degree 2: - // l_0(X) * (a'(X) - s'(X)) = 0 - // - // Either the two values are the same, or the previous - // value of a' is the same as the current value. - // degree 3: - // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - let mut input_degree = 1; - for expr in self.input_expressions.iter() { - input_degree = std::cmp::max(input_degree, expr.degree()); - } - let mut table_degree = 1; - for expr in self.table_expressions.iter() { - table_degree = std::cmp::max(table_degree, expr.degree()); - } - - // In practice because input_degree and table_degree are initialized to - // one, the latter half of this max() invocation is at least 4 always, - // rendering this call pointless except to be explicit in case we change - // the initialization of input_degree/table_degree in the future. - std::cmp::max( - // (1 - (l_last + l_blind)) z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - 4, - // (1 - (l_last + l_blind)) z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - 2 + input_degree + table_degree, - ) - } - - /// Returns input of this argument - pub fn input_expressions(&self) -> &Vec> { - &self.input_expressions - } - - /// Returns table of this argument - pub fn table_expressions(&self) -> &Vec> { - &self.table_expressions - } - - /// Returns name of this argument - pub fn name(&self) -> &str { - &self.name - } -} diff --git a/halo2_proofs_rm/src/plonk/lookup/prover.rs b/halo2_proofs_rm/src/plonk/lookup/prover.rs deleted file mode 100644 index 028b298853..0000000000 --- a/halo2_proofs_rm/src/plonk/lookup/prover.rs +++ /dev/null @@ -1,475 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, -}; -use super::Argument; -use crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use ff::WithSmallOrderMulGroup; -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use rand_core::RngCore; -use std::{ - collections::BTreeMap, - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -pub(in crate::plonk) struct Permuted { - compressed_input_expression: Polynomial, - permuted_input_expression: Polynomial, - permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - compressed_table_expression: Polynomial, - permuted_table_expression: Polynomial, - permuted_table_poly: Polynomial, - permuted_table_blind: Blind, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - pub(in crate::plonk) permuted_table_poly: Polynomial, - permuted_table_blind: Blind, - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - 
-impl> Argument { - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); 
- - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values - let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - // Commit to permuted input expression - let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } -} - -impl Permuted { - /// Given a Lookup with input expressions, table expressions, and the permuted - /// input expression and permuted table expression, this method constructs the - /// grand product polynomial over the lookup. The grand product polynomial - /// is used to populate the Product struct. The Product struct is - /// added to the Lookup and finally returned by the method. 
- pub(in crate::plonk) fn commit_product< - 'params, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - pk: &ProvingKey, - params: &P, - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - // Goal is to compute the products of fractions - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where a_j(X) is the jth input expression in this lookup, - // where a'(X) is the compression of the permuted input expressions, - // s_j(X) is the jth table expression in this lookup, - // s'(X) is the compression of the permuted table expressions, - // and i is the ith row of the expression. - let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; - // Denominator uses the permuted input expression and permuted table expression - parallelize(&mut lookup_product, |lookup_product, start| { - for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product - .iter_mut() - .zip(self.permuted_input_expression[start..].iter()) - .zip(self.permuted_table_expression[start..].iter()) - { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); - } - }); - - // Batch invert to obtain the denominators for the lookup product - // polynomials - lookup_product.iter_mut().batch_invert(); - - // Finish the computation of the entire fraction by computing the numerators - // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... 
+ \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - parallelize(&mut lookup_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); - } - }); - - // The product vector is a vector of products of fractions of the form - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where there are m input expressions and m table expressions, - // a_j(\omega^i) is the jth input expression in this lookup, - // a'j(\omega^i) is the permuted input expression, - // s_j(\omega^i) is the jth table expression in this lookup, - // s'(\omega^i) is the permuted table expression, - // and i is the ith row of the expression. - - // Compute the evaluations of the lookup product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(lookup_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. - .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - // This test works only with intermediate representations in this method. - // It can be used for debugging purposes. 
- { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - - // l_0(X) * (1 - z(X)) = 0 - assert_eq!(z[0], C::Scalar::ONE); - - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - for i in 0..u { - let mut left = z[i + 1]; - let permuted_input_value = &self.permuted_input_expression[i]; - - let permuted_table_value = &self.permuted_table_expression[i]; - - left *= &(*beta + permuted_input_value); - left *= &(*gamma + permuted_table_value); - - let mut right = z[i]; - let mut input_term = self.compressed_input_expression[i]; - let mut table_term = self.compressed_table_expression[i]; - - input_term += &(*beta); - table_term += &(*gamma); - right *= &(input_term * &table_term); - - assert_eq!(left, right); - } - - // l_last(X) * (z(X)^2 - z(X)) = 0 - // Assertion will fail only when soundness is broken, in which - // case this z[u] value will be zero. (bad!) 
- assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - permuted_input_poly: self.permuted_input_poly, - permuted_input_blind: self.permuted_input_blind, - permuted_table_poly: self.permuted_table_poly, - permuted_table_blind: self.permuted_table_blind, - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_inv = domain.rotate_omega(*x, Rotation::prev()); - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); - let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); - let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - .chain(Some(permuted_input_eval)) - .chain(Some(permuted_input_inv_eval)) - .chain(Some(permuted_table_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: 
self.constructed.product_blind, - })) - // Open lookup input commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup table commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_table_poly, - blind: self.constructed.permuted_table_blind, - })) - // Open lookup input commitments at x_inv - .chain(Some(ProverQuery { - point: x_inv, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} - -type ExpressionPair = (Polynomial, Polynomial); - -/// Given a vector of input values A and a vector of table values S, -/// this method permutes A and S to produce A' and S', such that: -/// - like values in A' are vertically adjacent to each other; and -/// - the first row in a sequence of like values in A' is the row -/// that has the corresponding value in S'. -/// This method returns (A', S') if no errors are encountered. 
-fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - mut rng: R, - input_expression: &Polynomial, - table_expression: &Polynomial, -) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - let usable_rows = params.n() as usize - (blinding_factors + 1); - - let mut permuted_input_expression: Vec = input_expression.to_vec(); - permuted_input_expression.truncate(usable_rows); - - // Sort input lookup expression values - permuted_input_expression.sort(); - - // A BTreeMap of each unique element in the table expression and its count - let mut leftover_table_map: BTreeMap = table_expression - .iter() - .take(usable_rows) - .fold(BTreeMap::new(), |mut acc, coeff| { - *acc.entry(*coeff).or_insert(0) += 1; - acc - }); - let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; - - let mut repeated_input_rows = permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter_mut()) - .enumerate() - .filter_map(|(row, (input_value, table_value))| { - // If this is the first occurrence of `input_value` in the input expression - if row == 0 || *input_value != permuted_input_expression[row - 1] { - *table_value = *input_value; - // Remove one instance of input_value from leftover_table_map - if let Some(count) = leftover_table_map.get_mut(input_value) { - assert!(*count > 0); - *count -= 1; - None - } else { - // Return error if input_value not found - Some(Err(Error::ConstraintSystemFailure)) - } - // If input value is repeated - } else { - Some(Ok(row)) - } - }) - .collect::, _>>()?; - - // Populate permuted table at unfilled rows with leftover table elements - for (coeff, count) in leftover_table_map.iter() { - for _ in 0..*count { - permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; - } - } - assert!(repeated_input_rows.is_empty()); - - permuted_input_expression - .extend((0..(blinding_factors + 1)).map(|_| 
C::Scalar::random(&mut rng))); - permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - assert_eq!(permuted_input_expression.len(), params.n() as usize); - assert_eq!(permuted_table_coeffs.len(), params.n() as usize); - - #[cfg(feature = "sanity-checks")] - { - let mut last = None; - for (a, b) in permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter()) - .take(usable_rows) - { - if *a != *b { - assert_eq!(*a, last.unwrap()); - } - last = Some(*a); - } - } - - Ok(( - domain.lagrange_from_vec(permuted_input_expression), - domain.lagrange_from_vec(permuted_table_coeffs), - )) -} diff --git a/halo2_proofs_rm/src/plonk/lookup/verifier.rs b/halo2_proofs_rm/src/plonk/lookup/verifier.rs deleted file mode 100644 index bbc86c8e9d..0000000000 --- a/halo2_proofs_rm/src/plonk/lookup/verifier.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::iter; - -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, -}; -use super::Argument; -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use ff::Field; - -pub struct PermutationCommitments { - permuted_input_commitment: C, - permuted_table_commitment: C, -} - -pub struct Committed { - permuted: PermutationCommitments, - product_commitment: C, -} - -pub struct Evaluated { - committed: Committed, - product_eval: C::Scalar, - product_next_eval: C::Scalar, - permuted_input_eval: C::Scalar, - permuted_input_inv_eval: C::Scalar, - permuted_table_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_permuted_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permuted_input_commitment = transcript.read_point()?; - let permuted_table_commitment = transcript.read_point()?; - - Ok(PermutationCommitments { - 
permuted_input_commitment, - permuted_table_commitment, - }) - } -} - -impl PermutationCommitments { - pub(in crate::plonk) fn read_product_commitment< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; - - Ok(Committed { - permuted: self, - product_commitment, - }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_eval = transcript.read_scalar()?; - let product_next_eval = transcript.read_scalar()?; - let permuted_input_eval = transcript.read_scalar()?; - let permuted_input_inv_eval = transcript.read_scalar()?; - let permuted_table_eval = transcript.read_scalar()?; - - Ok(Evaluated { - committed: self, - product_eval, - product_next_eval, - permuted_input_eval, - permuted_input_inv_eval, - permuted_table_eval, - }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - argument: &'a Argument, - theta: ChallengeTheta, - beta: ChallengeBeta, - gamma: ChallengeGamma, - advice_evals: &[C::Scalar], - fixed_evals: &[C::Scalar], - instance_evals: &[C::Scalar], - challenges: &[C::Scalar], - ) -> impl Iterator + 'a { - let active_rows = C::Scalar::ONE - (l_last + l_blind); - - let product_expression = || { - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - let left = self.product_next_eval - * &(self.permuted_input_eval + &*beta) - * &(self.permuted_table_eval + &*gamma); - - let compress_expressions = |expressions: &[Expression]| { - expressions - .iter() - .map(|expression| { - expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) - }; - let right = self.product_eval - * &(compress_expressions(&argument.input_expressions) + &*beta) - * &(compress_expressions(&argument.table_expressions) + &*gamma); - - (left - &right) * &active_rows - }; - - std::iter::empty() - .chain( - // l_0(X) * (1 - z(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), - ) - .chain( - // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), - ) - .chain( - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - // ) = 0 - Some(product_expression()), - ) - .chain(Some( - // l_0(X) * (a'(X) - s'(X)) = 0 - l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), - )) - .chain(Some( - // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - (self.permuted_input_eval - &self.permuted_table_eval) - * &(self.permuted_input_eval - &self.permuted_input_inv_eval) - * &active_rows, - )) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open lookup input commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - *x, - self.permuted_input_eval, - ))) - // Open lookup table commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_table_commitment, - *x, - self.permuted_table_eval, - ))) - // Open lookup input commitments at \omega^{-1} x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - x_inv, - self.permuted_input_inv_eval, - ))) - // Open lookup product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } -} diff --git a/halo2_proofs_rm/src/plonk/permutation.rs b/halo2_proofs_rm/src/plonk/permutation.rs deleted file mode 100644 index 22c1fad6c3..0000000000 --- a/halo2_proofs_rm/src/plonk/permutation.rs +++ /dev/null @@ -1,172 +0,0 @@ -//! Implementation of permutation argument. 
- -use super::circuit::{Any, Column}; -use crate::{ - arithmetic::CurveAffine, - helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, - SerdeCurveAffine, SerdePrimeField, - }, - poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, - SerdeFormat, -}; - -pub(crate) mod keygen; -pub(crate) mod prover; -pub(crate) mod verifier; - -pub use keygen::Assembly; - -use std::io; - -/// A permutation argument. -#[derive(Debug, Clone)] -pub struct Argument { - /// A sequence of columns involved in the argument. - pub(super) columns: Vec>, -} - -impl Argument { - pub(crate) fn new() -> Self { - Argument { columns: vec![] } - } - - /// Returns the minimum circuit degree required by the permutation argument. - /// The argument may use larger degree gates depending on the actual - /// circuit's degree and how many columns are involved in the permutation. - pub(crate) fn required_degree(&self) -> usize { - // degree 2: - // l_0(X) * (1 - z(X)) = 0 - // - // We will fit as many polynomials p_i(X) as possible - // into the required degree of the circuit, so the - // following will not affect the required degree of - // this middleware. - // - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - // - // On the first sets of columns, except the first - // set, we will do - // - // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 - // - // where z'(X) is the permutation for the previous set - // of columns. - // - // On the final set of columns, we will do - // - // degree 3: - // l_last(X) * (z'(X)^2 - z'(X)) = 0 - // - // which will allow the last value to be zero to - // ensure the argument is perfectly complete. - - // There are constraints of degree 3 regardless of the - // number of columns involved. 
- 3 - } - - pub(crate) fn add_column(&mut self, column: Column) { - if !self.columns.contains(&column) { - self.columns.push(column); - } - } - - /// Returns columns that participate on the permutation argument. - pub fn get_columns(&self) -> Vec> { - self.columns.clone() - } -} - -/// The verifying key for a single permutation argument. -#[derive(Clone, Debug)] -pub struct VerifyingKey { - commitments: Vec, -} - -impl VerifyingKey { - /// Returns commitments of sigma polynomials - pub fn commitments(&self) -> &Vec { - &self.commitments - } - - pub(crate) fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> - where - C: SerdeCurveAffine, - { - for commitment in &self.commitments { - commitment.write(writer, format)?; - } - Ok(()) - } - - pub(crate) fn read( - reader: &mut R, - argument: &Argument, - format: SerdeFormat, - ) -> io::Result - where - C: SerdeCurveAffine, - { - let commitments = (0..argument.columns.len()) - .map(|_| C::read(reader, format)) - .collect::, _>>()?; - Ok(VerifyingKey { commitments }) - } - - pub(crate) fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - self.commitments.len() * C::byte_length(format) - } -} - -/// The proving key for a single permutation argument. -#[derive(Clone, Debug)] -pub(crate) struct ProvingKey { - permutations: Vec>, - polys: Vec>, - pub(super) cosets: Vec>, -} - -impl ProvingKey -where - C::Scalar: SerdePrimeField, -{ - /// Reads proving key for a single permutation argument from buffer using `Polynomial::read`. - pub(super) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - let permutations = read_polynomial_vec(reader, format)?; - let polys = read_polynomial_vec(reader, format)?; - let cosets = read_polynomial_vec(reader, format)?; - Ok(ProvingKey { - permutations, - polys, - cosets, - }) - } - - /// Writes proving key for a single permutation argument to buffer using `Polynomial::write`. 
- pub(super) fn write( - &self, - writer: &mut W, - format: SerdeFormat, - ) -> io::Result<()> { - write_polynomial_slice(&self.permutations, writer, format)?; - write_polynomial_slice(&self.polys, writer, format)?; - write_polynomial_slice(&self.cosets, writer, format)?; - Ok(()) - } -} - -impl ProvingKey { - /// Gets the total number of bytes in the serialization of `self` - pub(super) fn bytes_length(&self) -> usize { - polynomial_slice_byte_length(&self.permutations) - + polynomial_slice_byte_length(&self.polys) - + polynomial_slice_byte_length(&self.cosets) - } -} diff --git a/halo2_proofs_rm/src/plonk/permutation/keygen.rs b/halo2_proofs_rm/src/plonk/permutation/keygen.rs deleted file mode 100644 index 32ee0aa25e..0000000000 --- a/halo2_proofs_rm/src/plonk/permutation/keygen.rs +++ /dev/null @@ -1,543 +0,0 @@ -use ff::{Field, PrimeField}; -use group::Curve; - -use super::{Argument, ProvingKey, VerifyingKey}; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - plonk::{Any, Column, Error}, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; - -#[cfg(feature = "thread-safe-region")] -use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; - -#[cfg(not(feature = "thread-safe-region"))] -use crate::multicore::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; - -#[cfg(feature = "thread-safe-region")] -use std::collections::{BTreeSet, HashMap}; - -#[cfg(not(feature = "thread-safe-region"))] -/// Struct that accumulates all the necessary data in order to construct the permutation argument. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Assembly { - /// Columns that participate on the copy permutation argument. - columns: Vec>, - /// Mapping of the actual copies done. - mapping: Vec>, - /// Some aux data used to swap positions directly when sorting. 
- aux: Vec>, - /// More aux data - sizes: Vec>, -} - -// TODO: Dedup with other Cell definition -#[derive(Clone, Debug)] -pub struct Cell { - pub column: Column, - pub row: usize, -} - -#[derive(Clone, Debug)] -pub struct AssemblyMid { - pub copies: Vec<(Cell, Cell)>, -} - -#[derive(Clone, Debug)] -pub struct AssemblyFront { - n: usize, - columns: Vec>, - pub(crate) copies: Vec<(Cell, Cell)>, -} - -impl AssemblyFront { - pub(crate) fn new(n: usize, p: &Argument) -> Self { - Self { - n, - columns: p.columns.clone(), - copies: Vec::new(), - } - } - - pub(crate) fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.columns.contains(&left_column) { - return Err(Error::ColumnNotInPermutation(left_column)); - } - if !self.columns.contains(&right_column) { - return Err(Error::ColumnNotInPermutation(right_column)); - } - // Check bounds - if left_row >= self.n || right_row >= self.n { - return Err(Error::BoundsFailure); - } - self.copies.push(( - Cell { - column: left_column, - row: left_row, - }, - Cell { - column: right_column, - row: right_row, - }, - )); - Ok(()) - } -} - -#[cfg(not(feature = "thread-safe-region"))] -impl Assembly { - pub(crate) fn new_from_assembly_mid( - n: usize, - p: &Argument, - a: &AssemblyMid, - ) -> Result { - let mut assembly = Self::new(n, p); - for copy in &a.copies { - assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; - } - Ok(assembly) - } - - pub(crate) fn new(n: usize, p: &Argument) -> Self { - // Initialize the copy vector to keep track of copy constraints in all - // the permutation arguments. 
- let mut columns = vec![]; - for i in 0..p.columns.len() { - // Computes [(i, 0), (i, 1), ..., (i, n - 1)] - columns.push((0..n).map(|j| (i, j)).collect()); - } - - // Before any equality constraints are applied, every cell in the permutation is - // in a 1-cycle; therefore mapping and aux are identical, because every cell is - // its own distinguished element. - Assembly { - columns: p.columns.clone(), - mapping: columns.clone(), - aux: columns, - sizes: vec![vec![1usize; n]; p.columns.len()], - } - } - - pub(crate) fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - let left_column = self - .columns - .iter() - .position(|c| c == &left_column) - .ok_or(Error::ColumnNotInPermutation(left_column))?; - let right_column = self - .columns - .iter() - .position(|c| c == &right_column) - .ok_or(Error::ColumnNotInPermutation(right_column))?; - - // Check bounds - if left_row >= self.mapping[left_column].len() - || right_row >= self.mapping[right_column].len() - { - return Err(Error::BoundsFailure); - } - - // See book/src/design/permutation.md for a description of this algorithm. - - let mut left_cycle = self.aux[left_column][left_row]; - let mut right_cycle = self.aux[right_column][right_row]; - - // If left and right are in the same cycle, do nothing. - if left_cycle == right_cycle { - return Ok(()); - } - - if self.sizes[left_cycle.0][left_cycle.1] < self.sizes[right_cycle.0][right_cycle.1] { - std::mem::swap(&mut left_cycle, &mut right_cycle); - } - - // Merge the right cycle into the left one. 
- self.sizes[left_cycle.0][left_cycle.1] += self.sizes[right_cycle.0][right_cycle.1]; - let mut i = right_cycle; - loop { - self.aux[i.0][i.1] = left_cycle; - i = self.mapping[i.0][i.1]; - if i == right_cycle { - break; - } - } - - let tmp = self.mapping[left_column][left_row]; - self.mapping[left_column][left_row] = self.mapping[right_column][right_row]; - self.mapping[right_column][right_row] = tmp; - - Ok(()) - } - - pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> VerifyingKey { - build_vk(params, domain, p, |i, j| self.mapping[i][j]) - } - - pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> ProvingKey { - build_pk(params, domain, p, |i, j| self.mapping[i][j]) - } - - /// Returns columns that participate in the permutation argument. - pub fn columns(&self) -> &[Column] { - &self.columns - } - - /// Returns mappings of the copies. - pub fn mapping( - &self, - ) -> impl Iterator + '_> { - self.mapping.iter().map(|c| c.par_iter().copied()) - } -} - -#[cfg(feature = "thread-safe-region")] -/// Struct that accumulates all the necessary data in order to construct the permutation argument. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Assembly { - /// Columns that participate on the copy permutation argument. - columns: Vec>, - /// Mapping of the actual copies done. - cycles: Vec>, - /// Mapping of the actual copies done. - ordered_cycles: Vec>, - /// Mapping of the actual copies done. 
- aux: HashMap<(usize, usize), usize>, - /// total length of a column - col_len: usize, - /// number of columns - num_cols: usize, -} - -#[cfg(feature = "thread-safe-region")] -impl Assembly { - pub(crate) fn new_from_assembly_mid( - n: usize, - p: &Argument, - a: &AssemblyMid, - ) -> Result { - let mut assembly = Self::new(n, p); - for copy in &a.copies { - assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; - } - Ok(assembly) - } - - pub(crate) fn new(n: usize, p: &Argument) -> Self { - Assembly { - columns: p.columns.clone(), - cycles: Vec::with_capacity(n), - ordered_cycles: Vec::with_capacity(n), - aux: HashMap::new(), - col_len: n, - num_cols: p.columns.len(), - } - } - - pub(crate) fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - let left_column = self - .columns - .iter() - .position(|c| c == &left_column) - .ok_or(Error::ColumnNotInPermutation(left_column))?; - let right_column = self - .columns - .iter() - .position(|c| c == &right_column) - .ok_or(Error::ColumnNotInPermutation(right_column))?; - - // Check bounds - if left_row >= self.col_len || right_row >= self.col_len { - return Err(Error::BoundsFailure); - } - - let left_cycle = self.aux.get(&(left_column, left_row)); - let right_cycle = self.aux.get(&(right_column, right_row)); - - // extract cycle elements - let right_cycle_elems = match right_cycle { - Some(i) => { - let entry = self.cycles[*i].clone(); - self.cycles[*i] = vec![]; - entry - } - None => [(right_column, right_row)].into(), - }; - - assert!(right_cycle_elems.contains(&(right_column, right_row))); - - // merge cycles - let cycle_idx = match left_cycle { - Some(i) => { - let entry = &mut self.cycles[*i]; - entry.extend(right_cycle_elems.clone()); - *i - } - // if they were singletons -- create a new cycle entry - None => { - let mut set: Vec<(usize, usize)> = right_cycle_elems.clone(); - set.push((left_column, left_row)); - 
self.cycles.push(set); - let cycle_idx = self.cycles.len() - 1; - self.aux.insert((left_column, left_row), cycle_idx); - cycle_idx - } - }; - - let index_updates = vec![cycle_idx; right_cycle_elems.len()].into_iter(); - let updates = right_cycle_elems.into_iter().zip(index_updates); - - self.aux.extend(updates); - - Ok(()) - } - - /// Builds the ordered mapping of the cycles. - /// This will only get executed once. - pub fn build_ordered_mapping(&mut self) { - use crate::multicore::IntoParallelRefMutIterator; - - // will only get called once - if self.ordered_cycles.is_empty() && !self.cycles.is_empty() { - self.ordered_cycles = self - .cycles - .par_iter_mut() - .map(|col| { - let mut set = BTreeSet::new(); - set.extend(col.clone()); - // free up memory - *col = vec![]; - set - }) - .collect(); - } - } - - fn mapping_at_idx(&self, col: usize, row: usize) -> (usize, usize) { - assert!( - !self.ordered_cycles.is_empty() || self.cycles.is_empty(), - "cycles have not been ordered" - ); - - if let Some(cycle_idx) = self.aux.get(&(col, row)) { - let cycle = &self.ordered_cycles[*cycle_idx]; - let mut cycle_iter = cycle.range(( - std::ops::Bound::Excluded((col, row)), - std::ops::Bound::Unbounded, - )); - // point to the next node in the cycle - match cycle_iter.next() { - Some((i, j)) => (*i, *j), - // wrap back around to the first element which SHOULD exist - None => *(cycle.iter().next().unwrap()), - } - // is a singleton - } else { - (col, row) - } - } - - pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - &mut self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> VerifyingKey { - self.build_ordered_mapping(); - build_vk(params, domain, p, |i, j| self.mapping_at_idx(i, j)) - } - - pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - &mut self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> ProvingKey { - self.build_ordered_mapping(); - build_pk(params, domain, p, |i, j| 
self.mapping_at_idx(i, j)) - } - - /// Returns columns that participate in the permutation argument. - pub fn columns(&self) -> &[Column] { - &self.columns - } - - /// Returns mappings of the copies. - pub fn mapping( - &self, - ) -> impl Iterator + '_> { - (0..self.num_cols).map(move |i| { - (0..self.col_len) - .into_par_iter() - .map(move |j| self.mapping_at_idx(i, j)) - }) - } -} - -pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - params: &P, - domain: &EvaluationDomain, - p: &Argument, - mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, -) -> ProvingKey { - // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] - let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; - { - let omega = domain.get_omega(); - parallelize(&mut omega_powers, |o, start| { - let mut cur = omega.pow_vartime([start as u64]); - for v in o.iter_mut() { - *v = cur; - cur *= ω - } - }) - } - - // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] - let mut deltaomega = vec![omega_powers; p.columns.len()]; - { - parallelize(&mut deltaomega, |o, start| { - let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); - for omega_powers in o.iter_mut() { - for v in omega_powers { - *v *= &cur; - } - cur *= &C::Scalar::DELTA; - } - }); - } - - // Compute permutation polynomials, convert to coset form. 
- let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; - { - parallelize(&mut permutations, |o, start| { - for (x, permutation_poly) in o.iter_mut().enumerate() { - let i = start + x; - for (j, p) in permutation_poly.iter_mut().enumerate() { - let (permuted_i, permuted_j) = mapping(i, j); - *p = deltaomega[permuted_i][permuted_j]; - } - } - }); - } - - let mut polys = vec![domain.empty_coeff(); p.columns.len()]; - { - parallelize(&mut polys, |o, start| { - for (x, poly) in o.iter_mut().enumerate() { - let i = start + x; - let permutation_poly = permutations[i].clone(); - *poly = domain.lagrange_to_coeff(permutation_poly); - } - }); - } - - let mut cosets = vec![domain.empty_extended(); p.columns.len()]; - { - parallelize(&mut cosets, |o, start| { - for (x, coset) in o.iter_mut().enumerate() { - let i = start + x; - let poly = polys[i].clone(); - *coset = domain.coeff_to_extended(poly); - } - }); - } - - ProvingKey { - permutations, - polys, - cosets, - } -} - -pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - params: &P, - domain: &EvaluationDomain, - p: &Argument, - mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, -) -> VerifyingKey { - // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] - let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; - { - let omega = domain.get_omega(); - parallelize(&mut omega_powers, |o, start| { - let mut cur = omega.pow_vartime([start as u64]); - for v in o.iter_mut() { - *v = cur; - cur *= ω - } - }) - } - - // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] - let mut deltaomega = vec![omega_powers; p.columns.len()]; - { - parallelize(&mut deltaomega, |o, start| { - let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); - for omega_powers in o.iter_mut() { - for v in omega_powers { - *v *= &cur; - } - cur *= &::DELTA; - } - }); - } - - // Computes the permutation polynomial based on the permutation - // description in 
the assembly. - let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; - { - parallelize(&mut permutations, |o, start| { - for (x, permutation_poly) in o.iter_mut().enumerate() { - let i = start + x; - for (j, p) in permutation_poly.iter_mut().enumerate() { - let (permuted_i, permuted_j) = mapping(i, j); - *p = deltaomega[permuted_i][permuted_j]; - } - } - }); - } - - // Pre-compute commitments for the URS. - let mut commitments = Vec::with_capacity(p.columns.len()); - for permutation in &permutations { - // Compute commitment to permutation polynomial - commitments.push( - params - .commit_lagrange(permutation, Blind::default()) - .to_affine(), - ); - } - - VerifyingKey { commitments } -} diff --git a/halo2_proofs_rm/src/plonk/permutation/prover.rs b/halo2_proofs_rm/src/plonk/permutation/prover.rs deleted file mode 100644 index d6b108554d..0000000000 --- a/halo2_proofs_rm/src/plonk/permutation/prover.rs +++ /dev/null @@ -1,329 +0,0 @@ -use ff::PrimeField; -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use rand_core::RngCore; -use std::iter::{self, ExactSizeIterator}; - -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; -use super::{Argument, ProvingKey}; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - plonk::{self, Error}, - poly::{ - commitment::{Blind, Params}, - Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; - -pub(crate) struct CommittedSet { - pub(crate) permutation_product_poly: Polynomial, - pub(crate) permutation_product_coset: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Committed { - pub(crate) sets: Vec>, -} - -pub struct ConstructedSet { - permutation_product_poly: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Constructed { - sets: Vec>, -} - -pub(crate) struct Evaluated { - constructed: Constructed, -} - -impl Argument { - 
#[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit< - 'params, - C: CurveAffine, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - params: &P, - pk: &plonk::ProvingKey, - pkey: &ProvingKey, - advice: &[Polynomial], - fixed: &[Polynomial], - instance: &[Polynomial], - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - - // How many columns can be included in a single permutation polynomial? - // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This - // will never underflow because of the requirement of at least a degree - // 3 circuit for the permutation argument. - assert!(pk.vk.cs_degree >= 3); - let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); - - // Each column gets its own delta power. - let mut deltaomega = C::Scalar::ONE; - - // Track the "last" value from the previous column set - let mut last_z = C::Scalar::ONE; - - let mut sets = vec![]; - - for (columns, permutations) in self - .columns - .chunks(chunk_len) - .zip(pkey.permutations.chunks(chunk_len)) - { - // Goal is to compute the products of fractions - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where p_j(X) is the jth column in this permutation, - // and i is the ith row of the column. 
- - let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; - - // Iterate over each column of the permutation - for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - for ((modified_values, value), permuted_value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - .zip(permuted_column_values[start..].iter()) - { - *modified_values *= &(*beta * permuted_value + &*gamma + value); - } - }); - } - - // Invert to obtain the denominator for the permutation product polynomial - modified_values.batch_invert(); - - // Iterate over each column again, this time finishing the computation - // of the entire fraction by computing the numerators - for &column in columns.iter() { - let omega = domain.get_omega(); - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); - for (modified_values, value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - { - // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); - deltaomega *= ω - } - }); - deltaomega *= &::DELTA; - } - - // The modified_values vector is a vector of products of fractions - // of the form - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where i is the index into modified_values, for the jth column in - // the permutation - - // Compute the evaluations of the permutation product polynomial - // over our domain, starting with z[0] = 1 - let mut z = vec![last_z]; - for row in 
1..(params.n() as usize) { - let mut tmp = z[row - 1]; - - tmp *= &modified_values[row - 1]; - z.push(tmp); - } - let mut z = domain.lagrange_from_vec(z); - // Set blinding factors - for z in &mut z[params.n() as usize - blinding_factors..] { - *z = C::Scalar::random(&mut rng); - } - // Set new last_z - last_z = z[params.n() as usize - (blinding_factors + 1)]; - - let blind = Blind(C::Scalar::random(&mut rng)); - - let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); - let permutation_product_blind = blind; - let z = domain.lagrange_to_coeff(z); - let permutation_product_poly = z.clone(); - - let permutation_product_coset = domain.coeff_to_extended(z.clone()); - - let permutation_product_commitment = - permutation_product_commitment_projective.to_affine(); - - // Hash the permutation product commitment - transcript.write_point(permutation_product_commitment)?; - - sets.push(CommittedSet { - permutation_product_poly, - permutation_product_coset, - permutation_product_blind, - }); - } - - Ok(Committed { sets }) - } -} - -impl Committed { - pub(in crate::plonk) fn construct(self) -> Constructed { - Constructed { - sets: self - .sets - .iter() - .map(|set| ConstructedSet { - permutation_product_poly: set.permutation_product_poly.clone(), - permutation_product_blind: set.permutation_product_blind, - }) - .collect(), - } - } -} - -impl super::ProvingKey { - pub(in crate::plonk) fn open( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - self.polys.iter().map(move |poly| ProverQuery { - point: *x, - poly, - blind: Blind::default(), - }) - } - - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - &self, - x: ChallengeX, - transcript: &mut T, - ) -> Result<(), Error> { - // Hash permutation evals - for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { - transcript.write_scalar(eval)?; - } - - Ok(()) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: 
&plonk::ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let blinding_factors = pk.vk.cs.blinding_factors(); - - { - let mut sets = self.sets.iter(); - - while let Some(set) = sets.next() { - let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); - - let permutation_product_next_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation::next()), - ); - - // Hash permutation product evals - for eval in iter::empty() - .chain(Some(&permutation_product_eval)) - .chain(Some(&permutation_product_next_eval)) - { - transcript.write_scalar(*eval)?; - } - - // If we have any remaining sets to process, evaluate this set at omega^u - // so we can constrain the last value of its running product to equal the - // first value of the next set's running product, chaining them together. - if sets.len() > 0 { - let permutation_product_last_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), - ); - - transcript.write_scalar(permutation_product_last_eval)?; - } - } - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a plonk::ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.cs.blinding_factors(); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = pk - .vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.constructed.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega x - .chain(Some(ProverQuery { - point: *x, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - .chain(Some(ProverQuery { - point: x_next, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - })) - // Open it 
at \omega^{last} x for all but the last set. This rotation is only - // sensical for the first row, but we only use this rotation in a constraint - // that is gated on l_0. - .chain( - self.constructed - .sets - .iter() - .rev() - .skip(1) - .flat_map(move |set| { - Some(ProverQuery { - point: x_last, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - }) - }), - ) - } -} diff --git a/halo2_proofs_rm/src/plonk/permutation/verifier.rs b/halo2_proofs_rm/src/plonk/permutation/verifier.rs deleted file mode 100644 index a4637422ae..0000000000 --- a/halo2_proofs_rm/src/plonk/permutation/verifier.rs +++ /dev/null @@ -1,254 +0,0 @@ -use ff::{Field, PrimeField}; -use std::iter; - -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; -use super::{Argument, VerifyingKey}; -use crate::{ - arithmetic::CurveAffine, - plonk::{self, Error}, - poly::{commitment::MSM, Rotation, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; - -pub struct Committed { - permutation_product_commitments: Vec, -} - -pub struct EvaluatedSet { - permutation_product_commitment: C, - permutation_product_eval: C::Scalar, - permutation_product_next_eval: C::Scalar, - permutation_product_last_eval: Option, -} - -pub struct CommonEvaluated { - permutation_evals: Vec, -} - -pub struct Evaluated { - sets: Vec>, -} - -impl Argument { - pub(crate) fn read_product_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - vk: &plonk::VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - let chunk_len = vk.cs_degree - 2; - - let permutation_product_commitments = self - .columns - .chunks(chunk_len) - .map(|_| transcript.read_point()) - .collect::, _>>()?; - - Ok(Committed { - permutation_product_commitments, - }) - } -} - -impl VerifyingKey { - pub(in crate::plonk) fn evaluate, T: TranscriptRead>( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permutation_evals = self - .commitments - 
.iter() - .map(|_| transcript.read_scalar()) - .collect::, _>>()?; - - Ok(CommonEvaluated { permutation_evals }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let mut sets = vec![]; - - let mut iter = self.permutation_product_commitments.into_iter(); - - while let Some(permutation_product_commitment) = iter.next() { - let permutation_product_eval = transcript.read_scalar()?; - let permutation_product_next_eval = transcript.read_scalar()?; - let permutation_product_last_eval = if iter.len() > 0 { - Some(transcript.read_scalar()?) - } else { - None - }; - - sets.push(EvaluatedSet { - permutation_product_commitment, - permutation_product_eval, - permutation_product_next_eval, - permutation_product_last_eval, - }); - } - - Ok(Evaluated { sets }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - vk: &'a plonk::VerifyingKey, - p: &'a Argument, - common: &'a CommonEvaluated, - advice_evals: &'a [C::Scalar], - fixed_evals: &'a [C::Scalar], - instance_evals: &'a [C::Scalar], - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - beta: ChallengeBeta, - gamma: ChallengeGamma, - x: ChallengeX, - ) -> impl Iterator + 'a { - let chunk_len = vk.cs_degree - 2; - iter::empty() - // Enforce only for the first set. - // l_0(X) * (1 - z_0(X)) = 0 - .chain( - self.sets - .first() - .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), - ) - // Enforce only for the last set. - // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 - .chain(self.sets.last().map(|last_set| { - (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) - * &l_last - })) - // Except for the first set, enforce. 
- // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - .chain( - self.sets - .iter() - .skip(1) - .zip(self.sets.iter()) - .map(|(set, last_set)| { - ( - set.permutation_product_eval, - last_set.permutation_product_last_eval.unwrap(), - ) - }) - .map(move |(set, prev_last)| (set - &prev_last) * &l_0), - ) - // And for all the sets we enforce: - // (1 - (l_last(X) + l_blind(X))) * ( - // z_i(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z_i(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - .chain( - self.sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(common.permutation_evals.chunks(chunk_len)) - .enumerate() - .map(move |(chunk_index, ((set, columns), permutation_evals))| { - let mut left = set.permutation_product_next_eval; - for (eval, permutation_eval) in columns - .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals - [vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) - .zip(permutation_evals.iter()) - { - left *= &(eval + &(*beta * permutation_eval) + &*gamma); - } - - let mut right = set.permutation_product_eval; - let mut current_delta = (*beta * &*x) - * &(::DELTA - .pow_vartime([(chunk_index * chunk_len) as u64])); - for eval in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => { - advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Fixed => { - fixed_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - Any::Instance => { - instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] - } - }) { - right *= &(eval + ¤t_delta + &*gamma); - current_delta *= &C::Scalar::DELTA; - } - - (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) - }), - ) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r 
plonk::VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = vk.cs.blinding_factors(); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega^{-1} x - // Open permutation product commitments at x and \omega x - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - *x, - set.permutation_product_eval, - ))) - .chain(Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_next, - set.permutation_product_next_eval, - ))) - })) - // Open it at \omega^{last} x for all but the last set - .chain(self.sets.iter().rev().skip(1).flat_map(move |set| { - Some(VerifierQuery::new_commitment( - &set.permutation_product_commitment, - x_last, - set.permutation_product_last_eval.unwrap(), - )) - })) - } -} - -impl CommonEvaluated { - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vkey: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - // Open permutation commitments for each permutation argument at x - vkey.commitments - .iter() - .zip(self.permutation_evals.iter()) - .map(move |(commitment, &eval)| VerifierQuery::new_commitment(commitment, *x, eval)) - } -} diff --git a/halo2_proofs_rm/src/plonk/prover.rs b/halo2_proofs_rm/src/plonk/prover.rs deleted file mode 100644 index 1168b1d519..0000000000 --- a/halo2_proofs_rm/src/plonk/prover.rs +++ /dev/null @@ -1,994 +0,0 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; -use group::Curve; -use rand_core::RngCore; -use std::collections::{BTreeSet, HashSet}; -use std::ops::RangeTo; -use std::{collections::HashMap, iter}; - -use super::{ - circuit::{ - compile_circuit, - sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, Fixed, Instance, Selector, - 
WitnessCalculator, - }, - lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, -}; - -use crate::{ - arithmetic::{eval_polynomial, CurveAffine}, - circuit::Value, - plonk::Assigned, - poly::{ - commitment::{Blind, CommitmentScheme, Params, Prover}, - Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, - }, -}; -use crate::{ - poly::batch_invert_assigned, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use group::prime::PrimeCurveAffine; - -/// Collection of instance data used during proving for a single circuit proof. -#[derive(Debug)] -struct InstanceSingle { - pub instance_values: Vec>, - pub instance_polys: Vec>, -} - -/// Collection of advice data used during proving for a single circuit proof. -#[derive(Debug, Clone)] -struct AdviceSingle { - pub advice_polys: Vec>, - pub advice_blinds: Vec>, -} - -/// The prover object used to create proofs interactively by passing the witnesses to commit at -/// each phase. This works for a single proof. This is a wrapper over ProverV2. -#[derive(Debug)] -pub struct ProverV2Single< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, ->(ProverV2<'a, 'params, Scheme, P, E, R, T>); - -impl< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - > ProverV2Single<'a, 'params, Scheme, P, E, R, T> -{ - /// Create a new prover object - pub fn new( - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - // TODO: If this was a vector the usage would be simpler - instance: &[&[Scheme::Scalar]], - rng: R, - transcript: &'a mut T, - ) -> Result - // TODO: Can I move this `where` to the struct definition? 
- where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - Ok(Self(ProverV2::new( - params, - pk, - &[instance], - rng, - transcript, - )?)) - } - - /// Commit the `witness` at `phase` and return the challenges after `phase`. - pub fn commit_phase( - &mut self, - phase: u8, - witness: Vec>>>, - ) -> Result, Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - self.0.commit_phase(phase, vec![witness]) - } - - /// Finalizes the proof creation. - pub fn create_proof(self) -> Result<(), Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - self.0.create_proof() - } -} - -/// The prover object used to create proofs interactively by passing the witnesses to commit at -/// each phase. This supports batch proving. -#[derive(Debug)] -pub struct ProverV2< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, -> { - // Circuit and setup fields - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - phases: Vec, - // State - instance: Vec>, - advice: Vec>, - challenges: HashMap, - next_phase_index: usize, - rng: R, - transcript: &'a mut T, - _marker: std::marker::PhantomData<(P, E)>, -} - -impl< - 'a, - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - > ProverV2<'a, 'params, Scheme, P, E, R, T> -{ - /// Create a new prover object - pub fn new( - params: &'params Scheme::ParamsProver, - pk: &'a ProvingKey, - // TODO: If this was a vector the usage would be simpler - instances: &[&[&[Scheme::Scalar]]], - rng: R, - transcript: &'a mut T, - ) -> Result - // TODO: Can I move this `where` to the struct definition? 
- where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - for instance in instances.iter() { - if instance.len() != pk.vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - // Hash verification key into transcript - pk.vk.hash_into(transcript)?; - - let meta = &pk.vk.cs; - let phases = meta.phases().collect(); - - let domain = &pk.vk.domain; - - // TODO: Name this better - let mut instance_fn = - |instance: &[&[Scheme::Scalar]]| -> Result, Error> { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - // dbg!(1, value); - transcript.common_scalar(*value)?; - } - *poly = *value; - } - Ok(poly) - }) - .collect::, _>>()?; - - if P::QUERY_INSTANCE { - let instance_commitments_projective: Vec<_> = instance_values - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) - .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - - for commitment in &instance_commitments { - // dbg!(2, commitment); - transcript.common_point(*commitment)?; - } - } - - let instance_polys: Vec<_> = instance_values - .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) - }) - .collect(); - - Ok(InstanceSingle { - instance_values, - instance_polys, - }) - }; - let instance: Vec> = instances - .iter() - .map(|instance| instance_fn(instance)) - .collect::, _>>()?; - - let advice = vec![ - AdviceSingle:: { - // Create 
vectors with empty polynomials to free space while they are not being used - advice_polys: vec![ - Polynomial::new_empty(0, Scheme::Scalar::ZERO); - meta.num_advice_columns - ], - advice_blinds: vec![Blind::default(); meta.num_advice_columns], - }; - instances.len() - ]; - let challenges = HashMap::::with_capacity(meta.num_challenges); - - Ok(ProverV2 { - params, - pk, - phases, - instance, - rng, - transcript, - advice, - challenges, - next_phase_index: 0, - _marker: std::marker::PhantomData {}, - }) - } - - /// Commit the `witness` at `phase` and return the challenges after `phase`. - pub fn commit_phase( - &mut self, - phase: u8, - witness: Vec>>>>, - ) -> Result, Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - let current_phase = match self.phases.get(self.next_phase_index) { - Some(phase) => phase, - None => { - panic!("TODO: Return Error instead. All phases already commited"); - } - }; - if phase != current_phase.0 { - panic!("TODO: Return Error instead. Committing invalid phase"); - } - - let params = self.params; - let meta = &self.pk.vk.cs; - - let mut rng = &mut self.rng; - - let advice = &mut self.advice; - let challenges = &mut self.challenges; - - let column_indices = meta - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - if witness.len() != advice.len() { - return Err(Error::Other("witness.len() != advice.len()".to_string())); - } - for witness_circuit in &witness { - if witness_circuit.len() != meta.num_advice_columns { - return Err(Error::Other(format!( - "unexpected length in witness_circuitk. 
Got {}, expected {}", - witness_circuit.len(), - meta.num_advice_columns, - ))); - } - // Check that all current_phase advice columns are Some, and their length is correct - for (column_index, advice_column) in witness_circuit.iter().enumerate() { - if column_indices.contains(&column_index) { - match advice_column { - None => { - return Err(Error::Other(format!( - "expected advice column with index {} at phase {}", - column_index, current_phase.0 - ))) - } - Some(advice_column) => { - if advice_column.len() != params.n() as usize { - return Err(Error::Other(format!( - "expected advice column with index {} to have length {}", - column_index, - params.n(), - ))); - } - } - } - } else if advice_column.is_some() { - return Err(Error::Other(format!( - "expected no advice column with index {} at phase {}", - column_index, current_phase.0 - ))); - }; - } - } - - let mut commit_phase_fn = |advice: &mut AdviceSingle, - witness: Vec< - Option, LagrangeCoeff>>, - >| - -> Result<(), Error> { - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); - let mut advice_values = - batch_invert_assigned::(witness.into_iter().flatten().collect()); - let unblinded_advice: HashSet = - HashSet::from_iter(meta.unblinded_advice_columns.clone()); - - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] 
{ - assert_eq!(*cell, Scheme::Scalar::ZERO); - } - } - } - - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); - - for commitment in &advice_commitments { - self.transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] = advice_values; - advice.advice_blinds[*column_index] = blind; - } - Ok(()) - }; - - for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { - commit_phase_fn( - advice, - witness - .into_iter() - .map(|v| v.map(Polynomial::new_lagrange_from_vec)) - .collect(), - )?; - } - - for (index, phase) in meta.challenge_phase.iter().enumerate() { - if current_phase == phase { - let existing = - challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); - assert!(existing.is_none()); - } - } - - self.next_phase_index += 1; - Ok(challenges.clone()) - } - - /// Finalizes the proof creation. 
- pub fn create_proof(mut self) -> Result<(), Error> - where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, - { - let params = self.params; - let meta = &self.pk.vk.cs; - // let queries = &self.pk.vk.queries; - let pk = self.pk; - let domain = &self.pk.vk.domain; - - let mut rng = self.rng; - - let instance = std::mem::take(&mut self.instance); - let advice = std::mem::take(&mut self.advice); - let mut challenges = self.challenges; - - assert_eq!(challenges.len(), meta.num_challenges); - let challenges = (0..meta.num_challenges) - .map(|index| challenges.remove(&index).unwrap()) - .collect::>(); - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); - - let mut lookups_fn = - |instance: &InstanceSingle, - advice: &AdviceSingle| - -> Result>, Error> { - meta.lookups - .iter() - .map(|lookup| { - lookup.commit_permuted( - pk, - params, - domain, - theta, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>() - }; - let lookups: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, Error> { - // Construct and commit to permuted values for each lookup - lookups_fn(instance, advice) - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); - - // Commit to permutation. 
- let permutations: Vec> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| { - meta.permutation.commit( - params, - pk, - &pk.permutation, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - beta, - gamma, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>()?; - - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - // Construct and commit to products for each lookup - lookups - .into_iter() - .map(|lookup| { - lookup.commit_product(pk, params, beta, gamma, &mut rng, self.transcript) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, _> { - // Compress expressions for each shuffle - meta.shuffles - .iter() - .map(|shuffle| { - shuffle.commit_product( - pk, - params, - domain, - theta, - gamma, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Commit to the vanishing argument's random polynomial for blinding h(x_3) - let vanishing = vanishing::Argument::commit(params, domain, &mut rng, self.transcript)?; - - // Obtain challenge for keeping all separate gates linearly independent - let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); - - // Calculate the advice polys - let advice: Vec> = advice - .into_iter() - .map( - |AdviceSingle { - advice_polys, - advice_blinds, - }| { - AdviceSingle { - advice_polys: advice_polys - .into_iter() - .map(|poly| domain.lagrange_to_coeff(poly)) - .collect::>(), - advice_blinds, - } - }, - ) - .collect(); - - // Evaluate the h(X) polynomial - let h_poly = pk.ev.evaluate_h( - pk, - &advice - .iter() - .map(|a| a.advice_polys.as_slice()) - .collect::>(), - &instance - .iter() - .map(|i| i.instance_polys.as_slice()) - .collect::>(), - &challenges, - *y, - *beta, - *gamma, - *theta, - &lookups, - &shuffles, - 
&permutations, - ); - - // Construct the vanishing argument's h(X) commitments - let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, self.transcript)?; - - let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); - let xn = x.pow([params.n()]); - - if P::QUERY_INSTANCE { - // Compute and hash instance evals for the circuit instance - for instance in instance.iter() { - // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = meta - .instance_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &instance.instance_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each instance column evaluation - for eval in instance_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - } - } - - // Compute and hash advice evals for the circuit instance - for advice in advice.iter() { - // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = meta - .advice_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &advice.advice_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - // dbg!(&advice_evals); - - // Hash each advice column evaluation - for eval in advice_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - } - - // Compute and hash fixed evals - let fixed_evals: Vec<_> = meta - .fixed_queries - .iter() - .map(|&(column, at)| { - eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) - }) - .collect(); - - // Hash each fixed column evaluation - for eval in fixed_evals.iter() { - self.transcript.write_scalar(*eval)?; - } - - let vanishing = vanishing.evaluate(x, xn, domain, self.transcript)?; - - // Evaluate common permutation data - pk.permutation.evaluate(x, self.transcript)?; - - // Evaluate the permutations, if any, at omega^i x. 
- let permutations: Vec> = permutations - .into_iter() - .map(|permutation| -> Result<_, _> { - permutation.construct().evaluate(pk, x, self.transcript) - }) - .collect::, _>>()?; - - // Evaluate the lookups, if any, at omega^i x. - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|p| p.evaluate(pk, x, self.transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Evaluate the shuffles, if any, at omega^i x. - let shuffles: Vec>> = shuffles - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|p| p.evaluate(pk, x, self.transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let instances = instance - .iter() - .zip(advice.iter()) - .zip(permutations.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { - iter::empty() - .chain( - P::QUERY_INSTANCE - .then_some(meta.instance_queries.iter().map(move |&(column, at)| { - ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &instance.instance_polys[column.index()], - blind: Blind::default(), - } - })) - .into_iter() - .flatten(), - ) - .chain( - meta.advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - }), - ) - .chain(permutation.open(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) - }) - .chain(meta.fixed_queries.iter().map(|&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &pk.fixed_polys[column.index()], - blind: Blind::default(), - })) - .chain(pk.permutation.open(x)) - // We query the h(X) polynomial at x - .chain(vanishing.open(x)); - - let prover = P::new(params); - println!("DBG create_proof"); - prover - .create_proof(rng, self.transcript, instances) - .map_err(|_| 
Error::ConstraintSystemFailure)?; - - Ok(()) - } -} - -pub(crate) struct WitnessCollection<'a, F: Field> { - pub(crate) k: u32, - pub(crate) current_phase: sealed::Phase, - pub(crate) advice: Vec>>, - // pub(crate) unblinded_advice: HashSet, - pub(crate) challenges: &'a HashMap, - pub(crate) instances: &'a [&'a [F]], - pub(crate) usable_rows: RangeTo, - pub(crate) _marker: std::marker::PhantomData, -} - -impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - -/// This creates a proof for the provided `circuit` when given the public -/// parameters `params` and the proving key [`ProvingKey`] that was -/// generated previously for the same circuit. The provided `instances` -/// are zero-padded internally. 
-pub fn create_proof< - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - ConcreteCircuit: Circuit, ->( - params: &'params Scheme::ParamsProver, - pk: &ProvingKey, - circuits: &[ConcreteCircuit], - instances: &[&[&[Scheme::Scalar]]], - rng: R, - transcript: &mut T, -) -> Result<(), Error> -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - if circuits.len() != instances.len() { - return Err(Error::InvalidInstances); - } - let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; - let mut witness_calcs: Vec<_> = circuits - .iter() - .enumerate() - .map(|(i, circuit)| WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i])) - .collect(); - let mut prover = ProverV2::::new(params, pk, instances, rng, transcript)?; - let mut challenges = HashMap::new(); - let phases = prover.phases.clone(); - for phase in &phases { - println!("DBG phase {}", phase.0); - let mut witnesses = Vec::with_capacity(circuits.len()); - for witness_calc in witness_calcs.iter_mut() { - witnesses.push(witness_calc.calc(phase.0, &challenges)?); - } - challenges = prover.commit_phase(phase.0, witnesses).unwrap(); - } - prover.create_proof() -} - -#[test] -fn test_create_proof() { - use crate::{ - circuit::SimpleFloorPlanner, - plonk::{keygen_pk, keygen_vk, ConstraintSystem}, - poly::kzg::{ - commitment::{KZGCommitmentScheme, ParamsKZG}, - multiopen::ProverSHPLONK, - }, - transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, - }; - use halo2curves::bn256::Bn256; - use rand_core::OsRng; - - #[derive(Clone, Copy)] - struct MyCircuit; - - impl Circuit for MyCircuit { - type Config = (); - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - *self - } - - fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} - - fn synthesize( - &self, - 
_config: Self::Config, - _layouter: impl crate::circuit::Layouter, - ) -> Result<(), Error> { - Ok(()) - } - } - - let params: ParamsKZG = ParamsKZG::setup(3, OsRng); - let vk = keygen_vk(¶ms, &MyCircuit).expect("keygen_vk should not fail"); - let pk = keygen_pk(¶ms, vk, &MyCircuit).expect("keygen_pk should not fail"); - let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); - - // Create proof with wrong number of instances - let proof = create_proof::, ProverSHPLONK<_>, _, _, _, _>( - ¶ms, - &pk, - &[MyCircuit, MyCircuit], - &[], - OsRng, - &mut transcript, - ); - assert!(matches!(proof.unwrap_err(), Error::InvalidInstances)); - - // Create proof with correct number of instances - create_proof::, ProverSHPLONK<_>, _, _, _, _>( - ¶ms, - &pk, - &[MyCircuit, MyCircuit], - &[&[], &[]], - OsRng, - &mut transcript, - ) - .expect("proof generation should not fail"); -} diff --git a/halo2_proofs_rm/src/plonk/shuffle.rs b/halo2_proofs_rm/src/plonk/shuffle.rs deleted file mode 100644 index 0779c2b451..0000000000 --- a/halo2_proofs_rm/src/plonk/shuffle.rs +++ /dev/null @@ -1,76 +0,0 @@ -use super::circuit::{Expression, ExpressionMid}; -use ff::Field; -use std::fmt::{self, Debug}; - -pub(crate) mod prover; -pub(crate) mod verifier; - -/// Expressions involved in a shuffle argument, with a name as metadata. -#[derive(Clone, Debug)] -pub struct ArgumentV2 { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, -} - -/// Expressions involved in a shuffle argument, with a name as metadata. 
-#[derive(Clone)] -pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) shuffle_expressions: Vec>, -} - -impl Debug for Argument { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Argument") - .field("input_expressions", &self.input_expressions) - .field("shuffle_expressions", &self.shuffle_expressions) - .finish() - } -} - -impl Argument { - /// Constructs a new shuffle argument. - /// - /// `shuffle` is a sequence of `(input, shuffle)` tuples. - pub fn new>(name: S, shuffle: Vec<(Expression, Expression)>) -> Self { - let (input_expressions, shuffle_expressions) = shuffle.into_iter().unzip(); - Argument { - name: name.as_ref().to_string(), - input_expressions, - shuffle_expressions, - } - } - - pub(crate) fn required_degree(&self) -> usize { - assert_eq!(self.input_expressions.len(), self.shuffle_expressions.len()); - - let mut input_degree = 1; - for expr in self.input_expressions.iter() { - input_degree = std::cmp::max(input_degree, expr.degree()); - } - let mut shuffle_degree = 1; - for expr in self.shuffle_expressions.iter() { - shuffle_degree = std::cmp::max(shuffle_degree, expr.degree()); - } - - // (1 - (l_last + l_blind)) (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) - std::cmp::max(2 + shuffle_degree, 2 + input_degree) - } - - /// Returns input of this argument - pub fn input_expressions(&self) -> &Vec> { - &self.input_expressions - } - - /// Returns table of this argument - pub fn shuffle_expressions(&self) -> &Vec> { - &self.shuffle_expressions - } - - /// Returns name of this argument - pub fn name(&self) -> &str { - &self.name - } -} diff --git a/halo2_proofs_rm/src/plonk/shuffle/prover.rs b/halo2_proofs_rm/src/plonk/shuffle/prover.rs deleted file mode 100644 index fd30436a47..0000000000 --- a/halo2_proofs_rm/src/plonk/shuffle/prover.rs +++ /dev/null @@ -1,250 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, 
ProvingKey, -}; -use super::Argument; -use crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use ff::WithSmallOrderMulGroup; -use group::{ff::BatchInvert, Curve}; -use rand_core::RngCore; -use std::{ - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -struct Compressed { - input_expression: Polynomial, - shuffle_expression: Polynomial, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - -impl> Argument { - /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... 
+ \theta S_{m-2} + S_{m-1}, - #[allow(clippy::too_many_arguments)] - fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - ) -> Compressed - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the shuffle and compress them - let input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the shuffle and compress them - let shuffle_expression = compress_expressions(&self.shuffle_expressions); - - Compressed { - input_expression, - shuffle_expression, - } - } - - /// Given a Shuffle with input expressions and table expressions this method - /// constructs the grand product polynomial over the shuffle. - /// The grand product polynomial is used to populate the Product struct. - /// The Product struct is added to the Shuffle and finally returned by the method. 
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_product< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - let compressed = self.compress( - pk, - params, - domain, - theta, - advice_values, - fixed_values, - instance_values, - challenges, - ); - - let blinding_factors = pk.vk.cs.blinding_factors(); - - let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; - parallelize(&mut shuffle_product, |shuffle_product, start| { - for (shuffle_product, shuffle_value) in shuffle_product - .iter_mut() - .zip(compressed.shuffle_expression[start..].iter()) - { - *shuffle_product = *gamma + shuffle_value; - } - }); - - shuffle_product.iter_mut().batch_invert(); - - parallelize(&mut shuffle_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - *product *= &(*gamma + compressed.input_expression[i]); - } - }); - - // Compute the evaluations of the shuffle product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(shuffle_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - assert_eq!(z[0], C::Scalar::ONE); - for i in 0..u { - let mut left = z[i + 1]; - let input_value = &compressed.input_expression[i]; - let shuffle_value = &compressed.shuffle_expression[i]; - left *= &(*gamma + shuffle_value); - let mut right = z[i]; - right *= &(*gamma + input_value); - assert_eq!(left, right); - } - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: 
self.constructed.product_blind, - })) - // Open shuffle product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} diff --git a/halo2_proofs_rm/src/plonk/shuffle/verifier.rs b/halo2_proofs_rm/src/plonk/shuffle/verifier.rs deleted file mode 100644 index 379cc5c8a1..0000000000 --- a/halo2_proofs_rm/src/plonk/shuffle/verifier.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::iter; - -use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; -use super::Argument; -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use ff::Field; - -pub struct Committed { - product_commitment: C, -} - -pub struct Evaluated { - committed: Committed, - product_eval: C::Scalar, - product_next_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_product_commitment< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; - - Ok(Committed { product_commitment }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_eval = transcript.read_scalar()?; - let product_next_eval = transcript.read_scalar()?; - - Ok(Evaluated { - committed: self, - product_eval, - product_next_eval, - }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - argument: &'a Argument, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_evals: &[C::Scalar], - fixed_evals: &[C::Scalar], - instance_evals: &[C::Scalar], - challenges: &[C::Scalar], - ) -> impl Iterator + 'a { - let active_rows = C::Scalar::ONE - 
(l_last + l_blind); - - let product_expression = || { - // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma) - let compress_expressions = |expressions: &[Expression]| { - expressions - .iter() - .map(|expression| { - expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) - }; - // z(\omega X) (s(X) + \gamma) - let left = self.product_next_eval - * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); - // z(X) (a(X) + \gamma) - let right = - self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); - - (left - &right) * &active_rows - }; - - std::iter::empty() - .chain( - // l_0(X) * (1 - z'(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), - ) - .chain( - // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), - ) - .chain( - // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) - Some(product_expression()), - ) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open shuffle product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } -} diff --git a/halo2_proofs_rm/src/plonk/vanishing.rs b/halo2_proofs_rm/src/plonk/vanishing.rs 
deleted file mode 100644 index 81f86b02e2..0000000000 --- a/halo2_proofs_rm/src/plonk/vanishing.rs +++ /dev/null @@ -1,11 +0,0 @@ -use std::marker::PhantomData; - -use crate::arithmetic::CurveAffine; - -mod prover; -mod verifier; - -/// A vanishing argument. -pub(crate) struct Argument { - _marker: PhantomData, -} diff --git a/halo2_proofs_rm/src/plonk/vanishing/prover.rs b/halo2_proofs_rm/src/plonk/vanishing/prover.rs deleted file mode 100644 index 7943086826..0000000000 --- a/halo2_proofs_rm/src/plonk/vanishing/prover.rs +++ /dev/null @@ -1,199 +0,0 @@ -use std::{collections::HashMap, iter}; - -use ff::Field; -use group::Curve; -use rand_chacha::ChaCha20Rng; -use rand_core::{RngCore, SeedableRng}; - -use super::Argument; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - multicore::current_num_threads, - plonk::{ChallengeX, Error}, - poly::{ - commitment::{Blind, ParamsProver}, - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; - -pub(in crate::plonk) struct Committed { - random_poly: Polynomial, - random_blind: Blind, -} - -pub(in crate::plonk) struct Constructed { - h_pieces: Vec>, - h_blinds: Vec>, - committed: Committed, -} - -pub(in crate::plonk) struct Evaluated { - h_poly: Polynomial, - h_blind: Blind, - committed: Committed, -} - -impl Argument { - pub(in crate::plonk) fn commit< - 'params, - P: ParamsProver<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - params: &P, - domain: &EvaluationDomain, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - // Sample a random polynomial of degree n - 1 - let n = 1usize << domain.k() as usize; - let mut rand_vec = vec![C::Scalar::ZERO; n]; - - let num_threads = current_num_threads(); - let chunk_size = n / num_threads; - let thread_seeds = (0..) 
- .step_by(chunk_size + 1) - .take(n % num_threads) - .chain( - (chunk_size != 0) - .then(|| ((n % num_threads) * (chunk_size + 1)..).step_by(chunk_size)) - .into_iter() - .flatten(), - ) - .take(num_threads) - .zip(iter::repeat_with(|| { - let mut seed = [0u8; 32]; - rng.fill_bytes(&mut seed); - ChaCha20Rng::from_seed(seed) - })) - .collect::>(); - - parallelize(&mut rand_vec, |chunk, offset| { - let mut rng = thread_seeds[&offset].clone(); - chunk - .iter_mut() - .for_each(|v| *v = C::Scalar::random(&mut rng)); - }); - - let random_poly: Polynomial = domain.coeff_from_vec(rand_vec); - - // Sample a random blinding factor - let random_blind = Blind(C::Scalar::random(rng)); - - // Commit - let c = params.commit(&random_poly, random_blind).to_affine(); - transcript.write_point(c)?; - - Ok(Committed { - random_poly, - random_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn construct< - 'params, - P: ParamsProver<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - params: &P, - domain: &EvaluationDomain, - h_poly: Polynomial, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - // Divide by t(X) = X^{params.n} - 1. 
- let h_poly = domain.divide_by_vanishing_poly(h_poly); - - // Obtain final h(X) polynomial - let h_poly = domain.extended_to_coeff(h_poly); - - // Split h(X) up into pieces - let h_pieces = h_poly - .chunks_exact(params.n() as usize) - .map(|v| domain.coeff_from_vec(v.to_vec())) - .collect::>(); - drop(h_poly); - let h_blinds: Vec<_> = h_pieces - .iter() - .map(|_| Blind(C::Scalar::random(&mut rng))) - .collect(); - - // Compute commitments to each h(X) piece - let h_commitments_projective: Vec<_> = h_pieces - .iter() - .zip(h_blinds.iter()) - .map(|(h_piece, blind)| params.commit(h_piece, *blind)) - .collect(); - let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; - C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); - let h_commitments = h_commitments; - - // Hash each h(X) piece - for c in h_commitments.iter() { - transcript.write_point(*c)?; - } - - Ok(Constructed { - h_pieces, - h_blinds, - committed: self, - }) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - x: ChallengeX, - xn: C::Scalar, - domain: &EvaluationDomain, - transcript: &mut T, - ) -> Result, Error> { - let h_poly = self - .h_pieces - .iter() - .rev() - .fold(domain.empty_coeff(), |acc, eval| acc * xn + eval); - - let h_blind = self - .h_blinds - .iter() - .rev() - .fold(Blind(C::Scalar::ZERO), |acc, eval| acc * Blind(xn) + *eval); - - let random_eval = eval_polynomial(&self.committed.random_poly, *x); - transcript.write_scalar(random_eval)?; - - Ok(Evaluated { - h_poly, - h_blind, - committed: self.committed, - }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - iter::empty() - .chain(Some(ProverQuery { - point: *x, - poly: &self.h_poly, - blind: self.h_blind, - })) - .chain(Some(ProverQuery { - point: *x, - poly: &self.committed.random_poly, - blind: self.committed.random_blind, - })) - } -} diff --git 
a/halo2_proofs_rm/src/plonk/vanishing/verifier.rs b/halo2_proofs_rm/src/plonk/vanishing/verifier.rs deleted file mode 100644 index 0881dfb2c0..0000000000 --- a/halo2_proofs_rm/src/plonk/vanishing/verifier.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::iter; - -use ff::Field; - -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{ - commitment::{Params, MSM}, - VerifierQuery, - }, - transcript::{read_n_points, EncodedChallenge, TranscriptRead}, -}; - -use super::super::{ChallengeX, ChallengeY}; -use super::Argument; - -pub struct Committed { - random_poly_commitment: C, -} - -pub struct Constructed { - h_commitments: Vec, - random_poly_commitment: C, -} - -pub struct PartiallyEvaluated { - h_commitments: Vec, - random_poly_commitment: C, - random_eval: C::Scalar, -} - -pub struct Evaluated> { - h_commitment: M, - random_poly_commitment: C, - expected_h_eval: C::Scalar, - random_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_commitments_before_y< - E: EncodedChallenge, - T: TranscriptRead, - >( - transcript: &mut T, - ) -> Result, Error> { - let random_poly_commitment = transcript.read_point()?; - - Ok(Committed { - random_poly_commitment, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn read_commitments_after_y< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - vk: &VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - // Obtain a commitment to h(X) in the form of multiple pieces of degree n - 1 - let h_commitments = read_n_points(transcript, vk.domain.get_quotient_poly_degree())?; - - Ok(Constructed { - h_commitments, - random_poly_commitment: self.random_poly_commitment, - }) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate_after_x, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let random_eval = transcript.read_scalar()?; - - Ok(PartiallyEvaluated { - h_commitments: self.h_commitments, - random_poly_commitment: self.random_poly_commitment, - 
random_eval, - }) - } -} - -impl PartiallyEvaluated { - pub(in crate::plonk) fn verify<'params, P: Params<'params, C>>( - self, - params: &'params P, - expressions: impl Iterator, - y: ChallengeY, - xn: C::Scalar, - ) -> Evaluated { - let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); - let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); - - let h_commitment = - self.h_commitments - .iter() - .rev() - .fold(params.empty_msm(), |mut acc, commitment| { - acc.scale(xn); - let commitment: C::CurveExt = (*commitment).into(); - acc.append_term(C::Scalar::ONE, commitment); - - acc - }); - - Evaluated { - expected_h_eval, - h_commitment, - random_poly_commitment: self.random_poly_commitment, - random_eval: self.random_eval, - } - } -} - -impl> Evaluated { - pub(in crate::plonk) fn queries( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - iter::empty() - .chain(Some(VerifierQuery::new_msm( - &self.h_commitment, - *x, - self.expected_h_eval, - ))) - .chain(Some(VerifierQuery::new_commitment( - &self.random_poly_commitment, - *x, - self.random_eval, - ))) - } -} diff --git a/halo2_proofs_rm/src/plonk/verifier.rs b/halo2_proofs_rm/src/plonk/verifier.rs deleted file mode 100644 index 62c18c609a..0000000000 --- a/halo2_proofs_rm/src/plonk/verifier.rs +++ /dev/null @@ -1,462 +0,0 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; -use group::Curve; -use std::iter; - -use super::{ - vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, -}; -use crate::arithmetic::compute_inner_product; -use crate::poly::commitment::{CommitmentScheme, Verifier}; -use crate::poly::VerificationStrategy; -use crate::poly::{ - commitment::{Blind, Params}, - VerifierQuery, -}; -use crate::transcript::{read_n_scalars, EncodedChallenge, TranscriptRead}; - -#[cfg(feature = "batch")] -mod batch; -#[cfg(feature = "batch")] -pub use batch::BatchVerifier; - -/// Returns 
a boolean indicating whether or not the proof is valid. Verifies a single proof (not -/// batched). -pub fn verify_proof_single< - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptRead, - Strategy: VerificationStrategy<'params, Scheme, V>, ->( - params: &'params Scheme::ParamsVerifier, - vk: &VerifyingKey, - strategy: Strategy, - instance: &[&[Scheme::Scalar]], - transcript: &mut T, -) -> Result -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - verify_proof(params, vk, strategy, &[instance], transcript) -} - -/// Returns a boolean indicating whether or not the proof is valid -pub fn verify_proof< - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptRead, - Strategy: VerificationStrategy<'params, Scheme, V>, ->( - params: &'params Scheme::ParamsVerifier, - vk: &VerifyingKey, - strategy: Strategy, - instances: &[&[&[Scheme::Scalar]]], - transcript: &mut T, -) -> Result -where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, -{ - // Check that instances matches the expected number of instance columns - for instances in instances.iter() { - if instances.len() != vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } - } - - let instance_commitments = if V::QUERY_INSTANCE { - instances - .iter() - .map(|instance| { - instance - .iter() - .map(|instance| { - if instance.len() > params.n() as usize - (vk.cs.blinding_factors() + 1) { - return Err(Error::InstanceTooLarge); - } - let mut poly = instance.to_vec(); - poly.resize(params.n() as usize, Scheme::Scalar::ZERO); - let poly = vk.domain.lagrange_from_vec(poly); - - Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) - }) - .collect::, _>>() - }) - .collect::, _>>()? 
- } else { - vec![vec![]; instances.len()] - }; - - let num_proofs = instance_commitments.len(); - - // Hash verification key into transcript - vk.hash_into(transcript)?; - - if V::QUERY_INSTANCE { - for instance_commitments in instance_commitments.iter() { - // Hash the instance (external) commitments into the transcript - for commitment in instance_commitments { - transcript.common_point(*commitment)? - } - } - } else { - for instance in instances.iter() { - for instance in instance.iter() { - for value in instance.iter() { - transcript.common_scalar(*value)?; - } - } - } - } - - // Hash the prover's advice commitments into the transcript and squeeze challenges - let (advice_commitments, challenges) = { - let mut advice_commitments = - vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; - let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; - - for current_phase in vk.cs.phases() { - for advice_commitments in advice_commitments.iter_mut() { - for (phase, commitment) in vk - .cs - .advice_column_phase - .iter() - .zip(advice_commitments.iter_mut()) - { - if current_phase == *phase { - *commitment = transcript.read_point()?; - } - } - } - for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { - if current_phase == *phase { - *challenge = *transcript.squeeze_challenge_scalar::<()>(); - } - } - } - - (advice_commitments, challenges) - }; - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - - let lookups_permuted = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each lookup permuted commitment - vk.cs - .lookups - .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = 
transcript.squeeze_challenge_scalar(); - - let permutations_committed = (0..num_proofs) - .map(|_| { - // Hash each permutation product commitment - vk.cs.permutation.read_product_commitments(vk, transcript) - }) - .collect::, _>>()?; - - let lookups_committed = lookups_permuted - .into_iter() - .map(|lookups| { - // Hash each lookup product commitment - lookups - .into_iter() - .map(|lookup| lookup.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_committed = (0..num_proofs) - .map(|_| -> Result, _> { - // Hash each shuffle product commitment - vk.cs - .shuffles - .iter() - .map(|argument| argument.read_product_commitment(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; - - // Sample y challenge, which keeps the gates linearly independent. - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - let vanishing = vanishing.read_commitments_after_y(vk, transcript)?; - - // Sample x challenge, which is used to ensure the circuit is - // satisfied with high probability. - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); - let instance_evals = if V::QUERY_INSTANCE { - (0..num_proofs) - .map(|_| -> Result, _> { - read_n_scalars(transcript, vk.cs.instance_queries.len()) - }) - .collect::, _>>()? 
- } else { - let xn = x.pow([params.n()]); - let (min_rotation, max_rotation) = - vk.cs - .instance_queries - .iter() - .fold((0, 0), |(min, max), (_, rotation)| { - if rotation.0 < min { - (rotation.0, max) - } else if rotation.0 > max { - (min, rotation.0) - } else { - (min, max) - } - }); - let max_instance_len = instances - .iter() - .flat_map(|instance| instance.iter().map(|instance| instance.len())) - .max_by(Ord::cmp) - .unwrap_or_default(); - let l_i_s = &vk.domain.l_i_range( - *x, - xn, - -max_rotation..max_instance_len as i32 + min_rotation.abs(), - ); - instances - .iter() - .map(|instances| { - vk.cs - .instance_queries - .iter() - .map(|(column, rotation)| { - let instances = instances[column.index()]; - let offset = (max_rotation - rotation.0) as usize; - compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) - }) - .collect::>() - }) - .collect::>() - }; - - let advice_evals = (0..num_proofs) - .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) - .collect::, _>>()?; - - let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; - - let vanishing = vanishing.evaluate_after_x(transcript)?; - - let permutations_common = vk.permutation.evaluate(transcript)?; - - let permutations_evaluated = permutations_committed - .into_iter() - .map(|permutation| permutation.evaluate(transcript)) - .collect::, _>>()?; - - let lookups_evaluated = lookups_committed - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|lookup| lookup.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles_evaluated = shuffles_committed - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|shuffle| shuffle.evaluate(transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // This check ensures the circuit is satisfied so long as the polynomial - // commitments open to the correct values. 
- let vanishing = { - // x^n - let xn = x.pow([params.n()]); - - let blinding_factors = vk.cs.blinding_factors(); - let l_evals = vk - .domain - .l_i_range(*x, xn, (-((blinding_factors + 1) as i32))..=0); - assert_eq!(l_evals.len(), 2 + blinding_factors); - let l_last = l_evals[0]; - let l_blind: Scheme::Scalar = l_evals[1..(1 + blinding_factors)] - .iter() - .fold(Scheme::Scalar::ZERO, |acc, eval| acc + eval); - let l_0 = l_evals[1 + blinding_factors]; - - // Compute the expected value of h(x) - let expressions = advice_evals - .iter() - .zip(instance_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |((((advice_evals, instance_evals), permutation), lookups), shuffles)| { - let challenges = &challenges; - let fixed_evals = &fixed_evals; - std::iter::empty() - // Evaluate the circuit using the custom gates provided - .chain(vk.cs.gates.iter().flat_map(move |gate| { - gate.polynomials().iter().map(move |poly| { - poly.evaluate( - &|scalar| scalar, - &|_| { - panic!("virtual selectors are removed during optimization") - }, - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - })) - .chain(permutation.expressions( - vk, - &vk.cs.permutation, - &permutations_common, - advice_evals, - fixed_evals, - instance_evals, - l_0, - l_last, - l_blind, - beta, - gamma, - x, - )) - .chain(lookups.iter().zip(vk.cs.lookups.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - beta, - gamma, - advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - .chain(shuffles.iter().zip(vk.cs.shuffles.iter()).flat_map( - move |(p, argument)| { - p.expressions( - l_0, - l_last, - l_blind, - argument, - theta, - gamma, - 
advice_evals, - fixed_evals, - instance_evals, - challenges, - ) - }, - )) - }, - ); - - vanishing.verify(params, expressions, y, xn) - }; - - let queries = instance_commitments - .iter() - .zip(instance_evals.iter()) - .zip(advice_commitments.iter()) - .zip(advice_evals.iter()) - .zip(permutations_evaluated.iter()) - .zip(lookups_evaluated.iter()) - .zip(shuffles_evaluated.iter()) - .flat_map( - |( - ( - ( - ( - ((instance_commitments, instance_evals), advice_commitments), - advice_evals, - ), - permutation, - ), - lookups, - ), - shuffles, - )| { - iter::empty() - .chain( - V::QUERY_INSTANCE - .then_some(vk.cs.instance_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &instance_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - instance_evals[query_index], - ) - }, - )) - .into_iter() - .flatten(), - ) - .chain(vk.cs.advice_queries.iter().enumerate().map( - move |(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &advice_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - advice_evals[query_index], - ) - }, - )) - .chain(permutation.queries(vk, x)) - .chain(lookups.iter().flat_map(move |p| p.queries(vk, x))) - .chain(shuffles.iter().flat_map(move |p| p.queries(vk, x))) - }, - ) - .chain( - vk.cs - .fixed_queries - .iter() - .enumerate() - .map(|(query_index, &(column, at))| { - VerifierQuery::new_commitment( - &vk.fixed_commitments[column.index()], - vk.domain.rotate_omega(*x, at), - fixed_evals[query_index], - ) - }), - ) - .chain(permutations_common.queries(&vk.permutation, x)) - .chain(vanishing.queries(x)); - - // We are now convinced the circuit is satisfied so long as the - // polynomial commitments open to the correct values. 
- - let verifier = V::new(params); - strategy.process(|msm| { - verifier - .verify_proof(transcript, queries, msm) - .map_err(|_| Error::Opening) - }) -} diff --git a/halo2_proofs_rm/src/plonk/verifier/batch.rs b/halo2_proofs_rm/src/plonk/verifier/batch.rs deleted file mode 100644 index ba3e2419e6..0000000000 --- a/halo2_proofs_rm/src/plonk/verifier/batch.rs +++ /dev/null @@ -1,135 +0,0 @@ -use ff::FromUniformBytes; -use group::ff::Field; -use halo2curves::CurveAffine; -use rand_core::OsRng; - -use super::{verify_proof, VerificationStrategy}; -use crate::{ - multicore::{ - IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, - }, - plonk::{Error, VerifyingKey}, - poly::{ - commitment::{Params, MSM}, - ipa::{ - commitment::{IPACommitmentScheme, ParamsVerifierIPA}, - msm::MSMIPA, - multiopen::VerifierIPA, - strategy::GuardIPA, - }, - }, - transcript::{Blake2bRead, TranscriptReadBuffer}, -}; - -/// A proof verification strategy that returns the proof's MSM. -/// -/// `BatchVerifier` handles the accumulation of the MSMs for the batched proofs. -#[derive(Debug)] -struct BatchStrategy<'params, C: CurveAffine> { - msm: MSMIPA<'params, C>, -} - -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> - for BatchStrategy<'params, C> -{ - type Output = MSMIPA<'params, C>; - - fn new(params: &'params ParamsVerifierIPA) -> Self { - BatchStrategy { - msm: MSMIPA::new(params), - } - } - - fn process( - self, - f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, - ) -> Result { - let guard = f(self.msm)?; - Ok(guard.use_challenges()) - } - - fn finalize(self) -> bool { - unreachable!() - } -} - -#[derive(Debug)] -struct BatchItem { - instances: Vec>>, - proof: Vec, -} - -/// A verifier that checks multiple proofs in a batch. 
**This requires the -/// `batch` crate feature to be enabled.** -#[derive(Debug, Default)] -pub struct BatchVerifier { - items: Vec>, -} - -impl BatchVerifier -where - C::Scalar: FromUniformBytes<64>, -{ - /// Constructs a new batch verifier. - pub fn new() -> Self { - Self { items: vec![] } - } - - /// Adds a proof to the batch. - pub fn add_proof(&mut self, instances: Vec>>, proof: Vec) { - self.items.push(BatchItem { instances, proof }) - } - - /// Finalizes the batch and checks its validity. - /// - /// Returns `false` if *some* proof was invalid. If the caller needs to identify - /// specific failing proofs, it must re-process the proofs separately. - /// - /// This uses [`OsRng`] internally instead of taking an `R: RngCore` argument, because - /// the internal parallelization requires access to a RNG that is guaranteed to not - /// clone its internal state when shared between threads. - pub fn finalize(self, params: &ParamsVerifierIPA, vk: &VerifyingKey) -> bool { - fn accumulate_msm<'params, C: CurveAffine>( - mut acc: MSMIPA<'params, C>, - msm: MSMIPA<'params, C>, - ) -> MSMIPA<'params, C> { - // Scale the MSM by a random factor to ensure that if the existing MSM has - // `is_zero() == false` then this argument won't be able to interfere with it - // to make it true, with high probability. 
- acc.scale(C::Scalar::random(OsRng)); - - acc.add_msm(&msm); - acc - } - - let final_msm = self - .items - .into_par_iter() - .enumerate() - .map(|(i, item)| { - let instances: Vec> = item - .instances - .iter() - .map(|i| i.iter().map(|c| &c[..]).collect()) - .collect(); - let instances: Vec<_> = instances.iter().map(|i| &i[..]).collect(); - - let strategy = BatchStrategy::new(params); - let mut transcript = Blake2bRead::init(&item.proof[..]); - verify_proof(params, vk, strategy, &instances, &mut transcript).map_err(|e| { - tracing::debug!("Batch item {} failed verification: {}", i, e); - e - }) - }) - .try_fold_and_reduce( - || params.empty_msm(), - |acc, res| res.map(|proof_msm| accumulate_msm(acc, proof_msm)), - ); - - match final_msm { - Ok(msm) => msm.check(), - Err(_) => false, - } - } -} diff --git a/halo2_proofs_rm/src/poly.rs b/halo2_proofs_rm/src/poly.rs deleted file mode 100644 index b505d6b49b..0000000000 --- a/halo2_proofs_rm/src/poly.rs +++ /dev/null @@ -1,345 +0,0 @@ -//! Contains utilities for performing arithmetic over univariate polynomials in -//! various forms, including computing commitments to them and provably opening -//! the committed polynomials at arbitrary points. - -use crate::arithmetic::parallelize; -use crate::helpers::SerdePrimeField; -use crate::plonk::Assigned; -use crate::SerdeFormat; - -use group::ff::{BatchInvert, Field}; -use std::fmt::Debug; -use std::io; -use std::marker::PhantomData; -use std::ops::{Add, Deref, DerefMut, Index, IndexMut, Mul, RangeFrom, RangeFull, Sub}; - -/// Generic commitment scheme structures -pub mod commitment; -mod domain; -mod query; -mod strategy; - -/// Inner product argument commitment scheme -pub mod ipa; - -/// KZG commitment scheme -pub mod kzg; - -#[cfg(test)] -mod multiopen_test; - -pub use domain::*; -pub use query::{ProverQuery, VerifierQuery}; -pub use strategy::{Guard, VerificationStrategy}; - -/// This is an error that could occur during proving or circuit synthesis. 
-// TODO: these errors need to be cleaned up -#[derive(Debug)] -pub enum Error { - /// OpeningProof is not well-formed - OpeningError, - /// Caller needs to re-sample a point - SamplingError, -} - -/// The basis over which a polynomial is described. -pub trait Basis: Copy + Debug + Send + Sync {} - -/// The polynomial is defined as coefficients -#[derive(Clone, Copy, Debug)] -pub struct Coeff; -impl Basis for Coeff {} - -/// The polynomial is defined as coefficients of Lagrange basis polynomials -#[derive(Clone, Copy, Debug)] -pub struct LagrangeCoeff; -impl Basis for LagrangeCoeff {} - -/// The polynomial is defined as coefficients of Lagrange basis polynomials in -/// an extended size domain which supports multiplication -#[derive(Clone, Copy, Debug)] -pub struct ExtendedLagrangeCoeff; -impl Basis for ExtendedLagrangeCoeff {} - -/// Represents a univariate polynomial defined over a field and a particular -/// basis. -#[derive(Clone, Debug)] -pub struct Polynomial { - pub(crate) values: Vec, - pub(crate) _marker: PhantomData, -} - -impl Polynomial { - pub(crate) fn new_empty(size: usize, zero: F) -> Self { - Polynomial { - values: vec![zero; size], - _marker: PhantomData, - } - } -} - -impl Polynomial { - /// Obtains a polynomial in Lagrange form when given a vector of Lagrange - /// coefficients of size `n`; panics if the provided vector is the wrong - /// length. 
- pub(crate) fn new_lagrange_from_vec(values: Vec) -> Polynomial { - Polynomial { - values, - _marker: PhantomData, - } - } -} - -impl Index for Polynomial { - type Output = F; - - fn index(&self, index: usize) -> &F { - self.values.index(index) - } -} - -impl IndexMut for Polynomial { - fn index_mut(&mut self, index: usize) -> &mut F { - self.values.index_mut(index) - } -} - -impl Index> for Polynomial { - type Output = [F]; - - fn index(&self, index: RangeFrom) -> &[F] { - self.values.index(index) - } -} - -impl IndexMut> for Polynomial { - fn index_mut(&mut self, index: RangeFrom) -> &mut [F] { - self.values.index_mut(index) - } -} - -impl Index for Polynomial { - type Output = [F]; - - fn index(&self, index: RangeFull) -> &[F] { - self.values.index(index) - } -} - -impl IndexMut for Polynomial { - fn index_mut(&mut self, index: RangeFull) -> &mut [F] { - self.values.index_mut(index) - } -} - -impl Deref for Polynomial { - type Target = [F]; - - fn deref(&self) -> &[F] { - &self.values[..] - } -} - -impl DerefMut for Polynomial { - fn deref_mut(&mut self) -> &mut [F] { - &mut self.values[..] - } -} - -impl Polynomial { - /// Iterate over the values, which are either in coefficient or evaluation - /// form depending on the basis `B`. - pub fn iter(&self) -> impl Iterator { - self.values.iter() - } - - /// Iterate over the values mutably, which are either in coefficient or - /// evaluation form depending on the basis `B`. - pub fn iter_mut(&mut self) -> impl Iterator { - self.values.iter_mut() - } - - /// Gets the size of this polynomial in terms of the number of - /// coefficients used to describe it. - pub fn num_coeffs(&self) -> usize { - self.values.len() - } -} - -impl Polynomial { - /// Reads polynomial from buffer using `SerdePrimeField::read`. 
- pub(crate) fn read(reader: &mut R, format: SerdeFormat) -> io::Result { - let mut poly_len = [0u8; 4]; - reader.read_exact(&mut poly_len)?; - let poly_len = u32::from_be_bytes(poly_len); - - (0..poly_len) - .map(|_| F::read(reader, format)) - .collect::>>() - .map(|values| Self { - values, - _marker: PhantomData, - }) - } - - /// Writes polynomial to buffer using `SerdePrimeField::write`. - pub(crate) fn write( - &self, - writer: &mut W, - format: SerdeFormat, - ) -> io::Result<()> { - writer.write_all(&(self.values.len() as u32).to_be_bytes())?; - for value in self.values.iter() { - value.write(writer, format)?; - } - Ok(()) - } -} - -pub(crate) fn batch_invert_assigned( - assigned: Vec, LagrangeCoeff>>, -) -> Vec> { - let mut assigned_denominators: Vec<_> = assigned - .iter() - .map(|f| { - f.iter() - .map(|value| value.denominator()) - .collect::>() - }) - .collect(); - - assigned_denominators - .iter_mut() - .flat_map(|f| { - f.iter_mut() - // If the denominator is trivial, we can skip it, reducing the - // size of the batch inversion. 
- .filter_map(|d| d.as_mut()) - }) - .batch_invert(); - - assigned - .iter() - .zip(assigned_denominators) - .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) - .collect() -} - -impl Polynomial, LagrangeCoeff> { - pub(crate) fn invert( - &self, - inv_denoms: impl Iterator + ExactSizeIterator, - ) -> Polynomial { - assert_eq!(inv_denoms.len(), self.values.len()); - Polynomial { - values: self - .values - .iter() - .zip(inv_denoms) - .map(|(a, inv_den)| a.numerator() * inv_den) - .collect(), - _marker: self._marker, - } - } -} - -impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { - type Output = Polynomial; - - fn add(mut self, rhs: &'a Polynomial) -> Polynomial { - parallelize(&mut self.values, |lhs, start| { - for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { - *lhs += *rhs; - } - }); - - self - } -} - -impl<'a, F: Field, B: Basis> Sub<&'a Polynomial> for Polynomial { - type Output = Polynomial; - - fn sub(mut self, rhs: &'a Polynomial) -> Polynomial { - parallelize(&mut self.values, |lhs, start| { - for (lhs, rhs) in lhs.iter_mut().zip(rhs.values[start..].iter()) { - *lhs -= *rhs; - } - }); - - self - } -} - -impl Polynomial { - /// Rotates the values in a Lagrange basis polynomial by `Rotation` - pub fn rotate(&self, rotation: Rotation) -> Polynomial { - let mut values = self.values.clone(); - if rotation.0 < 0 { - values.rotate_right((-rotation.0) as usize); - } else { - values.rotate_left(rotation.0 as usize); - } - Polynomial { - values, - _marker: PhantomData, - } - } -} - -impl Mul for Polynomial { - type Output = Polynomial; - - fn mul(mut self, rhs: F) -> Polynomial { - if rhs == F::ZERO { - return Polynomial { - values: vec![F::ZERO; self.len()], - _marker: PhantomData, - }; - } - if rhs == F::ONE { - return self; - } - - parallelize(&mut self.values, |lhs, _| { - for lhs in lhs.iter_mut() { - *lhs *= rhs; - } - }); - - self - } -} - -impl<'a, F: Field, B: Basis> Sub for &'a 
Polynomial { - type Output = Polynomial; - - fn sub(self, rhs: F) -> Polynomial { - let mut res = self.clone(); - res.values[0] -= rhs; - res - } -} - -/// Describes the relative rotation of a vector. Negative numbers represent -/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) -/// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Rotation(pub i32); - -impl Rotation { - /// The current location in the evaluation domain - pub fn cur() -> Rotation { - Rotation(0) - } - - /// The previous location in the evaluation domain - pub fn prev() -> Rotation { - Rotation(-1) - } - - /// The next location in the evaluation domain - pub fn next() -> Rotation { - Rotation(1) - } -} diff --git a/halo2_proofs_rm/src/poly/commitment.rs b/halo2_proofs_rm/src/poly/commitment.rs deleted file mode 100644 index feae085655..0000000000 --- a/halo2_proofs_rm/src/poly/commitment.rs +++ /dev/null @@ -1,245 +0,0 @@ -use super::{ - query::{ProverQuery, VerifierQuery}, - strategy::Guard, - Coeff, LagrangeCoeff, Polynomial, -}; -use crate::poly::Error; -use crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; -use ff::Field; -use halo2curves::CurveAffine; -use rand_core::RngCore; -use std::{ - fmt::Debug, - io::{self}, - ops::{Add, AddAssign, Mul, MulAssign}, -}; - -/// Defines components of a commitment scheme. 
-pub trait CommitmentScheme { - /// Application field of this commitment scheme - type Scalar: Field; - - /// Elliptic curve used to commit the application and witnesses - type Curve: CurveAffine; - - /// Constant prover parameters - type ParamsProver: for<'params> ParamsProver< - 'params, - Self::Curve, - ParamsVerifier = Self::ParamsVerifier, - >; - - /// Constant verifier parameters - type ParamsVerifier: for<'params> ParamsVerifier<'params, Self::Curve>; - - /// Wrapper for parameter generator - fn new_params(k: u32) -> Self::ParamsProver; - - /// Wrapper for parameter reader - fn read_params(reader: &mut R) -> io::Result; -} - -/// Parameters for circuit sysnthesis and prover parameters. -pub trait Params<'params, C: CurveAffine>: Sized + Clone + Debug { - /// Multi scalar multiplication engine - type MSM: MSM + 'params; - - /// Logaritmic size of the circuit - fn k(&self) -> u32; - - /// Size of the circuit - fn n(&self) -> u64; - - /// Downsize `Params` with smaller `k`. - fn downsize(&mut self, k: u32); - - /// Generates an empty multiscalar multiplication struct using the - /// appropriate params. - fn empty_msm(&'params self) -> Self::MSM; - - /// This commits to a polynomial using its evaluations over the $2^k$ size - /// evaluation domain. The commitment will be blinded by the blinding factor - /// `r`. - fn commit_lagrange( - &self, - poly: &Polynomial, - r: Blind, - ) -> C::CurveExt; - - /// Writes params to a buffer. - fn write(&self, writer: &mut W) -> io::Result<()>; - - /// Reads params from a buffer. - fn read(reader: &mut R) -> io::Result; -} - -/// Parameters for circuit sysnthesis and prover parameters. -pub trait ParamsProver<'params, C: CurveAffine>: Params<'params, C> { - /// Constant verifier parameters. - type ParamsVerifier: ParamsVerifier<'params, C>; - - /// Returns new instance of parameters - fn new(k: u32) -> Self; - - /// This computes a commitment to a polynomial described by the provided - /// slice of coefficients. 
The commitment may be blinded by the blinding - /// factor `r`. - fn commit(&self, poly: &Polynomial, r: Blind) - -> C::CurveExt; - - /// Getter for g generators - fn get_g(&self) -> &[C]; - - /// Returns verification parameters. - fn verifier_params(&'params self) -> &'params Self::ParamsVerifier; -} - -/// Verifier specific functionality with circuit constraints -pub trait ParamsVerifier<'params, C: CurveAffine>: Params<'params, C> {} - -/// Multi scalar multiplication engine -pub trait MSM: Clone + Debug + Send + Sync { - /// Add arbitrary term (the scalar and the point) - fn append_term(&mut self, scalar: C::Scalar, point: C::CurveExt); - - /// Add another multiexp into this one - fn add_msm(&mut self, other: &Self) - where - Self: Sized; - - /// Scale all scalars in the MSM by some scaling factor - fn scale(&mut self, factor: C::Scalar); - - /// Perform multiexp and check that it results in zero - fn check(&self) -> bool; - - /// Perform multiexp and return the result - fn eval(&self) -> C::CurveExt; - - /// Return base points - fn bases(&self) -> Vec; - - /// Scalars - fn scalars(&self) -> Vec; -} - -/// Common multi-open prover interface for various commitment schemes -pub trait Prover<'params, Scheme: CommitmentScheme> { - /// Query instance or not - const QUERY_INSTANCE: bool; - - /// Creates new prover instance - fn new(params: &'params Scheme::ParamsProver) -> Self; - - /// Create a multi-opening proof - fn create_proof< - 'com, - E: EncodedChallenge, - T: TranscriptWrite, - R, - I, - >( - &self, - rng: R, - transcript: &mut T, - queries: I, - ) -> io::Result<()> - where - I: IntoIterator> + Clone, - R: RngCore; -} - -/// Common multi-open verifier interface for various commitment schemes -pub trait Verifier<'params, Scheme: CommitmentScheme> { - /// Unfinalized verification result. 
This is returned in verification - /// to allow developer to compress or combined verification results - type Guard: Guard; - - /// Accumulator fot comressed verification - type MSMAccumulator; - - /// Query instance or not - const QUERY_INSTANCE: bool; - - /// Creates new verifier instance - fn new(params: &'params Scheme::ParamsVerifier) -> Self; - - /// Process the proof and returns unfinished result named `Guard` - fn verify_proof< - 'com, - E: EncodedChallenge, - T: TranscriptRead, - I, - >( - &self, - transcript: &mut T, - queries: I, - msm: Self::MSMAccumulator, - ) -> Result - where - 'params: 'com, - I: IntoIterator< - Item = VerifierQuery< - 'com, - Scheme::Curve, - >::MSM, - >, - > + Clone; -} - -/// Wrapper type around a blinding factor. -#[derive(Copy, Clone, Eq, PartialEq, Debug)] -pub struct Blind(pub F); - -impl Default for Blind { - fn default() -> Self { - Blind(F::ONE) - } -} - -impl Blind { - /// Given `rng` creates new blinding scalar - pub fn new(rng: &mut R) -> Self { - Blind(F::random(rng)) - } -} - -impl Add for Blind { - type Output = Self; - - fn add(self, rhs: Blind) -> Self { - Blind(self.0 + rhs.0) - } -} - -impl Mul for Blind { - type Output = Self; - - fn mul(self, rhs: Blind) -> Self { - Blind(self.0 * rhs.0) - } -} - -impl AddAssign for Blind { - fn add_assign(&mut self, rhs: Blind) { - self.0 += rhs.0; - } -} - -impl MulAssign for Blind { - fn mul_assign(&mut self, rhs: Blind) { - self.0 *= rhs.0; - } -} - -impl AddAssign for Blind { - fn add_assign(&mut self, rhs: F) { - self.0 += rhs; - } -} - -impl MulAssign for Blind { - fn mul_assign(&mut self, rhs: F) { - self.0 *= rhs; - } -} diff --git a/halo2_proofs_rm/src/poly/domain.rs b/halo2_proofs_rm/src/poly/domain.rs deleted file mode 100644 index ae9b8bf9ae..0000000000 --- a/halo2_proofs_rm/src/poly/domain.rs +++ /dev/null @@ -1,557 +0,0 @@ -//! Contains utilities for performing polynomial arithmetic over an evaluation -//! domain that is of a suitable size for the application. 
- -use crate::{ - arithmetic::{best_fft, parallelize}, - plonk::Assigned, -}; - -use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, Rotation}; -use ff::WithSmallOrderMulGroup; -use group::ff::{BatchInvert, Field}; - -use std::marker::PhantomData; - -/// This structure contains precomputed constants and other details needed for -/// performing operations on an evaluation domain of size $2^k$ and an extended -/// domain of size $2^{k} * j$ with $j \neq 0$. -#[derive(Clone, Debug)] -pub struct EvaluationDomain { - n: u64, - k: u32, - extended_k: u32, - omega: F, - omega_inv: F, - extended_omega: F, - extended_omega_inv: F, - g_coset: F, - g_coset_inv: F, - quotient_poly_degree: u64, - ifft_divisor: F, - extended_ifft_divisor: F, - t_evaluations: Vec, - barycentric_weight: F, -} - -impl> EvaluationDomain { - /// This constructs a new evaluation domain object based on the provided - /// values $j, k$. - pub fn new(j: u32, k: u32) -> Self { - // quotient_poly_degree * params.n - 1 is the degree of the quotient polynomial - let quotient_poly_degree = (j - 1) as u64; - - // n = 2^k - let n = 1u64 << k; - - // We need to work within an extended domain, not params.k but params.k + i - // for some integer i such that 2^(params.k + i) is sufficiently large to - // describe the quotient polynomial. - let mut extended_k = k; - while (1 << extended_k) < (n * quotient_poly_degree) { - extended_k += 1; - } - - // ensure extended_k <= S - assert!(extended_k <= F::S); - - let mut extended_omega = F::ROOT_OF_UNITY; - - // Get extended_omega, the 2^{extended_k}'th root of unity - // The loop computes extended_omega = omega^{2 ^ (S - extended_k)} - // Notice that extended_omega ^ {2 ^ extended_k} = omega ^ {2^S} = 1. - for _ in extended_k..F::S { - extended_omega = extended_omega.square(); - } - let extended_omega = extended_omega; - let mut extended_omega_inv = extended_omega; // Inversion computed later - - // Get omega, the 2^{k}'th root of unity (i.e. 
n'th root of unity) - // The loop computes omega = extended_omega ^ {2 ^ (extended_k - k)} - // = (omega^{2 ^ (S - extended_k)}) ^ {2 ^ (extended_k - k)} - // = omega ^ {2 ^ (S - k)}. - // Notice that omega ^ {2^k} = omega ^ {2^S} = 1. - let mut omega = extended_omega; - for _ in k..extended_k { - omega = omega.square(); - } - let omega = omega; - let mut omega_inv = omega; // Inversion computed later - - // We use zeta here because we know it generates a coset, and it's available - // already. - // The coset evaluation domain is: - // zeta {1, extended_omega, extended_omega^2, ..., extended_omega^{(2^extended_k) - 1}} - let g_coset = F::ZETA; - let g_coset_inv = g_coset.square(); - - let mut t_evaluations = Vec::with_capacity(1 << (extended_k - k)); - { - // Compute the evaluations of t(X) = X^n - 1 in the coset evaluation domain. - // We don't have to compute all of them, because it will repeat. - let orig = F::ZETA.pow_vartime([n, 0, 0, 0]); - let step = extended_omega.pow_vartime([n, 0, 0, 0]); - let mut cur = orig; - loop { - t_evaluations.push(cur); - cur *= &step; - if cur == orig { - break; - } - } - assert_eq!(t_evaluations.len(), 1 << (extended_k - k)); - - // Subtract 1 from each to give us t_evaluations[i] = t(zeta * extended_omega^i) - for coeff in &mut t_evaluations { - *coeff -= &F::ONE; - } - - // Invert, because we're dividing by this polynomial. - // We invert in a batch, below. 
- } - - let mut ifft_divisor = F::from(1 << k); // Inversion computed later - let mut extended_ifft_divisor = F::from(1 << extended_k); // Inversion computed later - - // The barycentric weight of 1 over the evaluation domain - // 1 / \prod_{i != 0} (1 - omega^i) - let mut barycentric_weight = F::from(n); // Inversion computed later - - // Compute batch inversion - t_evaluations - .iter_mut() - .chain(Some(&mut ifft_divisor)) - .chain(Some(&mut extended_ifft_divisor)) - .chain(Some(&mut barycentric_weight)) - .chain(Some(&mut extended_omega_inv)) - .chain(Some(&mut omega_inv)) - .batch_invert(); - - EvaluationDomain { - n, - k, - extended_k, - omega, - omega_inv, - extended_omega, - extended_omega_inv, - g_coset, - g_coset_inv, - quotient_poly_degree, - ifft_divisor, - extended_ifft_divisor, - t_evaluations, - barycentric_weight, - } - } - - /// Obtains a polynomial in Lagrange form when given a vector of Lagrange - /// coefficients of size `n`; panics if the provided vector is the wrong - /// length. - pub fn lagrange_from_vec(&self, values: Vec) -> Polynomial { - assert_eq!(values.len(), self.n as usize); - - Polynomial { - values, - _marker: PhantomData, - } - } - - /// Obtains a polynomial in coefficient form when given a vector of - /// coefficients of size `n`; panics if the provided vector is the wrong - /// length. 
- pub fn coeff_from_vec(&self, values: Vec) -> Polynomial { - assert_eq!(values.len(), self.n as usize); - - Polynomial { - values, - _marker: PhantomData, - } - } - - /// Returns an empty (zero) polynomial in the coefficient basis - pub fn empty_coeff(&self) -> Polynomial { - Polynomial { - values: vec![F::ZERO; self.n as usize], - _marker: PhantomData, - } - } - - /// Returns an empty (zero) polynomial in the Lagrange coefficient basis - pub fn empty_lagrange(&self) -> Polynomial { - Polynomial { - values: vec![F::ZERO; self.n as usize], - _marker: PhantomData, - } - } - - /// Returns an empty (zero) polynomial in the Lagrange coefficient basis, with - /// deferred inversions. - pub fn empty_lagrange_assigned(&self) -> Polynomial, LagrangeCoeff> { - Polynomial { - values: vec![F::ZERO.into(); self.n as usize], - _marker: PhantomData, - } - } - - /// Returns a constant polynomial in the Lagrange coefficient basis - pub fn constant_lagrange(&self, scalar: F) -> Polynomial { - Polynomial { - values: vec![scalar; self.n as usize], - _marker: PhantomData, - } - } - - /// Returns an empty (zero) polynomial in the extended Lagrange coefficient - /// basis - pub fn empty_extended(&self) -> Polynomial { - Polynomial { - values: vec![F::ZERO; self.extended_len()], - _marker: PhantomData, - } - } - - /// Returns a constant polynomial in the extended Lagrange coefficient - /// basis - pub fn constant_extended(&self, scalar: F) -> Polynomial { - Polynomial { - values: vec![scalar; self.extended_len()], - _marker: PhantomData, - } - } - - /// This takes us from an n-length vector into the coefficient form. - /// - /// This function will panic if the provided vector is not the correct - /// length. 
- pub fn lagrange_to_coeff(&self, mut a: Polynomial) -> Polynomial { - assert_eq!(a.values.len(), 1 << self.k); - - // Perform inverse FFT to obtain the polynomial in coefficient form - Self::ifft(&mut a.values, self.omega_inv, self.k, self.ifft_divisor); - - Polynomial { - values: a.values, - _marker: PhantomData, - } - } - - /// This takes us from an n-length coefficient vector into a coset of the extended - /// evaluation domain, rotating by `rotation` if desired. - pub fn coeff_to_extended( - &self, - mut a: Polynomial, - ) -> Polynomial { - assert_eq!(a.values.len(), 1 << self.k); - - self.distribute_powers_zeta(&mut a.values, true); - a.values.resize(self.extended_len(), F::ZERO); - best_fft(&mut a.values, self.extended_omega, self.extended_k); - - Polynomial { - values: a.values, - _marker: PhantomData, - } - } - - /// Rotate the extended domain polynomial over the original domain. - pub fn rotate_extended( - &self, - poly: &Polynomial, - rotation: Rotation, - ) -> Polynomial { - let new_rotation = ((1 << (self.extended_k - self.k)) * rotation.0.abs()) as usize; - - let mut poly = poly.clone(); - - if rotation.0 >= 0 { - poly.values.rotate_left(new_rotation); - } else { - poly.values.rotate_right(new_rotation); - } - - poly - } - - /// This takes us from the extended evaluation domain and gets us the - /// quotient polynomial coefficients. - /// - /// This function will panic if the provided vector is not the correct - /// length. - // TODO/FIXME: caller should be responsible for truncating - pub fn extended_to_coeff(&self, mut a: Polynomial) -> Vec { - assert_eq!(a.values.len(), self.extended_len()); - - // Inverse FFT - Self::ifft( - &mut a.values, - self.extended_omega_inv, - self.extended_k, - self.extended_ifft_divisor, - ); - - // Distribute powers to move from coset; opposite from the - // transformation we performed earlier. 
- self.distribute_powers_zeta(&mut a.values, false); - - // Truncate it to match the size of the quotient polynomial; the - // evaluation domain might be slightly larger than necessary because - // it always lies on a power-of-two boundary. - a.values - .truncate((&self.n * self.quotient_poly_degree) as usize); - - a.values - } - - /// This divides the polynomial (in the extended domain) by the vanishing - /// polynomial of the $2^k$ size domain. - pub fn divide_by_vanishing_poly( - &self, - mut a: Polynomial, - ) -> Polynomial { - assert_eq!(a.values.len(), self.extended_len()); - - // Divide to obtain the quotient polynomial in the coset evaluation - // domain. - parallelize(&mut a.values, |h, mut index| { - for h in h { - *h *= &self.t_evaluations[index % self.t_evaluations.len()]; - index += 1; - } - }); - - Polynomial { - values: a.values, - _marker: PhantomData, - } - } - - /// Given a slice of group elements `[a_0, a_1, a_2, ...]`, this returns - /// `[a_0, [zeta]a_1, [zeta^2]a_2, a_3, [zeta]a_4, [zeta^2]a_5, a_6, ...]`, - /// where zeta is a cube root of unity in the multiplicative subgroup with - /// order (p - 1), i.e. zeta^3 = 1. - /// - /// `into_coset` should be set to `true` when moving into the coset, - /// and `false` when moving out. This toggles the choice of `zeta`. 
- fn distribute_powers_zeta(&self, a: &mut [F], into_coset: bool) { - let coset_powers = if into_coset { - [self.g_coset, self.g_coset_inv] - } else { - [self.g_coset_inv, self.g_coset] - }; - parallelize(a, |a, mut index| { - for a in a { - // Distribute powers to move into/from coset - let i = index % (coset_powers.len() + 1); - if i != 0 { - *a *= &coset_powers[i - 1]; - } - index += 1; - } - }); - } - - fn ifft(a: &mut [F], omega_inv: F, log_n: u32, divisor: F) { - best_fft(a, omega_inv, log_n); - parallelize(a, |a, _| { - for a in a { - // Finish iFFT - *a *= &divisor; - } - }); - } - - /// Get the size of the domain - pub fn k(&self) -> u32 { - self.k - } - - /// Get the size of the extended domain - pub fn extended_k(&self) -> u32 { - self.extended_k - } - - /// Get the size of the extended domain - pub fn extended_len(&self) -> usize { - 1 << self.extended_k - } - - /// Get $\omega$, the generator of the $2^k$ order multiplicative subgroup. - pub fn get_omega(&self) -> F { - self.omega - } - - /// Get $\omega^{-1}$, the inverse of the generator of the $2^k$ order - /// multiplicative subgroup. - pub fn get_omega_inv(&self) -> F { - self.omega_inv - } - - /// Get the generator of the extended domain's multiplicative subgroup. - pub fn get_extended_omega(&self) -> F { - self.extended_omega - } - - /// Multiplies a value by some power of $\omega$, essentially rotating over - /// the domain. - pub fn rotate_omega(&self, value: F, rotation: Rotation) -> F { - let mut point = value; - if rotation.0 >= 0 { - point *= &self.get_omega().pow_vartime([rotation.0 as u64]); - } else { - point *= &self - .get_omega_inv() - .pow_vartime([(rotation.0 as i64).unsigned_abs()]); - } - point - } - - /// Computes evaluations (at the point `x`, where `xn = x^n`) of Lagrange - /// basis polynomials `l_i(X)` defined such that `l_i(omega^i) = 1` and - /// `l_i(omega^j) = 0` for all `j != i` at each provided rotation `i`. 
- /// - /// # Implementation - /// - /// The polynomial - /// $$\prod_{j=0,j \neq i}^{n - 1} (X - \omega^j)$$ - /// has a root at all points in the domain except $\omega^i$, where it evaluates to - /// $$\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)$$ - /// and so we divide that polynomial by this value to obtain $l_i(X)$. Since - /// $$\prod_{j=0,j \neq i}^{n - 1} (X - \omega^j) - /// = \frac{X^n - 1}{X - \omega^i}$$ - /// then $l_i(x)$ for some $x$ is evaluated as - /// $$\left(\frac{x^n - 1}{x - \omega^i}\right) - /// \cdot \left(\frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)}\right).$$ - /// We refer to - /// $$1 \over \prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)$$ - /// as the barycentric weight of $\omega^i$. - /// - /// We know that for $i = 0$ - /// $$\frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)} = \frac{1}{n}.$$ - /// - /// If we multiply $(1 / n)$ by $\omega^i$ then we obtain - /// $$\frac{1}{\prod_{j=0,j \neq 0}^{n - 1} (\omega^i - \omega^j)} - /// = \frac{1}{\prod_{j=0,j \neq i}^{n - 1} (\omega^i - \omega^j)}$$ - /// which is the barycentric weight of $\omega^i$. 
- pub fn l_i_range + Clone>( - &self, - x: F, - xn: F, - rotations: I, - ) -> Vec { - let mut results; - { - let rotations = rotations.clone().into_iter(); - results = Vec::with_capacity(rotations.size_hint().1.unwrap_or(0)); - for rotation in rotations { - let rotation = Rotation(rotation); - let result = x - self.rotate_omega(F::ONE, rotation); - results.push(result); - } - results.iter_mut().batch_invert(); - } - - let common = (xn - F::ONE) * self.barycentric_weight; - for (rotation, result) in rotations.into_iter().zip(results.iter_mut()) { - let rotation = Rotation(rotation); - *result = self.rotate_omega(*result * common, rotation); - } - - results - } - - /// Gets the quotient polynomial's degree (as a multiple of n) - pub fn get_quotient_poly_degree(&self) -> usize { - self.quotient_poly_degree as usize - } - - /// Obtain a pinned version of this evaluation domain; a structure with the - /// minimal parameters needed to determine the rest of the evaluation - /// domain. - pub fn pinned(&self) -> PinnedEvaluationDomain<'_, F> { - PinnedEvaluationDomain { - k: &self.k, - extended_k: &self.extended_k, - omega: &self.omega, - } - } -} - -/// Represents the minimal parameters that determine an `EvaluationDomain`. 
-#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedEvaluationDomain<'a, F: Field> { - k: &'a u32, - extended_k: &'a u32, - omega: &'a F, -} - -#[test] -fn test_rotate() { - use rand_core::OsRng; - - use crate::arithmetic::eval_polynomial; - use halo2curves::pasta::pallas::Scalar; - - let domain = EvaluationDomain::::new(1, 3); - let rng = OsRng; - - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), 8); - for value in poly.iter_mut() { - *value = Scalar::random(rng); - } - - let poly_rotated_cur = poly.rotate(Rotation::cur()); - let poly_rotated_next = poly.rotate(Rotation::next()); - let poly_rotated_prev = poly.rotate(Rotation::prev()); - - let poly = domain.lagrange_to_coeff(poly); - let poly_rotated_cur = domain.lagrange_to_coeff(poly_rotated_cur); - let poly_rotated_next = domain.lagrange_to_coeff(poly_rotated_next); - let poly_rotated_prev = domain.lagrange_to_coeff(poly_rotated_prev); - - let x = Scalar::random(rng); - - assert_eq!( - eval_polynomial(&poly[..], x), - eval_polynomial(&poly_rotated_cur[..], x) - ); - assert_eq!( - eval_polynomial(&poly[..], x * domain.omega), - eval_polynomial(&poly_rotated_next[..], x) - ); - assert_eq!( - eval_polynomial(&poly[..], x * domain.omega_inv), - eval_polynomial(&poly_rotated_prev[..], x) - ); -} - -#[test] -fn test_l_i() { - use rand_core::OsRng; - - use crate::arithmetic::{eval_polynomial, lagrange_interpolate}; - use halo2curves::pasta::pallas::Scalar; - let domain = EvaluationDomain::::new(1, 3); - - let mut l = vec![]; - let mut points = vec![]; - for i in 0..8 { - points.push(domain.omega.pow([i])); - } - for i in 0..8 { - let mut l_i = vec![Scalar::zero(); 8]; - l_i[i] = Scalar::ONE; - let l_i = lagrange_interpolate(&points[..], &l_i[..]); - l.push(l_i); - } - - let x = Scalar::random(OsRng); - let xn = x.pow([8]); - - let evaluations = domain.l_i_range(x, xn, -7..=7); - for i in 0..8 { - assert_eq!(eval_polynomial(&l[i][..], x), evaluations[7 + i]); - assert_eq!(eval_polynomial(&l[(8 - 
i) % 8][..], x), evaluations[7 - i]); - } -} diff --git a/halo2_proofs_rm/src/poly/ipa/commitment.rs b/halo2_proofs_rm/src/poly/ipa/commitment.rs deleted file mode 100644 index 7be053c49c..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/commitment.rs +++ /dev/null @@ -1,370 +0,0 @@ -//! This module contains an implementation of the polynomial commitment scheme -//! described in the [Halo][halo] paper. -//! -//! [halo]: https://eprint.iacr.org/2019/1021 - -use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize, CurveAffine, CurveExt}; -use crate::helpers::CurveRead; -use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; -use crate::poly::ipa::msm::MSMIPA; -use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; - -use group::{Curve, Group}; -use std::marker::PhantomData; - -mod prover; -mod verifier; - -pub use prover::create_proof; -pub use verifier::verify_proof; - -use std::io; - -/// Public parameters for IPA commitment scheme -#[derive(Debug, Clone)] -pub struct ParamsIPA { - pub(crate) k: u32, - pub(crate) n: u64, - pub(crate) g: Vec, - pub(crate) g_lagrange: Vec, - pub(crate) w: C, - pub(crate) u: C, -} - -/// Concrete IPA commitment scheme -#[derive(Debug)] -pub struct IPACommitmentScheme { - _marker: PhantomData, -} - -impl CommitmentScheme for IPACommitmentScheme { - type Scalar = C::ScalarExt; - type Curve = C; - - type ParamsProver = ParamsIPA; - type ParamsVerifier = ParamsVerifierIPA; - - fn new_params(k: u32) -> Self::ParamsProver { - ParamsIPA::new(k) - } - - fn read_params(reader: &mut R) -> io::Result { - ParamsIPA::read(reader) - } -} - -/// Verifier parameters -pub type ParamsVerifierIPA = ParamsIPA; - -impl<'params, C: CurveAffine> ParamsVerifier<'params, C> for ParamsIPA {} - -impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { - type MSM = MSMIPA<'params, C>; - - fn k(&self) -> u32 { - self.k - } - - fn n(&self) -> u64 { - self.n - } - - fn downsize(&mut self, k: u32) { - assert!(k <= 
self.k); - - self.k = k; - self.n = 1 << k; - self.g.truncate(self.n as usize); - self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); - } - - fn empty_msm(&'params self) -> MSMIPA { - MSMIPA::new(self) - } - - /// This commits to a polynomial using its evaluations over the $2^k$ size - /// evaluation domain. The commitment will be blinded by the blinding factor - /// `r`. - fn commit_lagrange( - &self, - poly: &Polynomial, - r: Blind, - ) -> C::Curve { - let mut tmp_scalars = Vec::with_capacity(poly.len() + 1); - let mut tmp_bases = Vec::with_capacity(poly.len() + 1); - - tmp_scalars.extend(poly.iter()); - tmp_scalars.push(r.0); - - tmp_bases.extend(self.g_lagrange.iter()); - tmp_bases.push(self.w); - - best_multiexp::(&tmp_scalars, &tmp_bases) - } - - /// Writes params to a buffer. - fn write(&self, writer: &mut W) -> io::Result<()> { - writer.write_all(&self.k.to_le_bytes())?; - for g_element in &self.g { - writer.write_all(g_element.to_bytes().as_ref())?; - } - for g_lagrange_element in &self.g_lagrange { - writer.write_all(g_lagrange_element.to_bytes().as_ref())?; - } - writer.write_all(self.w.to_bytes().as_ref())?; - writer.write_all(self.u.to_bytes().as_ref())?; - - Ok(()) - } - - /// Reads params from a buffer. - fn read(reader: &mut R) -> io::Result { - let mut k = [0u8; 4]; - reader.read_exact(&mut k[..])?; - let k = u32::from_le_bytes(k); - - let n: u64 = 1 << k; - - let g: Vec<_> = (0..n).map(|_| C::read(reader)).collect::>()?; - let g_lagrange: Vec<_> = (0..n).map(|_| C::read(reader)).collect::>()?; - - let w = C::read(reader)?; - let u = C::read(reader)?; - - Ok(Self { - k, - n, - g, - g_lagrange, - w, - u, - }) - } -} - -impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { - type ParamsVerifier = ParamsVerifierIPA; - - fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { - self - } - - /// Initializes parameters for the curve, given a random oracle to draw - /// points from. 
- fn new(k: u32) -> Self { - // This is usually a limitation on the curve, but we also want 32-bit - // architectures to be supported. - assert!(k < 32); - - // In src/arithmetic/fields.rs we ensure that usize is at least 32 bits. - - let n: u64 = 1 << k; - - let g_projective = { - let mut g = Vec::with_capacity(n as usize); - g.resize(n as usize, C::Curve::identity()); - - parallelize(&mut g, move |g, start| { - let hasher = C::CurveExt::hash_to_curve("Halo2-Parameters"); - - for (i, g) in g.iter_mut().enumerate() { - let i = (i + start) as u32; - - let mut message = [0u8; 5]; - message[1..5].copy_from_slice(&i.to_le_bytes()); - - *g = hasher(&message); - } - }); - - g - }; - - let g = { - let mut g = vec![C::identity(); n as usize]; - parallelize(&mut g, |g, starts| { - C::Curve::batch_normalize(&g_projective[starts..(starts + g.len())], g); - }); - g - }; - - // Let's evaluate all of the Lagrange basis polynomials - // using an inverse FFT. - let g_lagrange = g_to_lagrange(g_projective, k); - - let hasher = C::CurveExt::hash_to_curve("Halo2-Parameters"); - let w = hasher(&[1]).to_affine(); - let u = hasher(&[2]).to_affine(); - - ParamsIPA { - k, - n, - g, - g_lagrange, - w, - u, - } - } - - /// This computes a commitment to a polynomial described by the provided - /// slice of coefficients. The commitment will be blinded by the blinding - /// factor `r`. 
- fn commit(&self, poly: &Polynomial, r: Blind) -> C::Curve { - let mut tmp_scalars = Vec::with_capacity(poly.len() + 1); - let mut tmp_bases = Vec::with_capacity(poly.len() + 1); - - tmp_scalars.extend(poly.iter()); - tmp_scalars.push(r.0); - - tmp_bases.extend(self.g.iter()); - tmp_bases.push(self.w); - - best_multiexp::(&tmp_scalars, &tmp_bases) - } - - fn get_g(&self) -> &[C] { - &self.g - } -} - -#[cfg(test)] -mod test { - use crate::poly::commitment::ParamsProver; - use crate::poly::commitment::{Blind, Params, MSM}; - use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; - use crate::poly::ipa::msm::MSMIPA; - - use ff::Field; - use group::Curve; - - #[test] - fn test_commit_lagrange_epaffine() { - const K: u32 = 6; - - use rand_core::OsRng; - - use crate::poly::EvaluationDomain; - use halo2curves::pasta::{EpAffine, Fq}; - - let params = ParamsIPA::::new(K); - let domain = EvaluationDomain::new(1, K); - - let mut a = domain.empty_lagrange(); - - for (i, a) in a.iter_mut().enumerate() { - *a = Fq::from(i as u64); - } - - let b = domain.lagrange_to_coeff(a.clone()); - - let alpha = Blind(Fq::random(OsRng)); - - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); - } - - #[test] - fn test_commit_lagrange_eqaffine() { - const K: u32 = 6; - - use rand_core::OsRng; - - use crate::poly::EvaluationDomain; - use halo2curves::pasta::{EqAffine, Fp}; - - let params: ParamsIPA = ParamsIPA::::new(K); - let domain = EvaluationDomain::new(1, K); - - let mut a = domain.empty_lagrange(); - - for (i, a) in a.iter_mut().enumerate() { - *a = Fp::from(i as u64); - } - - let b = domain.lagrange_to_coeff(a.clone()); - - let alpha = Blind(Fp::random(OsRng)); - - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); - } - - #[test] - fn test_opening_proof() { - const K: u32 = 6; - - use ff::Field; - use rand_core::OsRng; - - use super::super::commitment::{Blind, Params}; - use crate::arithmetic::eval_polynomial; - use 
crate::halo2curves::pasta::{EpAffine, Fq}; - use crate::poly::EvaluationDomain; - use crate::transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, Transcript, TranscriptRead, TranscriptWrite, - }; - - use crate::transcript::TranscriptReadBuffer; - use crate::transcript::TranscriptWriterBuffer; - - let rng = OsRng; - - let params = ParamsIPA::::new(K); - let mut params_buffer = vec![]; - as Params<_>>::write(¶ms, &mut params_buffer).unwrap(); - let params: ParamsIPA = Params::read::<_>(&mut ¶ms_buffer[..]).unwrap(); - - let domain = EvaluationDomain::new(1, K); - - let mut px = domain.empty_coeff(); - - for (i, a) in px.iter_mut().enumerate() { - *a = Fq::from(i as u64); - } - - let blind = Blind(Fq::random(rng)); - - let p = params.commit(&px, blind).to_affine(); - - let mut transcript = - Blake2bWrite::, EpAffine, Challenge255>::init(vec![]); - transcript.write_point(p).unwrap(); - let x = transcript.squeeze_challenge_scalar::<()>(); - // Evaluate the polynomial - let v = eval_polynomial(&px, *x); - transcript.write_scalar(v).unwrap(); - - let (proof, ch_prover) = { - create_proof(¶ms, rng, &mut transcript, &px, blind, *x).unwrap(); - let ch_prover = transcript.squeeze_challenge(); - (transcript.finalize(), ch_prover) - }; - - // Verify the opening proof - let mut transcript = - Blake2bRead::<&[u8], EpAffine, Challenge255>::init(&proof[..]); - let p_prime = transcript.read_point().unwrap(); - assert_eq!(p, p_prime); - let x_prime = transcript.squeeze_challenge_scalar::<()>(); - assert_eq!(*x, *x_prime); - let v_prime = transcript.read_scalar().unwrap(); - assert_eq!(v, v_prime); - - let mut commitment_msm = MSMIPA::new(¶ms); - commitment_msm.append_term(Fq::one(), p.into()); - - let guard = verify_proof(¶ms, commitment_msm, &mut transcript, *x, v).unwrap(); - let ch_verifier = transcript.squeeze_challenge(); - assert_eq!(*ch_prover, *ch_verifier); - - // Test guard behavior prior to checking another proof - { - // Test use_challenges() - let msm_challenges = 
guard.clone().use_challenges(); - assert!(msm_challenges.check()); - - // Test use_g() - let g = guard.compute_g(); - let (msm_g, _accumulator) = guard.clone().use_g(g); - assert!(msm_g.check()); - } - } -} diff --git a/halo2_proofs_rm/src/poly/ipa/commitment/prover.rs b/halo2_proofs_rm/src/poly/ipa/commitment/prover.rs deleted file mode 100644 index 344dbc0e65..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/commitment/prover.rs +++ /dev/null @@ -1,167 +0,0 @@ -use ff::Field; -use rand_core::RngCore; - -use super::ParamsIPA; -use crate::arithmetic::{ - best_multiexp, compute_inner_product, eval_polynomial, parallelize, CurveAffine, -}; - -use crate::poly::commitment::ParamsProver; -use crate::poly::{commitment::Blind, Coeff, Polynomial}; -use crate::transcript::{EncodedChallenge, TranscriptWrite}; - -use group::Curve; -use std::io::{self}; - -/// Create a polynomial commitment opening proof for the polynomial defined -/// by the coefficients `px`, the blinding factor `blind` used for the -/// polynomial commitment, and the point `x` that the polynomial is -/// evaluated at. -/// -/// This function will panic if the provided polynomial is too large with -/// respect to the polynomial commitment parameters. -/// -/// **Important:** This function assumes that the provided `transcript` has -/// already seen the common inputs: the polynomial commitment P, the claimed -/// opening v, and the point x. It's probably also nice for the transcript -/// to have seen the elliptic curve description and the URS, if you want to -/// be rigorous. -pub fn create_proof< - C: CurveAffine, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, ->( - params: &ParamsIPA, - mut rng: R, - transcript: &mut T, - p_poly: &Polynomial, - p_blind: Blind, - x_3: C::Scalar, -) -> io::Result<()> { - // We're limited to polynomials of degree n - 1. 
- assert_eq!(p_poly.len(), params.n as usize); - - // Sample a random polynomial (of same degree) that has a root at x_3, first - // by setting all coefficients to random values. - let mut s_poly = (*p_poly).clone(); - for coeff in s_poly.iter_mut() { - *coeff = C::Scalar::random(&mut rng); - } - // Evaluate the random polynomial at x_3 - let s_at_x3 = eval_polynomial(&s_poly[..], x_3); - // Subtract constant coefficient to get a random polynomial with a root at x_3 - s_poly[0] -= &s_at_x3; - // And sample a random blind - let s_poly_blind = Blind(C::Scalar::random(&mut rng)); - - // Write a commitment to the random polynomial to the transcript - let s_poly_commitment = params.commit(&s_poly, s_poly_blind).to_affine(); - transcript.write_point(s_poly_commitment)?; - - // Challenge that will ensure that the prover cannot change P but can only - // witness a random polynomial commitment that agrees with P at x_3, with high - // probability. - let xi = *transcript.squeeze_challenge_scalar::<()>(); - - // Challenge that ensures that the prover did not interfere with the U term - // in their commitments. - let z = *transcript.squeeze_challenge_scalar::<()>(); - - // We'll be opening `P' = P - [v] G_0 + [ξ] S` to ensure it has a root at - // zero. - let mut p_prime_poly = s_poly * xi + p_poly; - let v = eval_polynomial(&p_prime_poly, x_3); - p_prime_poly[0] -= &v; - let p_prime_blind = s_poly_blind * Blind(xi) + p_blind; - - // This accumulates the synthetic blinding factor `f` starting - // with the blinding factor for `P'`. - let mut f = p_prime_blind.0; - - // Initialize the vector `p_prime` as the coefficients of the polynomial. - let mut p_prime = p_prime_poly.values; - assert_eq!(p_prime.len(), params.n as usize); - - // Initialize the vector `b` as the powers of `x_3`. The inner product of - // `p_prime` and `b` is the evaluation of the polynomial at `x_3`. 
- let mut b = Vec::with_capacity(1 << params.k); - { - let mut cur = C::Scalar::ONE; - for _ in 0..(1 << params.k) { - b.push(cur); - cur *= &x_3; - } - } - - // Initialize the vector `G'` from the URS. We'll be progressively collapsing - // this vector into smaller and smaller vectors until it is of length 1. - let mut g_prime = params.g.clone(); - - // Perform the inner product argument, round by round. - for j in 0..params.k { - let half = 1 << (params.k - j - 1); // half the length of `p_prime`, `b`, `G'` - - // Compute L, R - // - // TODO: If we modify multiexp to take "extra" bases, we could speed - // this piece up a bit by combining the multiexps. - let l_j = best_multiexp(&p_prime[half..], &g_prime[0..half]); - let r_j = best_multiexp(&p_prime[0..half], &g_prime[half..]); - let value_l_j = compute_inner_product(&p_prime[half..], &b[0..half]); - let value_r_j = compute_inner_product(&p_prime[0..half], &b[half..]); - let l_j_randomness = C::Scalar::random(&mut rng); - let r_j_randomness = C::Scalar::random(&mut rng); - let l_j = l_j + &best_multiexp(&[value_l_j * &z, l_j_randomness], &[params.u, params.w]); - let r_j = r_j + &best_multiexp(&[value_r_j * &z, r_j_randomness], &[params.u, params.w]); - let l_j = l_j.to_affine(); - let r_j = r_j.to_affine(); - - // Feed L and R into the real transcript - transcript.write_point(l_j)?; - transcript.write_point(r_j)?; - - let u_j = *transcript.squeeze_challenge_scalar::<()>(); - let u_j_inv = u_j.invert().unwrap(); // TODO, bubble this up - - // Collapse `p_prime` and `b`. 
- // TODO: parallelize - for i in 0..half { - p_prime[i] = p_prime[i] + &(p_prime[i + half] * &u_j_inv); - b[i] = b[i] + &(b[i + half] * &u_j); - } - p_prime.truncate(half); - b.truncate(half); - - // Collapse `G'` - parallel_generator_collapse(&mut g_prime, u_j); - g_prime.truncate(half); - - // Update randomness (the synthetic blinding factor at the end) - f += &(l_j_randomness * &u_j_inv); - f += &(r_j_randomness * &u_j); - } - - // We have fully collapsed `p_prime`, `b`, `G'` - assert_eq!(p_prime.len(), 1); - let c = p_prime[0]; - - transcript.write_scalar(c)?; - transcript.write_scalar(f)?; - - Ok(()) -} - -fn parallel_generator_collapse(g: &mut [C], challenge: C::Scalar) { - let len = g.len() / 2; - let (g_lo, g_hi) = g.split_at_mut(len); - - parallelize(g_lo, |g_lo, start| { - let g_hi = &g_hi[start..]; - let mut tmp = Vec::with_capacity(g_lo.len()); - for (g_lo, g_hi) in g_lo.iter().zip(g_hi.iter()) { - tmp.push(g_lo.to_curve() + &(*g_hi * challenge)); - } - C::Curve::batch_normalize(&tmp, g_lo); - }); -} diff --git a/halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs b/halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs deleted file mode 100644 index cf258625d5..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/commitment/verifier.rs +++ /dev/null @@ -1,100 +0,0 @@ -use group::ff::{BatchInvert, Field}; - -use super::ParamsIPA; -use crate::{arithmetic::CurveAffine, poly::ipa::strategy::GuardIPA}; -use crate::{ - poly::{commitment::MSM, ipa::msm::MSMIPA, Error}, - transcript::{EncodedChallenge, TranscriptRead}, -}; - -/// Checks to see if the proof represented within `transcript` is valid, and a -/// point `x` that the polynomial commitment `P` opens purportedly to the value -/// `v`. The provided `msm` should evaluate to the commitment `P` being opened. 
-pub fn verify_proof<'params, C: CurveAffine, E: EncodedChallenge, T: TranscriptRead>( - params: &'params ParamsIPA, - mut msm: MSMIPA<'params, C>, - transcript: &mut T, - x: C::Scalar, - v: C::Scalar, -) -> Result, Error> { - let k = params.k as usize; - - // P' = P - [v] G_0 + [ξ] S - msm.add_constant_term(-v); // add [-v] G_0 - let s_poly_commitment = transcript.read_point().map_err(|_| Error::OpeningError)?; - let xi = *transcript.squeeze_challenge_scalar::<()>(); - msm.append_term(xi, s_poly_commitment.into()); - - let z = *transcript.squeeze_challenge_scalar::<()>(); - - let mut rounds = vec![]; - for _ in 0..k { - // Read L and R from the proof and write them to the transcript - let l = transcript.read_point().map_err(|_| Error::OpeningError)?; - let r = transcript.read_point().map_err(|_| Error::OpeningError)?; - - let u_j_packed = transcript.squeeze_challenge(); - let u_j = *u_j_packed.as_challenge_scalar::<()>(); - - rounds.push((l, r, u_j, /* to be inverted */ u_j, u_j_packed)); - } - - rounds - .iter_mut() - .map(|&mut (_, _, _, ref mut u_j, _)| u_j) - .batch_invert(); - - // This is the left-hand side of the verifier equation. - // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) - let mut u = Vec::with_capacity(k); - let mut u_packed: Vec = Vec::with_capacity(k); - for (l, r, u_j, u_j_inv, u_j_packed) in rounds { - msm.append_term(u_j_inv, l.into()); - msm.append_term(u_j, r.into()); - - u.push(u_j); - u_packed.push(u_j_packed.get_scalar()); - } - - // Our goal is to check that the left hand side of the verifier - // equation - // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) - // equals (given b = \mathbf{b}_0, and the prover's values c, f), - // the right-hand side - // = [c] (G'_0 + [b * z] U) + [f] W - // Subtracting the right-hand side from both sides we get - // P' + \sum([u_j^{-1}] L_j) + \sum([u_j] R_j) - // + [-c] G'_0 + [-cbz] U + [-f] W - // = 0 - // - // Note that the guard returned from this function does not include - // the [-c]G'_0 term. 
- - let c = transcript.read_scalar().map_err(|_| Error::SamplingError)?; - let neg_c = -c; - let f = transcript.read_scalar().map_err(|_| Error::SamplingError)?; - let b = compute_b(x, &u); - - msm.add_to_u_scalar(neg_c * &b * &z); - msm.add_to_w_scalar(-f); - - let guard = GuardIPA { - msm, - neg_c, - u, - u_packed, - }; - - Ok(guard) -} - -/// Computes $\prod\limits_{i=0}^{k-1} (1 + u_{k - 1 - i} x^{2^i})$. -fn compute_b(x: F, u: &[F]) -> F { - let mut tmp = F::ONE; - let mut cur = x; - for u_j in u.iter().rev() { - tmp *= F::ONE + &(*u_j * &cur); - cur *= cur; - } - tmp -} diff --git a/halo2_proofs_rm/src/poly/ipa/mod.rs b/halo2_proofs_rm/src/poly/ipa/mod.rs deleted file mode 100644 index 3600e2f051..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -pub mod commitment; -/// Multiscalar multiplication engines -pub mod msm; -/// IPA multi-open scheme -pub mod multiopen; -/// Strategies used with KZG scheme -pub mod strategy; diff --git a/halo2_proofs_rm/src/poly/ipa/msm.rs b/halo2_proofs_rm/src/poly/ipa/msm.rs deleted file mode 100644 index a615ddce49..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/msm.rs +++ /dev/null @@ -1,271 +0,0 @@ -use crate::arithmetic::{best_multiexp, CurveAffine}; -use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; -use ff::Field; -use group::Group; -use std::collections::BTreeMap; - -/// A multiscalar multiplication in the polynomial commitment scheme -#[derive(Debug, Clone)] -pub struct MSMIPA<'params, C: CurveAffine> { - pub(crate) params: &'params ParamsVerifierIPA, - g_scalars: Option>, - w_scalar: Option, - u_scalar: Option, - // x-coordinate -> (scalar, y-coordinate) - other: BTreeMap, -} - -impl<'a, C: CurveAffine> MSMIPA<'a, C> { - /// Given verifier parameters Creates an empty multi scalar engine - pub fn new(params: &'a ParamsVerifierIPA) -> Self { - let g_scalars = None; - let w_scalar = None; - let u_scalar = None; - let other = BTreeMap::new(); - - Self { - g_scalars, - 
w_scalar, - u_scalar, - other, - - params, - } - } - - /// Add another multiexp into this one - pub fn add_msm(&mut self, other: &Self) { - for (x, (scalar, y)) in other.other.iter() { - self.other - .entry(*x) - .and_modify(|(our_scalar, our_y)| { - if our_y == y { - *our_scalar += *scalar; - } else { - assert!(*our_y == -*y); - *our_scalar -= *scalar; - } - }) - .or_insert((*scalar, *y)); - } - - if let Some(g_scalars) = &other.g_scalars { - self.add_to_g_scalars(g_scalars); - } - - if let Some(w_scalar) = &other.w_scalar { - self.add_to_w_scalar(*w_scalar); - } - - if let Some(u_scalar) = &other.u_scalar { - self.add_to_u_scalar(*u_scalar); - } - } -} - -impl<'a, C: CurveAffine> MSM for MSMIPA<'a, C> { - fn append_term(&mut self, scalar: C::Scalar, point: C::Curve) { - if !bool::from(point.is_identity()) { - use group::Curve; - let point = point.to_affine(); - let xy = point.coordinates().unwrap(); - let x = *xy.x(); - let y = *xy.y(); - - self.other - .entry(x) - .and_modify(|(our_scalar, our_y)| { - if *our_y == y { - *our_scalar += scalar; - } else { - assert!(*our_y == -y); - *our_scalar -= scalar; - } - }) - .or_insert((scalar, y)); - } - } - - /// Add another multiexp into this one - fn add_msm(&mut self, other: &Self) { - for (x, (scalar, y)) in other.other.iter() { - self.other - .entry(*x) - .and_modify(|(our_scalar, our_y)| { - if our_y == y { - *our_scalar += *scalar; - } else { - assert!(*our_y == -*y); - *our_scalar -= *scalar; - } - }) - .or_insert((*scalar, *y)); - } - - if let Some(g_scalars) = &other.g_scalars { - self.add_to_g_scalars(g_scalars); - } - - if let Some(w_scalar) = &other.w_scalar { - self.add_to_w_scalar(*w_scalar); - } - - if let Some(u_scalar) = &other.u_scalar { - self.add_to_u_scalar(*u_scalar); - } - } - - fn scale(&mut self, factor: C::Scalar) { - if let Some(g_scalars) = &mut self.g_scalars { - for g_scalar in g_scalars { - *g_scalar *= &factor; - } - } - - for other in self.other.values_mut() { - other.0 *= factor; - } - - 
self.w_scalar = self.w_scalar.map(|a| a * &factor); - self.u_scalar = self.u_scalar.map(|a| a * &factor); - } - - fn check(&self) -> bool { - bool::from(self.eval().is_identity()) - } - - fn eval(&self) -> C::Curve { - let len = self.g_scalars.as_ref().map(|v| v.len()).unwrap_or(0) - + self.w_scalar.map(|_| 1).unwrap_or(0) - + self.u_scalar.map(|_| 1).unwrap_or(0) - + self.other.len(); - let mut scalars: Vec = Vec::with_capacity(len); - let mut bases: Vec = Vec::with_capacity(len); - - scalars.extend(self.other.values().map(|(scalar, _)| scalar)); - bases.extend( - self.other - .iter() - .map(|(x, (_, y))| C::from_xy(*x, *y).unwrap()), - ); - - if let Some(w_scalar) = self.w_scalar { - scalars.push(w_scalar); - bases.push(self.params.w); - } - - if let Some(u_scalar) = self.u_scalar { - scalars.push(u_scalar); - bases.push(self.params.u); - } - - if let Some(g_scalars) = &self.g_scalars { - scalars.extend(g_scalars); - bases.extend(self.params.g.iter()); - } - - assert_eq!(scalars.len(), len); - - best_multiexp(&scalars, &bases) - } - - fn bases(&self) -> Vec { - self.other - .iter() - .map(|(x, (_, y))| C::from_xy(*x, *y).unwrap().into()) - .collect() - } - - fn scalars(&self) -> Vec { - self.other.values().map(|(scalar, _)| *scalar).collect() - } -} - -impl<'a, C: CurveAffine> MSMIPA<'a, C> { - /// Add a value to the first entry of `g_scalars`. - pub fn add_constant_term(&mut self, constant: C::Scalar) { - if let Some(g_scalars) = self.g_scalars.as_mut() { - g_scalars[0] += &constant; - } else { - let mut g_scalars = vec![C::Scalar::ZERO; self.params.n as usize]; - g_scalars[0] += &constant; - self.g_scalars = Some(g_scalars); - } - } - - /// Add a vector of scalars to `g_scalars`. This function will panic if the - /// caller provides a slice of scalars that is not of length `params.n`. 
- pub fn add_to_g_scalars(&mut self, scalars: &[C::Scalar]) { - assert_eq!(scalars.len(), self.params.n as usize); - if let Some(g_scalars) = &mut self.g_scalars { - for (g_scalar, scalar) in g_scalars.iter_mut().zip(scalars.iter()) { - *g_scalar += scalar; - } - } else { - self.g_scalars = Some(scalars.to_vec()); - } - } - /// Add to `w_scalar` - pub fn add_to_w_scalar(&mut self, scalar: C::Scalar) { - self.w_scalar = self.w_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); - } - - /// Add to `u_scalar` - pub fn add_to_u_scalar(&mut self, scalar: C::Scalar) { - self.u_scalar = self.u_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); - } -} - -#[cfg(test)] -mod tests { - use crate::poly::{ - commitment::{ParamsProver, MSM}, - ipa::{commitment::ParamsIPA, msm::MSMIPA}, - }; - use halo2curves::{ - pasta::{Ep, EpAffine, Fp, Fq}, - CurveAffine, - }; - - #[test] - fn msm_arithmetic() { - let base: Ep = EpAffine::from_xy(-Fp::one(), Fp::from(2)).unwrap().into(); - let base_viol = base + base; - - let params = ParamsIPA::new(4); - let mut a: MSMIPA = MSMIPA::new(¶ms); - a.append_term(Fq::one(), base); - // a = [1] P - assert!(!a.clone().check()); - a.append_term(Fq::one(), base); - // a = [1+1] P - assert!(!a.clone().check()); - a.append_term(-Fq::one(), base_viol); - // a = [1+1] P + [-1] 2P - assert!(a.clone().check()); - let b = a.clone(); - - // Append a point that is the negation of an existing one. - a.append_term(Fq::from(4), -base); - // a = [1+1-4] P + [-1] 2P - assert!(!a.clone().check()); - a.append_term(Fq::from(2), base_viol); - // a = [1+1-4] P + [-1+2] 2P - assert!(a.clone().check()); - - // Add two MSMs with common bases. 
- a.scale(Fq::from(3)); - a.add_msm(&b); - // a = [3*(1+1)+(1+1-4)] P + [3*(-1)+(-1+2)] 2P - assert!(a.clone().check()); - - let mut c: MSMIPA = MSMIPA::new(¶ms); - c.append_term(Fq::from(2), base); - c.append_term(Fq::one(), -base_viol); - // c = [2] P + [1] (-2P) - assert!(c.clone().check()); - // Add two MSMs with bases that differ only in sign. - a.add_msm(&c); - assert!(a.check()); - } -} diff --git a/halo2_proofs_rm/src/poly/ipa/multiopen.rs b/halo2_proofs_rm/src/poly/ipa/multiopen.rs deleted file mode 100644 index b78acb5934..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/multiopen.rs +++ /dev/null @@ -1,172 +0,0 @@ -//! This module contains an optimisation of the polynomial commitment opening -//! scheme described in the [Halo][halo] paper. -//! -//! [halo]: https://eprint.iacr.org/2019/1021 - -use super::*; -use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; -use std::collections::{BTreeMap, BTreeSet}; - -mod prover; -mod verifier; - -pub use prover::ProverIPA; -pub use verifier::VerifierIPA; - -#[derive(Clone, Copy, Debug)] -struct X1 {} -/// Challenge for compressing openings at the same point sets together. -type ChallengeX1 = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X2 {} -/// Challenge for keeping the multi-point quotient polynomial terms linearly independent. -type ChallengeX2 = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X3 {} -/// Challenge point at which the commitments are opened. -type ChallengeX3 = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X4 {} -/// Challenge for collapsing the openings of the various remaining polynomials at x_3 -/// together. 
-type ChallengeX4 = ChallengeScalar; - -#[derive(Debug)] -struct CommitmentData { - pub(crate) commitment: T, - pub(crate) set_index: usize, - pub(crate) point_indices: Vec, - pub(crate) evals: Vec, -} - -impl CommitmentData { - fn new(commitment: T) -> Self { - CommitmentData { - commitment, - set_index: 0, - point_indices: vec![], - evals: vec![], - } - } -} - -type IntermediateSets = ( - Vec>::Eval, >::Commitment>>, - Vec>, -); - -fn construct_intermediate_sets>(queries: I) -> IntermediateSets -where - I: IntoIterator + Clone, -{ - // Construct sets of unique commitments and corresponding information about - // their queries. - let mut commitment_map: Vec> = vec![]; - - // Also construct mapping from a unique point to a point_index. This defines - // an ordering on the points. - let mut point_index_map = BTreeMap::new(); - - // Iterate over all of the queries, computing the ordering of the points - // while also creating new commitment data. - for query in queries.clone() { - let num_points = point_index_map.len(); - let point_idx = point_index_map - .entry(query.get_point()) - .or_insert(num_points); - - if let Some(pos) = commitment_map - .iter() - .position(|comm| comm.commitment == query.get_commitment()) - { - commitment_map[pos].point_indices.push(*point_idx); - } else { - let mut tmp = CommitmentData::new(query.get_commitment()); - tmp.point_indices.push(*point_idx); - commitment_map.push(tmp); - } - } - - // Also construct inverse mapping from point_index to the point - let mut inverse_point_index_map = BTreeMap::new(); - for (&point, &point_index) in point_index_map.iter() { - inverse_point_index_map.insert(point_index, point); - } - - // Construct map of unique ordered point_idx_sets to their set_idx - let mut point_idx_sets = BTreeMap::new(); - // Also construct mapping from commitment to point_idx_set - let mut commitment_set_map = Vec::new(); - - for commitment_data in commitment_map.iter() { - let mut point_index_set = BTreeSet::new(); - // Note 
that point_index_set is ordered, unlike point_indices - for &point_index in commitment_data.point_indices.iter() { - point_index_set.insert(point_index); - } - - // Push point_index_set to CommitmentData for the relevant commitment - commitment_set_map.push((commitment_data.commitment, point_index_set.clone())); - - let num_sets = point_idx_sets.len(); - point_idx_sets.entry(point_index_set).or_insert(num_sets); - } - - // Initialise empty evals vec for each unique commitment - for commitment_data in commitment_map.iter_mut() { - let len = commitment_data.point_indices.len(); - commitment_data.evals = vec![Q::Eval::default(); len]; - } - - // Populate set_index, evals and points for each commitment using point_idx_sets - for query in queries { - // The index of the point at which the commitment is queried - let point_index = point_index_map.get(&query.get_point()).unwrap(); - - // The point_index_set at which the commitment was queried - let mut point_index_set = BTreeSet::new(); - for (commitment, point_idx_set) in commitment_set_map.iter() { - if query.get_commitment() == *commitment { - point_index_set = point_idx_set.clone(); - } - } - assert!(!point_index_set.is_empty()); - - // The set_index of the point_index_set - let set_index = point_idx_sets.get(&point_index_set).unwrap(); - for commitment_data in commitment_map.iter_mut() { - if query.get_commitment() == commitment_data.commitment { - commitment_data.set_index = *set_index; - } - } - let point_index_set: Vec = point_index_set.iter().cloned().collect(); - - // The offset of the point_index in the point_index_set - let point_index_in_set = point_index_set - .iter() - .position(|i| i == point_index) - .unwrap(); - - for commitment_data in commitment_map.iter_mut() { - if query.get_commitment() == commitment_data.commitment { - // Insert the eval using the ordering of the point_index_set - commitment_data.evals[point_index_in_set] = query.get_eval(); - } - } - } - - // Get actual points in each point set - 
let mut point_sets: Vec> = vec![Vec::new(); point_idx_sets.len()]; - for (point_idx_set, &set_idx) in point_idx_sets.iter() { - for &point_idx in point_idx_set.iter() { - let point = inverse_point_index_map.get(&point_idx).unwrap(); - point_sets[set_idx].push(*point); - } - } - - (commitment_map, point_sets) -} diff --git a/halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs b/halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs deleted file mode 100644 index 2ae745d457..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/multiopen/prover.rs +++ /dev/null @@ -1,122 +0,0 @@ -use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; -use crate::arithmetic::{eval_polynomial, kate_division, CurveAffine}; -use crate::poly::commitment::ParamsProver; -use crate::poly::commitment::{Blind, Prover}; -use crate::poly::ipa::commitment::{self, IPACommitmentScheme, ParamsIPA}; -use crate::poly::query::ProverQuery; -use crate::poly::{Coeff, Polynomial}; -use crate::transcript::{EncodedChallenge, TranscriptWrite}; - -use ff::Field; -use group::Curve; -use rand_core::RngCore; -use std::io; -use std::marker::PhantomData; - -/// IPA multi-open prover -#[derive(Debug)] -pub struct ProverIPA<'params, C: CurveAffine> { - pub(crate) params: &'params ParamsIPA, -} - -impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for ProverIPA<'params, C> { - const QUERY_INSTANCE: bool = true; - - fn new(params: &'params ParamsIPA) -> Self { - Self { params } - } - - /// Create a multi-opening proof - fn create_proof<'com, Z: EncodedChallenge, T: TranscriptWrite, R, I>( - &self, - mut rng: R, - transcript: &mut T, - queries: I, - ) -> io::Result<()> - where - I: IntoIterator> + Clone, - R: RngCore, - { - let x_1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); - let x_2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); - - let (poly_map, point_sets) = construct_intermediate_sets(queries); - - // Collapse openings at same point sets together into 
single openings using - // x_1 challenge. - let mut q_polys: Vec>> = vec![None; point_sets.len()]; - let mut q_blinds = vec![Blind(C::Scalar::ZERO); point_sets.len()]; - - { - let mut accumulate = |set_idx: usize, - new_poly: &Polynomial, - blind: Blind| { - if let Some(poly) = &q_polys[set_idx] { - q_polys[set_idx] = Some(poly.clone() * *x_1 + new_poly); - } else { - q_polys[set_idx] = Some(new_poly.clone()); - } - q_blinds[set_idx] *= *x_1; - q_blinds[set_idx] += blind; - }; - - for commitment_data in poly_map.into_iter() { - accumulate( - commitment_data.set_index, // set_idx, - commitment_data.commitment.poly, // poly, - commitment_data.commitment.blind, // blind, - ); - } - } - - let q_prime_poly = point_sets - .iter() - .zip(q_polys.iter()) - .fold(None, |q_prime_poly, (points, poly)| { - let mut poly = points - .iter() - .fold(poly.clone().unwrap().values, |poly, point| { - kate_division(&poly, *point) - }); - poly.resize(self.params.n as usize, C::Scalar::ZERO); - let poly = Polynomial { - values: poly, - _marker: PhantomData, - }; - - if q_prime_poly.is_none() { - Some(poly) - } else { - q_prime_poly.map(|q_prime_poly| q_prime_poly * *x_2 + &poly) - } - }) - .unwrap(); - - let q_prime_blind = Blind(C::Scalar::random(&mut rng)); - let q_prime_commitment = self.params.commit(&q_prime_poly, q_prime_blind).to_affine(); - - transcript.write_point(q_prime_commitment)?; - - let x_3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); - - // Prover sends u_i for all i, which correspond to the evaluation - // of each Q polynomial commitment at x_3. 
- for q_i_poly in &q_polys { - transcript.write_scalar(eval_polynomial(q_i_poly.as_ref().unwrap(), *x_3))?; - } - - let x_4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); - - let (p_poly, p_poly_blind) = q_polys.into_iter().zip(q_blinds).fold( - (q_prime_poly, q_prime_blind), - |(q_prime_poly, q_prime_blind), (poly, blind)| { - ( - q_prime_poly * *x_4 + &poly.unwrap(), - Blind((q_prime_blind.0 * &(*x_4)) + &blind.0), - ) - }, - ); - - commitment::create_proof(self.params, rng, transcript, &p_poly, p_poly_blind, *x_3) - } -} diff --git a/halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs b/halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs deleted file mode 100644 index d559e33384..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/multiopen/verifier.rs +++ /dev/null @@ -1,148 +0,0 @@ -use std::fmt::Debug; - -use ff::Field; - -use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; -use crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; -use crate::poly::commitment::{Params, Verifier, MSM}; -use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA, ParamsVerifierIPA}; -use crate::poly::ipa::msm::MSMIPA; -use crate::poly::ipa::strategy::GuardIPA; -use crate::poly::query::{CommitmentReference, VerifierQuery}; -use crate::poly::Error; -use crate::transcript::{EncodedChallenge, TranscriptRead}; - -/// IPA multi-open verifier -#[derive(Debug)] -pub struct VerifierIPA<'params, C: CurveAffine> { - params: &'params ParamsIPA, -} - -impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> - for VerifierIPA<'params, C> -{ - type Guard = GuardIPA<'params, C>; - type MSMAccumulator = MSMIPA<'params, C>; - - const QUERY_INSTANCE: bool = true; - - fn new(params: &'params ParamsVerifierIPA) -> Self { - Self { params } - } - - fn verify_proof<'com, E: EncodedChallenge, T: TranscriptRead, I>( - &self, - transcript: &mut T, - queries: I, - mut msm: MSMIPA<'params, C>, - ) -> Result - where - 
'params: 'com, - I: IntoIterator>> + Clone, - { - // Sample x_1 for compressing openings at the same point sets together - let x_1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); - - // Sample a challenge x_2 for keeping the multi-point quotient - // polynomial terms linearly independent. - let x_2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); - - let (commitment_map, point_sets) = construct_intermediate_sets(queries); - - // Compress the commitments and expected evaluations at x together. - // using the challenge x_1 - let mut q_commitments: Vec<_> = vec![ - (self.params.empty_msm(), C::Scalar::ONE); // (accumulator, next x_1 power). - point_sets.len()]; - - // A vec of vecs of evals. The outer vec corresponds to the point set, - // while the inner vec corresponds to the points in a particular set. - let mut q_eval_sets = Vec::with_capacity(point_sets.len()); - for point_set in point_sets.iter() { - q_eval_sets.push(vec![C::Scalar::ZERO; point_set.len()]); - } - - { - let mut accumulate = |set_idx: usize, - new_commitment: CommitmentReference>, - evals: Vec| { - let (q_commitment, x_1_power) = &mut q_commitments[set_idx]; - match new_commitment { - CommitmentReference::Commitment(c) => { - q_commitment.append_term(*x_1_power, (*c).into()); - } - CommitmentReference::MSM(msm) => { - let mut msm = msm.clone(); - msm.scale(*x_1_power); - q_commitment.add_msm(&msm); - } - } - for (eval, set_eval) in evals.iter().zip(q_eval_sets[set_idx].iter_mut()) { - *set_eval += (*eval) * (*x_1_power); - } - *x_1_power *= *x_1; - }; - - // Each commitment corresponds to evaluations at a set of points. - // For each set, we collapse each commitment's evals pointwise. - // Run in order of increasing x_1 powers. 
- for commitment_data in commitment_map.into_iter().rev() { - accumulate( - commitment_data.set_index, // set_idx, - commitment_data.commitment, // commitment, - commitment_data.evals, // evals - ); - } - } - - // Obtain the commitment to the multi-point quotient polynomial f(X). - let q_prime_commitment = transcript.read_point().map_err(|_| Error::SamplingError)?; - - // Sample a challenge x_3 for checking that f(X) was committed to - // correctly. - let x_3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); - - // u is a vector containing the evaluations of the Q polynomial - // commitments at x_3 - let mut u = Vec::with_capacity(q_eval_sets.len()); - for _ in 0..q_eval_sets.len() { - u.push(transcript.read_scalar().map_err(|_| Error::SamplingError)?); - } - - // We can compute the expected msm_eval at x_3 using the u provided - // by the prover and from x_2 - let msm_eval = point_sets - .iter() - .zip(q_eval_sets.iter()) - .zip(u.iter()) - .fold( - C::Scalar::ZERO, - |msm_eval, ((points, evals), proof_eval)| { - let r_poly = lagrange_interpolate(points, evals); - let r_eval = eval_polynomial(&r_poly, *x_3); - let eval = points.iter().fold(*proof_eval - &r_eval, |eval, point| { - eval * &(*x_3 - point).invert().unwrap() - }); - msm_eval * &(*x_2) + &eval - }, - ); - - // Sample a challenge x_4 that we will use to collapse the openings of - // the various remaining polynomials at x_3 together. 
- let x_4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); - - // Compute the final commitment that has to be opened - msm.append_term(C::Scalar::ONE, q_prime_commitment.into()); - let (msm, v) = q_commitments.into_iter().zip(u.iter()).fold( - (msm, msm_eval), - |(mut msm, msm_eval), ((q_commitment, _), q_eval)| { - msm.scale(*x_4); - msm.add_msm(&q_commitment); - (msm, msm_eval * &(*x_4) + q_eval) - }, - ); - - // Verify the opening proof - super::commitment::verify_proof(self.params, msm, transcript, *x_3, v) - } -} diff --git a/halo2_proofs_rm/src/poly/ipa/strategy.rs b/halo2_proofs_rm/src/poly/ipa/strategy.rs deleted file mode 100644 index d2d1b3d364..0000000000 --- a/halo2_proofs_rm/src/poly/ipa/strategy.rs +++ /dev/null @@ -1,171 +0,0 @@ -use super::commitment::{IPACommitmentScheme, ParamsIPA}; -use super::msm::MSMIPA; -use super::multiopen::VerifierIPA; -use crate::{ - arithmetic::best_multiexp, - plonk::Error, - poly::{ - commitment::MSM, - strategy::{Guard, VerificationStrategy}, - }, -}; -use ff::Field; -use group::Curve; -use halo2curves::CurveAffine; -use rand_core::OsRng; - -/// Wrapper for verification accumulator -#[derive(Debug, Clone)] -pub struct GuardIPA<'params, C: CurveAffine> { - pub(crate) msm: MSMIPA<'params, C>, - pub(crate) neg_c: C::Scalar, - pub(crate) u: Vec, - pub(crate) u_packed: Vec, -} - -/// An accumulator instance consisting of an evaluation claim and a proof. -#[derive(Debug, Clone)] -pub struct Accumulator { - /// The claimed output of the linear-time polycommit opening protocol - pub g: C, - - /// A vector of challenges u_0, ..., u_{k - 1} sampled by the verifier, to - /// be used in computing G'_0. 
- pub u_packed: Vec, -} - -/// Define accumulator type as `MSMIPA` -impl<'params, C: CurveAffine> Guard> for GuardIPA<'params, C> { - type MSMAccumulator = MSMIPA<'params, C>; -} - -/// IPA specific operations -impl<'params, C: CurveAffine> GuardIPA<'params, C> { - /// Lets caller supply the challenges and obtain an MSM with updated - /// scalars and points. - pub fn use_challenges(mut self) -> MSMIPA<'params, C> { - let s = compute_s(&self.u, self.neg_c); - self.msm.add_to_g_scalars(&s); - - self.msm - } - - /// Lets caller supply the purported G point and simply appends - /// [-c] G to return an updated MSM. - pub fn use_g(mut self, g: C) -> (MSMIPA<'params, C>, Accumulator) { - self.msm.append_term(self.neg_c, g.into()); - - let accumulator = Accumulator { - g, - u_packed: self.u_packed, - }; - - (self.msm, accumulator) - } - - /// Computes G = ⟨s, params.g⟩ - pub fn compute_g(&self) -> C { - let s = compute_s(&self.u, C::Scalar::ONE); - - best_multiexp(&s, &self.msm.params.g).to_affine() - } -} - -/// A verifier that checks multiple proofs in a batch. -#[derive(Debug)] -pub struct AccumulatorStrategy<'params, C: CurveAffine> { - msm: MSMIPA<'params, C>, -} - -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> - for AccumulatorStrategy<'params, C> -{ - type Output = Self; - - fn new(params: &'params ParamsIPA) -> Self { - AccumulatorStrategy { - msm: MSMIPA::new(params), - } - } - - fn process( - mut self, - f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, - ) -> Result { - self.msm.scale(C::Scalar::random(OsRng)); - let guard = f(self.msm)?; - - Ok(Self { - msm: guard.use_challenges(), - }) - } - - /// Finalizes the batch and checks its validity. - /// - /// Returns `false` if *some* proof was invalid. If the caller needs to identify - /// specific failing proofs, it must re-process the proofs separately. 
- #[must_use] - fn finalize(self) -> bool { - self.msm.check() - } -} - -/// A verifier that checks single proof -#[derive(Debug)] -pub struct SingleStrategy<'params, C: CurveAffine> { - msm: MSMIPA<'params, C>, -} - -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> - for SingleStrategy<'params, C> -{ - type Output = (); - - fn new(params: &'params ParamsIPA) -> Self { - SingleStrategy { - msm: MSMIPA::new(params), - } - } - - fn process( - self, - f: impl FnOnce(MSMIPA<'params, C>) -> Result, Error>, - ) -> Result { - let guard = f(self.msm)?; - let msm = guard.use_challenges(); - if msm.check() { - Ok(()) - } else { - Err(Error::ConstraintSystemFailure) - } - } - - /// Finalizes the batch and checks its validity. - /// - /// Returns `false` if *some* proof was invalid. If the caller needs to identify - /// specific failing proofs, it must re-process the proofs separately. - #[must_use] - fn finalize(self) -> bool { - unreachable!() - } -} - -/// Computes the coefficients of $g(X) = \prod\limits_{i=0}^{k-1} (1 + u_{k - 1 - i} X^{2^i})$. 
-fn compute_s(u: &[F], init: F) -> Vec { - assert!(!u.is_empty()); - let mut v = vec![F::ZERO; 1 << u.len()]; - v[0] = init; - - for (len, u_j) in u.iter().rev().enumerate().map(|(i, u_j)| (1 << i, u_j)) { - let (left, right) = v.split_at_mut(len); - let right = &mut right[0..len]; - right.copy_from_slice(left); - for v in right { - *v *= u_j; - } - } - - v -} diff --git a/halo2_proofs_rm/src/poly/kzg/commitment.rs b/halo2_proofs_rm/src/poly/kzg/commitment.rs deleted file mode 100644 index 114b9ac013..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/commitment.rs +++ /dev/null @@ -1,417 +0,0 @@ -use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize}; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; -use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; -use crate::SerdeFormat; - -use ff::{Field, PrimeField}; -use group::{prime::PrimeCurveAffine, Curve, Group}; -use halo2curves::pairing::Engine; -use halo2curves::CurveExt; -use rand_core::{OsRng, RngCore}; -use std::fmt::Debug; -use std::marker::PhantomData; - -use std::io; - -use super::msm::MSMKZG; - -/// These are the public parameters for the polynomial commitment scheme. 
-#[derive(Debug, Clone)] -pub struct ParamsKZG { - pub(crate) k: u32, - pub(crate) n: u64, - pub(crate) g: Vec, - pub(crate) g_lagrange: Vec, - pub(crate) g2: E::G2Affine, - pub(crate) s_g2: E::G2Affine, -} - -/// Umbrella commitment scheme construction for all KZG variants -#[derive(Debug)] -pub struct KZGCommitmentScheme { - _marker: PhantomData, -} - -impl CommitmentScheme for KZGCommitmentScheme -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Scalar = E::Fr; - type Curve = E::G1Affine; - - type ParamsProver = ParamsKZG; - type ParamsVerifier = ParamsVerifierKZG; - - fn new_params(k: u32) -> Self::ParamsProver { - ParamsKZG::new(k) - } - - fn read_params(reader: &mut R) -> io::Result { - ParamsKZG::read(reader) - } -} - -impl ParamsKZG -where - E::G1Affine: SerdeCurveAffine, - E::G1: CurveExt, -{ - /// Initializes parameters for the curve, draws toxic secret from given rng. - /// MUST NOT be used in production. - pub fn setup(k: u32, rng: R) -> Self { - // Largest root of unity exponent of the Engine is `2^E::Fr::S`, so we can - // only support FFTs of polynomials below degree `2^E::Fr::S`. - assert!(k <= E::Fr::S); - let n: u64 = 1 << k; - - // Calculate g = [G1, [s] G1, [s^2] G1, ..., [s^(n-1)] G1] in parallel. 
- let g1 = E::G1Affine::generator(); - let s = ::random(rng); - - let mut g_projective = vec![E::G1::identity(); n as usize]; - parallelize(&mut g_projective, |g, start| { - let mut current_g: E::G1 = g1.into(); - current_g *= s.pow_vartime([start as u64]); - for g in g.iter_mut() { - *g = current_g; - current_g *= s; - } - }); - - let g = { - let mut g = vec![E::G1Affine::identity(); n as usize]; - parallelize(&mut g, |g, starts| { - E::G1::batch_normalize(&g_projective[starts..(starts + g.len())], g); - }); - g - }; - - let mut g_lagrange_projective = vec![E::G1::identity(); n as usize]; - let mut root = E::Fr::ROOT_OF_UNITY; - for _ in k..E::Fr::S { - root = root.square(); - } - let n_inv = E::Fr::from(n) - .invert() - .expect("inversion should be ok for n = 1<::generator(); - let s_g2 = (g2 * s).into(); - - Self { - k, - n, - g, - g_lagrange, - g2, - s_g2, - } - } - - /// Initializes parameters for the curve through existing parameters - /// k, g, g_lagrange (optional), g2, s_g2 - pub fn from_parts( - &self, - k: u32, - g: Vec, - g_lagrange: Option>, - g2: E::G2Affine, - s_g2: E::G2Affine, - ) -> Self { - Self { - k, - n: 1 << k, - g_lagrange: match g_lagrange { - Some(g_l) => g_l, - None => g_to_lagrange(g.iter().map(PrimeCurveAffine::to_curve).collect(), k), - }, - g, - g2, - s_g2, - } - } - - /// Returns gernerator on G2 - pub fn g2(&self) -> E::G2Affine { - self.g2 - } - - /// Returns first power of secret on G2 - pub fn s_g2(&self) -> E::G2Affine { - self.s_g2 - } - - /// Writes parameters to buffer - pub fn write_custom(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> - where - E::G2Affine: SerdeCurveAffine, - { - writer.write_all(&self.k.to_le_bytes())?; - for el in self.g.iter() { - el.write(writer, format)?; - } - for el in self.g_lagrange.iter() { - el.write(writer, format)?; - } - self.g2.write(writer, format)?; - self.s_g2.write(writer, format)?; - Ok(()) - } - - /// Reads params from a buffer. 
- pub fn read_custom(reader: &mut R, format: SerdeFormat) -> io::Result - where - E::G2Affine: SerdeCurveAffine, - { - let mut k = [0u8; 4]; - reader.read_exact(&mut k[..])?; - let k = u32::from_le_bytes(k); - let n = 1 << k; - - let (g, g_lagrange) = match format { - SerdeFormat::Processed => { - use group::GroupEncoding; - let load_points_from_file_parallelly = - |reader: &mut R| -> io::Result>> { - let mut points_compressed = - vec![<::G1Affine as GroupEncoding>::Repr::default(); n]; - for points_compressed in points_compressed.iter_mut() { - reader.read_exact((*points_compressed).as_mut())?; - } - - let mut points = vec![Option::::None; n]; - parallelize(&mut points, |points, chunks| { - for (i, point) in points.iter_mut().enumerate() { - *point = Option::from(E::G1Affine::from_bytes( - &points_compressed[chunks + i], - )); - } - }); - Ok(points) - }; - - let g = load_points_from_file_parallelly(reader)?; - let g: Vec<::G1Affine> = g - .iter() - .map(|point| { - point.ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "invalid point encoding") - }) - }) - .collect::>()?; - let g_lagrange = load_points_from_file_parallelly(reader)?; - let g_lagrange: Vec<::G1Affine> = g_lagrange - .iter() - .map(|point| { - point.ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "invalid point encoding") - }) - }) - .collect::>()?; - (g, g_lagrange) - } - SerdeFormat::RawBytes => { - let g = (0..n) - .map(|_| ::read(reader, format)) - .collect::, _>>()?; - let g_lagrange = (0..n) - .map(|_| ::read(reader, format)) - .collect::, _>>()?; - (g, g_lagrange) - } - SerdeFormat::RawBytesUnchecked => { - // avoid try branching for performance - let g = (0..n) - .map(|_| ::read(reader, format).unwrap()) - .collect::>(); - let g_lagrange = (0..n) - .map(|_| ::read(reader, format).unwrap()) - .collect::>(); - (g, g_lagrange) - } - }; - - let g2 = E::G2Affine::read(reader, format)?; - let s_g2 = E::G2Affine::read(reader, format)?; - - Ok(Self { - k, - n: n as u64, - g, - 
g_lagrange, - g2, - s_g2, - }) - } -} - -// TODO: see the issue at https://github.com/appliedzkp/halo2/issues/45 -// So we probably need much smaller verifier key. However for new bases in g1 should be in verifier keys. -/// KZG multi-open verification parameters -pub type ParamsVerifierKZG = ParamsKZG; - -impl<'params, E: Engine + Debug> Params<'params, E::G1Affine> for ParamsKZG -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type MSM = MSMKZG; - - fn k(&self) -> u32 { - self.k - } - - fn n(&self) -> u64 { - self.n - } - - fn downsize(&mut self, k: u32) { - assert!(k <= self.k); - - self.k = k; - self.n = 1 << k; - - self.g.truncate(self.n as usize); - self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); - } - - fn empty_msm(&'params self) -> MSMKZG { - MSMKZG::new() - } - - fn commit_lagrange(&self, poly: &Polynomial, _: Blind) -> E::G1 { - let mut scalars = Vec::with_capacity(poly.len()); - scalars.extend(poly.iter()); - let bases = &self.g_lagrange; - let size = scalars.len(); - assert!(bases.len() >= size); - best_multiexp(&scalars, &bases[0..size]) - } - - /// Writes params to a buffer. - fn write(&self, writer: &mut W) -> io::Result<()> { - self.write_custom(writer, SerdeFormat::RawBytes) - } - - /// Reads params from a buffer. 
- fn read(reader: &mut R) -> io::Result { - Self::read_custom(reader, SerdeFormat::RawBytes) - } -} - -impl<'params, E: Engine + Debug> ParamsVerifier<'params, E::G1Affine> for ParamsKZG -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ -} - -impl<'params, E: Engine + Debug> ParamsProver<'params, E::G1Affine> for ParamsKZG -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type ParamsVerifier = ParamsVerifierKZG; - - fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { - self - } - - fn new(k: u32) -> Self { - Self::setup(k, OsRng) - } - - fn commit(&self, poly: &Polynomial, _: Blind) -> E::G1 { - let mut scalars = Vec::with_capacity(poly.len()); - scalars.extend(poly.iter()); - let bases = &self.g; - let size = scalars.len(); - assert!(bases.len() >= size); - best_multiexp(&scalars, &bases[0..size]) - } - - fn get_g(&self) -> &[E::G1Affine] { - &self.g - } -} - -#[cfg(test)] -mod test { - use crate::poly::commitment::ParamsProver; - use crate::poly::commitment::{Blind, Params}; - use crate::poly::kzg::commitment::ParamsKZG; - use ff::Field; - - #[test] - fn test_commit_lagrange() { - const K: u32 = 6; - - use rand_core::OsRng; - - use crate::poly::EvaluationDomain; - use halo2curves::bn256::{Bn256, Fr}; - - let params = ParamsKZG::::new(K); - let domain = EvaluationDomain::new(1, K); - - let mut a = domain.empty_lagrange(); - - for (i, a) in a.iter_mut().enumerate() { - *a = Fr::from(i as u64); - } - - let b = domain.lagrange_to_coeff(a.clone()); - - let alpha = Blind(Fr::random(OsRng)); - - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); - } - - #[test] - fn test_parameter_serialisation_roundtrip() { - const K: u32 = 4; - - use super::super::commitment::Params; - use crate::halo2curves::bn256::Bn256; - - let params0 = ParamsKZG::::new(K); - let mut data = vec![]; - as 
Params<_>>::write(¶ms0, &mut data).unwrap(); - let params1: ParamsKZG = Params::read::<_>(&mut &data[..]).unwrap(); - - assert_eq!(params0.k, params1.k); - assert_eq!(params0.n, params1.n); - assert_eq!(params0.g.len(), params1.g.len()); - assert_eq!(params0.g_lagrange.len(), params1.g_lagrange.len()); - - assert_eq!(params0.g, params1.g); - assert_eq!(params0.g_lagrange, params1.g_lagrange); - assert_eq!(params0.g2, params1.g2); - assert_eq!(params0.s_g2, params1.s_g2); - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/mod.rs b/halo2_proofs_rm/src/poly/kzg/mod.rs deleted file mode 100644 index 0c99a20c34..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -/// KZG commitment scheme -pub mod commitment; -/// Multiscalar multiplication engines -pub mod msm; -/// KZG multi-open scheme -pub mod multiopen; -/// Strategies used with KZG scheme -pub mod strategy; diff --git a/halo2_proofs_rm/src/poly/kzg/msm.rs b/halo2_proofs_rm/src/poly/kzg/msm.rs deleted file mode 100644 index f9b8c284bd..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/msm.rs +++ /dev/null @@ -1,203 +0,0 @@ -use std::fmt::Debug; - -use super::commitment::ParamsKZG; -use crate::{ - arithmetic::{best_multiexp, parallelize}, - poly::commitment::MSM, -}; -use group::{Curve, Group}; -use halo2curves::{ - pairing::{Engine, MillerLoopResult, MultiMillerLoop}, - CurveAffine, CurveExt, -}; - -/// A multiscalar multiplication in the polynomial commitment scheme -#[derive(Clone, Default, Debug)] -pub struct MSMKZG -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) scalars: Vec, - pub(crate) bases: Vec, -} - -impl MSMKZG -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - /// Create an empty MSM instance - pub fn new() -> Self { - MSMKZG { - scalars: vec![], - bases: vec![], - } - } - - /// Prepares all scalars in the MSM to linear combination - pub fn combine_with_base(&mut self, base: E::Fr) { - use ff::Field; - 
let mut acc = E::Fr::ONE; - if !self.scalars.is_empty() { - for scalar in self.scalars.iter_mut().rev() { - *scalar *= &acc; - acc *= base; - } - } - } -} - -impl MSM for MSMKZG -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - fn append_term(&mut self, scalar: E::Fr, point: E::G1) { - self.scalars.push(scalar); - self.bases.push(point); - } - - fn add_msm(&mut self, other: &Self) { - self.scalars.extend(other.scalars().iter()); - self.bases.extend(other.bases().iter()); - } - - fn scale(&mut self, factor: E::Fr) { - if !self.scalars.is_empty() { - parallelize(&mut self.scalars, |scalars, _| { - for other_scalar in scalars { - *other_scalar *= &factor; - } - }) - } - } - - fn check(&self) -> bool { - bool::from(self.eval().is_identity()) - } - - fn eval(&self) -> E::G1 { - use group::prime::PrimeCurveAffine; - let mut bases = vec![E::G1Affine::identity(); self.scalars.len()]; - E::G1::batch_normalize(&self.bases, &mut bases); - best_multiexp(&self.scalars, &bases) - } - - fn bases(&self) -> Vec { - self.bases.clone() - } - - fn scalars(&self) -> Vec { - self.scalars.clone() - } -} - -/// A projective point collector -#[derive(Debug, Clone)] -pub(crate) struct PreMSM -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - projectives_msms: Vec>, -} - -impl PreMSM -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) fn new() -> Self { - PreMSM { - projectives_msms: vec![], - } - } - - pub(crate) fn normalize(self) -> MSMKZG { - let (scalars, bases) = self - .projectives_msms - .into_iter() - .map(|msm| (msm.scalars, msm.bases)) - .unzip::<_, _, Vec<_>, Vec<_>>(); - - MSMKZG { - scalars: scalars.into_iter().flatten().collect(), - bases: bases.into_iter().flatten().collect(), - } - } - - pub(crate) fn add_msm(&mut self, other: MSMKZG) { - self.projectives_msms.push(other); - } -} - -impl<'params, E: MultiMillerLoop + Debug> From<&'params ParamsKZG> for DualMSM<'params, E> 
-where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - fn from(params: &'params ParamsKZG) -> Self { - DualMSM::new(params) - } -} - -/// Two channel MSM accumulator -#[derive(Debug, Clone)] -pub struct DualMSM<'a, E: Engine> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) params: &'a ParamsKZG, - pub(crate) left: MSMKZG, - pub(crate) right: MSMKZG, -} - -impl<'a, E: MultiMillerLoop + Debug> DualMSM<'a, E> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - /// Create a new two channel MSM accumulator instance - pub fn new(params: &'a ParamsKZG) -> Self { - Self { - params, - left: MSMKZG::new(), - right: MSMKZG::new(), - } - } - - /// Scale all scalars in the MSM by some scaling factor - pub fn scale(&mut self, e: E::Fr) { - self.left.scale(e); - self.right.scale(e); - } - - /// Add another multiexp into this one - pub fn add_msm(&mut self, other: Self) { - self.left.add_msm(&other.left); - self.right.add_msm(&other.right); - } - - /// Performs final pairing check with given verifier params and two channel linear combination - pub fn check(self) -> bool { - let s_g2_prepared = E::G2Prepared::from(self.params.s_g2); - let n_g2_prepared = E::G2Prepared::from(-self.params.g2); - - let left = self.left.eval(); - let right = self.right.eval(); - - let (term_1, term_2) = ( - (&left.into(), &s_g2_prepared), - (&right.into(), &n_g2_prepared), - ); - let terms = &[term_1, term_2]; - - bool::from( - E::multi_miller_loop(&terms[..]) - .final_exponentiation() - .is_identity(), - ) - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen.rs b/halo2_proofs_rm/src/poly/kzg/multiopen.rs deleted file mode 100644 index 97b7e2b777..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod gwc; -mod shplonk; - -pub use gwc::*; -pub use shplonk::*; diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs 
b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs deleted file mode 100644 index 3fd28dd00a..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc.rs +++ /dev/null @@ -1,50 +0,0 @@ -mod prover; -mod verifier; - -pub use prover::ProverGWC; -pub use verifier::VerifierGWC; - -use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; -use std::marker::PhantomData; - -#[derive(Clone, Copy, Debug)] -struct U {} -type ChallengeU = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct V {} -type ChallengeV = ChallengeScalar; - -struct CommitmentData> { - queries: Vec, - point: F, - _marker: PhantomData, -} - -fn construct_intermediate_sets>(queries: I) -> Vec> -where - I: IntoIterator + Clone, -{ - let mut point_query_map: Vec<(F, Vec)> = Vec::new(); - for query in queries { - if let Some(pos) = point_query_map - .iter() - .position(|(point, _)| *point == query.get_point()) - { - let (_, queries) = &mut point_query_map[pos]; - queries.push(query); - } else { - point_query_map.push((query.get_point(), vec![query])); - } - } - - point_query_map - .into_iter() - .map(|(point, queries)| CommitmentData { - queries, - point, - _marker: PhantomData, - }) - .collect() -} diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs deleted file mode 100644 index ecea01cb01..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/prover.rs +++ /dev/null @@ -1,89 +0,0 @@ -use super::{construct_intermediate_sets, ChallengeV, Query}; -use crate::arithmetic::{kate_division, powers}; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::ParamsProver; -use crate::poly::commitment::Prover; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::query::ProverQuery; -use crate::poly::{commitment::Blind, Polynomial}; -use crate::transcript::{EncodedChallenge, TranscriptWrite}; - -use group::Curve; -use halo2curves::pairing::Engine; -use 
halo2curves::CurveExt; -use rand_core::RngCore; -use std::fmt::Debug; -use std::io; -use std::marker::PhantomData; - -/// Concrete KZG prover with GWC variant -#[derive(Debug)] -pub struct ProverGWC<'params, E: Engine> { - params: &'params ParamsKZG, -} - -/// Create a multi-opening proof -impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> for ProverGWC<'params, E> -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - /// Create a multi-opening proof - fn create_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptWrite, - R, - I, - >( - &self, - _: R, - transcript: &mut T, - queries: I, - ) -> io::Result<()> - where - I: IntoIterator> + Clone, - R: RngCore, - { - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - let commitment_data = construct_intermediate_sets(queries); - - for commitment_at_a_point in commitment_data.iter() { - let z = commitment_at_a_point.point; - let (poly_batch, eval_batch) = commitment_at_a_point - .queries - .iter() - .zip(powers(*v)) - .map(|(query, power_of_v)| { - assert_eq!(query.get_point(), z); - - let poly = query.get_commitment().poly; - let eval = query.get_eval(); - - (poly.clone() * power_of_v, eval * power_of_v) - }) - .reduce(|(poly_acc, eval_acc), (poly, eval)| (poly_acc + &poly, eval_acc + eval)) - .unwrap(); - - let poly_batch = &poly_batch - eval_batch; - let witness_poly = Polynomial { - values: kate_division(&poly_batch.values, z), - _marker: PhantomData, - }; - let w = self - .params - .commit(&witness_poly, Blind::default()) - .to_affine(); - - transcript.write_point(w)?; - } - Ok(()) - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs deleted file mode 100644 index fcfda6941f..0000000000 --- 
a/halo2_proofs_rm/src/poly/kzg/multiopen/gwc/verifier.rs +++ /dev/null @@ -1,124 +0,0 @@ -use std::fmt::Debug; - -use super::{construct_intermediate_sets, ChallengeU, ChallengeV}; -use crate::arithmetic::powers; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::Verifier; -use crate::poly::commitment::MSM; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::kzg::msm::{DualMSM, MSMKZG}; -use crate::poly::kzg::strategy::GuardKZG; -use crate::poly::query::Query; -use crate::poly::query::{CommitmentReference, VerifierQuery}; -use crate::poly::Error; -use crate::transcript::{EncodedChallenge, TranscriptRead}; - -use ff::Field; -use halo2curves::pairing::{Engine, MultiMillerLoop}; -use halo2curves::CurveExt; - -#[derive(Debug)] -/// Concrete KZG verifier with GWC variant -pub struct VerifierGWC<'params, E: Engine> { - params: &'params ParamsKZG, -} - -impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierGWC<'params, E> -where - E: MultiMillerLoop + Debug, - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Guard = GuardKZG<'params, E>; - type MSMAccumulator = DualMSM<'params, E>; - - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - fn verify_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptRead, - I, - >( - &self, - transcript: &mut T, - queries: I, - mut msm_accumulator: DualMSM<'params, E>, - ) -> Result - where - I: IntoIterator>> + Clone, - { - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - - let commitment_data = construct_intermediate_sets(queries); - - let w: Vec = (0..commitment_data.len()) - .map(|_| transcript.read_point().map_err(|_| Error::SamplingError)) - .collect::, Error>>()?; - - let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); - - let mut commitment_multi = MSMKZG::::new(); - let mut eval_multi = E::Fr::ZERO; - - let mut 
witness = MSMKZG::::new(); - let mut witness_with_aux = MSMKZG::::new(); - - for ((commitment_at_a_point, wi), power_of_u) in - commitment_data.iter().zip(w.into_iter()).zip(powers(*u)) - { - assert!(!commitment_at_a_point.queries.is_empty()); - let z = commitment_at_a_point.point; - - let (mut commitment_batch, eval_batch) = commitment_at_a_point - .queries - .iter() - .zip(powers(*v)) - .map(|(query, power_of_v)| { - assert_eq!(query.get_point(), z); - - let commitment = match query.get_commitment() { - CommitmentReference::Commitment(c) => { - let mut msm = MSMKZG::::new(); - msm.append_term(power_of_v, (*c).into()); - msm - } - CommitmentReference::MSM(msm) => { - let mut msm = msm.clone(); - msm.scale(power_of_v); - msm - } - }; - let eval = power_of_v * query.get_eval(); - - (commitment, eval) - }) - .reduce(|(mut commitment_acc, eval_acc), (commitment, eval)| { - commitment_acc.add_msm(&commitment); - (commitment_acc, eval_acc + eval) - }) - .unwrap(); - - commitment_batch.scale(power_of_u); - commitment_multi.add_msm(&commitment_batch); - eval_multi += power_of_u * eval_batch; - - witness_with_aux.append_term(power_of_u * z, wi.into()); - witness.append_term(power_of_u, wi.into()); - } - - msm_accumulator.left.add_msm(&witness); - - msm_accumulator.right.add_msm(&witness_with_aux); - msm_accumulator.right.add_msm(&commitment_multi); - let g0: E::G1 = self.params.g[0].into(); - msm_accumulator.right.append_term(eval_multi, -g0); - - Ok(Self::Guard::new(msm_accumulator)) - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs deleted file mode 100644 index d0814e83e3..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk.rs +++ /dev/null @@ -1,247 +0,0 @@ -mod prover; -mod verifier; - -use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; -pub use prover::ProverSHPLONK; -use 
std::collections::BTreeSet; -pub use verifier::VerifierSHPLONK; - -#[derive(Clone, Copy, Debug)] -struct U {} -type ChallengeU = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct V {} -type ChallengeV = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Y {} -type ChallengeY = ChallengeScalar; - -#[derive(Debug, Clone, PartialEq)] -struct Commitment((T, Vec)); - -impl Commitment { - fn get(&self) -> T { - self.0 .0.clone() - } - - fn evals(&self) -> Vec { - self.0 .1.clone() - } -} - -#[derive(Debug, Clone, PartialEq)] -struct RotationSet { - commitments: Vec>, - points: Vec, -} - -#[derive(Debug, PartialEq)] -struct IntermediateSets> { - rotation_sets: Vec>, - super_point_set: BTreeSet, -} - -fn construct_intermediate_sets>( - queries: I, -) -> IntermediateSets -where - I: IntoIterator + Clone, -{ - let queries = queries.into_iter().collect::>(); - - // Find evaluation of a commitment at a rotation - let get_eval = |commitment: Q::Commitment, rotation: F| -> F { - queries - .iter() - .find(|query| query.get_commitment() == commitment && query.get_point() == rotation) - .unwrap() - .get_eval() - }; - - // All points that appear in queries - let mut super_point_set = BTreeSet::new(); - - // Collect rotation sets for each commitment - // Example elements in the vector: - // (C_0, {r_5}), - // (C_1, {r_1, r_2, r_3}), - // (C_2, {r_2, r_3, r_4}), - // (C_3, {r_2, r_3, r_4}), - // ... 
- let mut commitment_rotation_set_map: Vec<(Q::Commitment, BTreeSet)> = vec![]; - for query in queries.iter() { - let rotation = query.get_point(); - super_point_set.insert(rotation); - if let Some(commitment_rotation_set) = commitment_rotation_set_map - .iter_mut() - .find(|(commitment, _)| *commitment == query.get_commitment()) - { - let (_, rotation_set) = commitment_rotation_set; - rotation_set.insert(rotation); - } else { - commitment_rotation_set_map.push(( - query.get_commitment(), - BTreeSet::from_iter(std::iter::once(rotation)), - )); - }; - } - - // Flatten rotation sets and collect commitments that opens against each commitment set - // Example elements in the vector: - // {r_5}: [C_0], - // {r_1, r_2, r_3} : [C_1] - // {r_2, r_3, r_4} : [C_2, C_3], - // ... - // NOTE: we want to make the order of the collection of rotation sets independent of the opening points, to ease the verifier computation - let mut rotation_set_commitment_map: Vec<(BTreeSet, Vec)> = vec![]; - for (commitment, rotation_set) in commitment_rotation_set_map.into_iter() { - if let Some(rotation_set_commitment) = rotation_set_commitment_map - .iter_mut() - .find(|(set, _)| set == &rotation_set) - { - let (_, commitments) = rotation_set_commitment; - commitments.push(commitment); - } else { - rotation_set_commitment_map.push((rotation_set, vec![commitment])); - }; - } - - let rotation_sets = rotation_set_commitment_map - .into_par_iter() - .map(|(rotations, commitments)| { - let rotations_vec = rotations.iter().collect::>(); - let commitments: Vec> = commitments - .into_par_iter() - .map(|commitment| { - let evals: Vec = rotations_vec - .as_slice() - .into_par_iter() - .map(|&&rotation| get_eval(commitment, rotation)) - .collect(); - Commitment((commitment, evals)) - }) - .collect(); - - RotationSet { - commitments, - points: rotations.into_iter().collect(), - } - }) - .collect::>>(); - - IntermediateSets { - rotation_sets, - super_point_set, - } -} - -#[cfg(test)] -mod proptests { - use 
super::{construct_intermediate_sets, Commitment, IntermediateSets}; - use ff::FromUniformBytes; - use halo2curves::pasta::Fp; - use proptest::{collection::vec, prelude::*, sample::select}; - use std::convert::TryFrom; - - #[derive(Debug, Clone)] - struct MyQuery { - point: F, - eval: F, - commitment: usize, - } - - impl super::Query for MyQuery { - type Commitment = usize; - type Eval = Fp; - - fn get_point(&self) -> Fp { - self.point - } - - fn get_eval(&self) -> Self::Eval { - self.eval - } - - fn get_commitment(&self) -> Self::Commitment { - self.commitment - } - } - - prop_compose! { - fn arb_point()( - bytes in vec(any::(), 64) - ) -> Fp { - Fp::from_uniform_bytes(&<[u8; 64]>::try_from(bytes).unwrap()) - } - } - - prop_compose! { - fn arb_query(commitment: usize, point: Fp)( - eval in arb_point() - ) -> MyQuery { - MyQuery { - point, - eval, - commitment - } - } - } - - prop_compose! { - // Mapping from column index to point index. - fn arb_queries_inner(num_points: usize, num_cols: usize, num_queries: usize)( - col_indices in vec(select((0..num_cols).collect::>()), num_queries), - point_indices in vec(select((0..num_points).collect::>()), num_queries) - ) -> Vec<(usize, usize)> { - col_indices.into_iter().zip(point_indices.into_iter()).collect() - } - } - - prop_compose! { - fn compare_queries( - num_points: usize, - num_cols: usize, - num_queries: usize, - )( - points_1 in vec(arb_point(), num_points), - points_2 in vec(arb_point(), num_points), - mapping in arb_queries_inner(num_points, num_cols, num_queries) - )( - queries_1 in mapping.iter().map(|(commitment, point_idx)| arb_query(*commitment, points_1[*point_idx])).collect::>(), - queries_2 in mapping.iter().map(|(commitment, point_idx)| arb_query(*commitment, points_2[*point_idx])).collect::>(), - ) -> ( - Vec>, - Vec> - ) { - ( - queries_1, - queries_2, - ) - } - } - - proptest! 
{ - #[test] - fn test_intermediate_sets( - (queries_1, queries_2) in compare_queries(8, 8, 16) - ) { - let IntermediateSets { rotation_sets, .. } = construct_intermediate_sets(queries_1); - let commitment_sets = rotation_sets.iter().map(|data| - data.commitments.iter().map(Commitment::get).collect::>() - ).collect::>(); - - // It shouldn't matter what the point or eval values are; we should get - // the same exact point set indices and point indices again. - let IntermediateSets { rotation_sets: new_rotation_sets, .. } = construct_intermediate_sets(queries_2); - let new_commitment_sets = new_rotation_sets.iter().map(|data| - data.commitments.iter().map(Commitment::get).collect::>() - ).collect::>(); - - assert_eq!(commitment_sets, new_commitment_sets); - } - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs deleted file mode 100644 index 5001d69094..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/prover.rs +++ /dev/null @@ -1,298 +0,0 @@ -use super::{ - construct_intermediate_sets, ChallengeU, ChallengeV, ChallengeY, Commitment, RotationSet, -}; -use crate::arithmetic::{ - eval_polynomial, evaluate_vanishing_polynomial, kate_division, lagrange_interpolate, - parallelize, powers, CurveAffine, -}; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::{Blind, ParamsProver, Prover}; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::query::{PolynomialPointer, ProverQuery}; -use crate::poly::{Coeff, Polynomial}; -use crate::transcript::{EncodedChallenge, TranscriptWrite}; - -use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use ff::Field; -use group::Curve; -use halo2curves::pairing::Engine; -use halo2curves::CurveExt; -use rand_core::RngCore; -use std::fmt::Debug; -use std::io; -use std::marker::PhantomData; -use std::ops::MulAssign; - -fn div_by_vanishing(poly: Polynomial, roots: &[F]) -> Vec { - let poly 
= roots - .iter() - .fold(poly.values, |poly, point| kate_division(&poly, *point)); - - poly -} - -struct CommitmentExtension<'a, C: CurveAffine> { - commitment: Commitment>, - low_degree_equivalent: Polynomial, -} - -impl<'a, C: CurveAffine> Commitment> { - fn extend(&self, points: &[C::Scalar]) -> CommitmentExtension<'a, C> { - let poly = lagrange_interpolate(points, &self.evals()[..]); - - let low_degree_equivalent = Polynomial { - values: poly, - _marker: PhantomData, - }; - - CommitmentExtension { - commitment: self.clone(), - low_degree_equivalent, - } - } -} - -impl<'a, C: CurveAffine> CommitmentExtension<'a, C> { - fn linearisation_contribution(&self, u: C::Scalar) -> Polynomial { - let p_x = self.commitment.get().poly; - let r_eval = eval_polynomial(&self.low_degree_equivalent.values[..], u); - p_x - r_eval - } - - fn quotient_contribution(&self) -> Polynomial { - let len = self.low_degree_equivalent.len(); - let mut p_x = self.commitment.get().poly.clone(); - parallelize(&mut p_x.values[0..len], |lhs, start| { - for (lhs, rhs) in lhs - .iter_mut() - .zip(self.low_degree_equivalent.values[start..].iter()) - { - *lhs -= *rhs; - } - }); - p_x - } -} - -struct RotationSetExtension<'a, C: CurveAffine> { - commitments: Vec>, - points: Vec, -} - -impl<'a, C: CurveAffine> RotationSet> { - fn extend(self, commitments: Vec>) -> RotationSetExtension<'a, C> { - RotationSetExtension { - commitments, - points: self.points, - } - } -} - -/// Concrete KZG prover with SHPLONK variant -#[derive(Debug)] -pub struct ProverSHPLONK<'a, E: Engine> { - params: &'a ParamsKZG, -} - -impl<'a, E: Engine> ProverSHPLONK<'a, E> { - /// Given parameters creates new prover instance - pub fn new(params: &'a ParamsKZG) -> Self { - Self { params } - } -} - -/// Create a multi-opening proof -impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> - for ProverSHPLONK<'params, E> -where - E::Fr: Ord, - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - 
E::G2Affine: SerdeCurveAffine, -{ - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - /// Create a multi-opening proof - fn create_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptWrite, - R, - I, - >( - &self, - _: R, - transcript: &mut T, - queries: I, - ) -> io::Result<()> - where - I: IntoIterator> + Clone, - R: RngCore, - { - // TODO: explore if it is safe to use same challenge - // for different sets that are already combined with another challenge - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - let quotient_contribution = |rotation_set: &RotationSetExtension| { - // [P_i_0(X) - R_i_0(X), P_i_1(X) - R_i_1(X), ... ] - #[allow(clippy::needless_collect)] - let numerators = rotation_set - .commitments - .as_slice() - .into_par_iter() - .map(|commitment| commitment.quotient_contribution()) - .collect::>(); - - // define numerator polynomial as - // N_i_j(X) = (P_i_j(X) - R_i_j(X)) - // and combine polynomials with same evaluation point set - // N_i(X) = linear_combination(y, N_i_j(X)) - // where y is random scalar to combine numerator polynomials - let n_x = numerators - .into_iter() - .zip(powers(*y)) - .map(|(numerator, power_of_y)| numerator * power_of_y) - .reduce(|acc, numerator| acc + &numerator) - .unwrap(); - - let points = &rotation_set.points[..]; - - // quotient contribution of this evaluation set is - // Q_i(X) = N_i(X) / Z_i(X) where - // Z_i(X) = (x - r_i_0) * (x - r_i_1) * ... 
- let mut poly = div_by_vanishing(n_x, points); - poly.resize(self.params.n as usize, E::Fr::ZERO); - - Polynomial { - values: poly, - _marker: PhantomData, - } - }; - - let intermediate_sets = construct_intermediate_sets(queries); - let (rotation_sets, super_point_set) = ( - intermediate_sets.rotation_sets, - intermediate_sets.super_point_set, - ); - - let rotation_sets: Vec> = rotation_sets - .into_par_iter() - .map(|rotation_set| { - let commitments: Vec> = rotation_set - .commitments - .as_slice() - .into_par_iter() - .map(|commitment_data| commitment_data.extend(&rotation_set.points)) - .collect(); - rotation_set.extend(commitments) - }) - .collect(); - - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - - #[allow(clippy::needless_collect)] - let quotient_polynomials = rotation_sets - .as_slice() - .into_par_iter() - .map(quotient_contribution) - .collect::>(); - - let h_x: Polynomial = quotient_polynomials - .into_iter() - .zip(powers(*v)) - .map(|(poly, power_of_v)| poly * power_of_v) - .reduce(|acc, poly| acc + &poly) - .unwrap(); - - let h = self.params.commit(&h_x, Blind::default()).to_affine(); - transcript.write_point(h)?; - let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); - - let linearisation_contribution = |rotation_set: RotationSetExtension| { - let mut diffs = super_point_set.clone(); - for point in rotation_set.points.iter() { - diffs.remove(point); - } - let diffs = diffs.into_iter().collect::>(); - - // calculate difference vanishing polynomial evaluation - let z_i = evaluate_vanishing_polynomial(&diffs[..], *u); - - // inner linearisation contributions are - // [P_i_0(X) - r_i_0, P_i_1(X) - r_i_1, ... 
] where - // r_i_j = R_i_j(u) is the evaluation of low degree equivalent polynomial - // where u is random evaluation point - #[allow(clippy::needless_collect)] - let inner_contributions = rotation_set - .commitments - .as_slice() - .into_par_iter() - .map(|commitment| commitment.linearisation_contribution(*u)) - .collect::>(); - - // define inner contributor polynomial as - // L_i_j(X) = (P_i_j(X) - r_i_j) - // and combine polynomials with same evaluation point set - // L_i(X) = linear_combination(y, L_i_j(X)) - // where y is random scalar to combine inner contributors - let l_x: Polynomial = inner_contributions - .into_iter() - .zip(powers(*y)) - .map(|(poly, power_of_y)| poly * power_of_y) - .reduce(|acc, poly| acc + &poly) - .unwrap(); - - // finally scale l_x by difference vanishing polynomial evaluation z_i - (l_x * z_i, z_i) - }; - - #[allow(clippy::type_complexity)] - let (linearisation_contributions, z_diffs): ( - Vec>, - Vec, - ) = rotation_sets - .into_par_iter() - .map(linearisation_contribution) - .unzip(); - - let l_x: Polynomial = linearisation_contributions - .into_iter() - .zip(powers(*v)) - .map(|(poly, power_of_v)| poly * power_of_v) - .reduce(|acc, poly| acc + &poly) - .unwrap(); - - let super_point_set = super_point_set.into_iter().collect::>(); - let zt_eval = evaluate_vanishing_polynomial(&super_point_set[..], *u); - let l_x = l_x - &(h_x * zt_eval); - - // sanity check - #[cfg(debug_assertions)] - { - let must_be_zero = eval_polynomial(&l_x.values[..], *u); - assert_eq!(must_be_zero, E::Fr::ZERO); - } - - let mut h_x = div_by_vanishing(l_x, &[*u]); - - // normalize coefficients by the coefficient of the first polynomial - let z_0_diff_inv = z_diffs[0].invert().unwrap(); - for h_i in h_x.iter_mut() { - h_i.mul_assign(z_0_diff_inv) - } - - let h_x = Polynomial { - values: h_x, - _marker: PhantomData, - }; - - let h = self.params.commit(&h_x, Blind::default()).to_affine(); - transcript.write_point(h)?; - - Ok(()) - } -} diff --git 
a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs b/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs deleted file mode 100644 index 5d03940177..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/multiopen/shplonk/verifier.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::fmt::Debug; - -use super::ChallengeY; -use super::{construct_intermediate_sets, ChallengeU, ChallengeV}; -use crate::arithmetic::{ - eval_polynomial, evaluate_vanishing_polynomial, lagrange_interpolate, powers, -}; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::Verifier; -use crate::poly::commitment::MSM; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::kzg::msm::DualMSM; -use crate::poly::kzg::msm::{PreMSM, MSMKZG}; -use crate::poly::kzg::strategy::GuardKZG; -use crate::poly::query::{CommitmentReference, VerifierQuery}; -use crate::poly::Error; -use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; -use halo2curves::pairing::{Engine, MultiMillerLoop}; -use halo2curves::CurveExt; -use std::ops::MulAssign; - -/// Concrete KZG multiopen verifier with SHPLONK variant -#[derive(Debug)] -pub struct VerifierSHPLONK<'params, E: Engine> { - params: &'params ParamsKZG, -} - -impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierSHPLONK<'params, E> -where - E: MultiMillerLoop + Debug, - E::Fr: Ord, - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Guard = GuardKZG<'params, E>; - type MSMAccumulator = DualMSM<'params, E>; - - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - /// Verify a multi-opening proof - fn verify_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptRead, - I, - >( - &self, - transcript: &mut T, - queries: I, - mut msm_accumulator: DualMSM<'params, E>, - ) -> Result - where - I: IntoIterator>> + Clone, - { - let intermediate_sets = 
construct_intermediate_sets(queries); - let (rotation_sets, super_point_set) = ( - intermediate_sets.rotation_sets, - intermediate_sets.super_point_set, - ); - - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - - let h1 = transcript.read_point().map_err(|_| Error::SamplingError)?; - let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); - let h2 = transcript.read_point().map_err(|_| Error::SamplingError)?; - - let (mut z_0_diff_inverse, mut z_0) = (E::Fr::ZERO, E::Fr::ZERO); - let (mut outer_msm, mut r_outer_acc) = (PreMSM::::new(), E::Fr::ZERO); - for (i, (rotation_set, power_of_v)) in rotation_sets.iter().zip(powers(*v)).enumerate() { - let diffs: Vec = super_point_set - .iter() - .filter(|point| !rotation_set.points.contains(point)) - .copied() - .collect(); - let mut z_diff_i = evaluate_vanishing_polynomial(&diffs[..], *u); - - // normalize coefficients by the coefficient of the first commitment - if i == 0 { - z_0 = evaluate_vanishing_polynomial(&rotation_set.points[..], *u); - z_0_diff_inverse = z_diff_i.invert().unwrap(); - z_diff_i = E::Fr::ONE; - } else { - z_diff_i.mul_assign(z_0_diff_inverse); - } - - let (mut inner_msm, r_inner_acc) = rotation_set - .commitments - .iter() - .zip(powers(*y)) - .map(|(commitment_data, power_of_y)| { - // calculate low degree equivalent - let r_x = lagrange_interpolate( - &rotation_set.points[..], - &commitment_data.evals()[..], - ); - let r_eval = power_of_y * eval_polynomial(&r_x[..], *u); - let msm = match commitment_data.get() { - CommitmentReference::Commitment(c) => { - let mut msm = MSMKZG::::new(); - msm.append_term(power_of_y, (*c).into()); - msm - } - CommitmentReference::MSM(msm) => { - let mut msm = msm.clone(); - msm.scale(power_of_y); - msm - } - }; - (msm, r_eval) - }) - .reduce(|(mut msm_acc, r_eval_acc), (msm, r_eval)| { - msm_acc.add_msm(&msm); - (msm_acc, r_eval_acc + r_eval) - }) - .unwrap(); - - 
inner_msm.scale(power_of_v * z_diff_i); - outer_msm.add_msm(inner_msm); - r_outer_acc += power_of_v * r_inner_acc * z_diff_i; - } - let mut outer_msm = outer_msm.normalize(); - let g1: E::G1 = self.params.g[0].into(); - outer_msm.append_term(-r_outer_acc, g1); - outer_msm.append_term(-z_0, h1.into()); - outer_msm.append_term(*u, h2.into()); - - msm_accumulator.left.append_term(E::Fr::ONE, h2.into()); - - msm_accumulator.right.add_msm(&outer_msm); - - Ok(Self::Guard::new(msm_accumulator)) - } -} diff --git a/halo2_proofs_rm/src/poly/kzg/strategy.rs b/halo2_proofs_rm/src/poly/kzg/strategy.rs deleted file mode 100644 index ee80d800ac..0000000000 --- a/halo2_proofs_rm/src/poly/kzg/strategy.rs +++ /dev/null @@ -1,181 +0,0 @@ -use super::{ - commitment::{KZGCommitmentScheme, ParamsKZG}, - msm::DualMSM, -}; -use crate::{ - helpers::SerdeCurveAffine, - plonk::Error, - poly::{ - commitment::Verifier, - strategy::{Guard, VerificationStrategy}, - }, -}; -use ff::Field; -use halo2curves::{ - pairing::{Engine, MultiMillerLoop}, - CurveAffine, CurveExt, -}; -use rand_core::OsRng; -use std::fmt::Debug; - -/// Wrapper for linear verification accumulator -#[derive(Debug, Clone)] -pub struct GuardKZG<'params, E: MultiMillerLoop + Debug> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) msm_accumulator: DualMSM<'params, E>, -} - -/// Define accumulator type as `DualMSM` -impl<'params, E> Guard> for GuardKZG<'params, E> -where - E: MultiMillerLoop + Debug, - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type MSMAccumulator = DualMSM<'params, E>; -} - -/// KZG specific operations -impl<'params, E: MultiMillerLoop + Debug> GuardKZG<'params, E> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) fn new(msm_accumulator: DualMSM<'params, E>) -> Self { - Self { msm_accumulator } - } -} - -/// A verifier that checks multiple proofs in a batch 
-#[derive(Clone, Debug)] -pub struct AccumulatorStrategy<'params, E: Engine> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) msm_accumulator: DualMSM<'params, E>, -} - -impl<'params, E: MultiMillerLoop + Debug> AccumulatorStrategy<'params, E> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - /// Constructs an empty batch verifier - pub fn new(params: &'params ParamsKZG) -> Self { - AccumulatorStrategy { - msm_accumulator: DualMSM::new(params), - } - } - - /// Constructs and initialized new batch verifier - pub fn with(msm_accumulator: DualMSM<'params, E>) -> Self { - AccumulatorStrategy { msm_accumulator } - } -} - -/// A verifier that checks a single proof -#[derive(Clone, Debug)] -pub struct SingleStrategy<'params, E: Engine> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - pub(crate) msm: DualMSM<'params, E>, -} - -impl<'params, E: MultiMillerLoop + Debug> SingleStrategy<'params, E> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - /// Constructs an empty batch verifier - pub fn new(params: &'params ParamsKZG) -> Self { - SingleStrategy { - msm: DualMSM::new(params), - } - } -} - -impl< - 'params, - E: MultiMillerLoop + Debug, - V: Verifier< - 'params, - KZGCommitmentScheme, - MSMAccumulator = DualMSM<'params, E>, - Guard = GuardKZG<'params, E>, - >, - > VerificationStrategy<'params, KZGCommitmentScheme, V> for AccumulatorStrategy<'params, E> -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Output = Self; - - fn new(params: &'params ParamsKZG) -> Self { - AccumulatorStrategy::new(params) - } - - fn process( - mut self, - f: impl FnOnce(V::MSMAccumulator) -> Result, - ) -> Result { - self.msm_accumulator.scale(E::Fr::random(OsRng)); - - // Guard is updated with new msm contributions - let guard = f(self.msm_accumulator)?; - Ok(Self { - msm_accumulator: 
guard.msm_accumulator, - }) - } - - fn finalize(self) -> bool { - self.msm_accumulator.check() - } -} - -impl< - 'params, - E: MultiMillerLoop + Debug, - V: Verifier< - 'params, - KZGCommitmentScheme, - MSMAccumulator = DualMSM<'params, E>, - Guard = GuardKZG<'params, E>, - >, - > VerificationStrategy<'params, KZGCommitmentScheme, V> for SingleStrategy<'params, E> -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Output = (); - - fn new(params: &'params ParamsKZG) -> Self { - Self::new(params) - } - - fn process( - self, - f: impl FnOnce(V::MSMAccumulator) -> Result, - ) -> Result { - // Guard is updated with new msm contributions - let guard = f(self.msm)?; - let msm = guard.msm_accumulator; - if msm.check() { - Ok(()) - } else { - Err(Error::ConstraintSystemFailure) - } - } - - fn finalize(self) -> bool { - unreachable!(); - } -} diff --git a/halo2_proofs_rm/src/poly/multiopen_test.rs b/halo2_proofs_rm/src/poly/multiopen_test.rs deleted file mode 100644 index 47c6731167..0000000000 --- a/halo2_proofs_rm/src/poly/multiopen_test.rs +++ /dev/null @@ -1,298 +0,0 @@ -#[cfg(test)] -mod test { - use crate::arithmetic::eval_polynomial; - use crate::plonk::Error; - use crate::poly::commitment::Blind; - use crate::poly::commitment::ParamsProver; - use crate::poly::{ - commitment::{CommitmentScheme, Params, Prover, Verifier}, - query::{ProverQuery, VerifierQuery}, - strategy::VerificationStrategy, - EvaluationDomain, - }; - use crate::transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, - TranscriptReadBuffer, TranscriptWriterBuffer, - }; - use ff::WithSmallOrderMulGroup; - use group::Curve; - use rand_core::OsRng; - - #[test] - fn test_roundtrip_ipa() { - use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA}; - use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA}; - use crate::poly::ipa::strategy::AccumulatorStrategy; - use 
halo2curves::pasta::EqAffine; - - const K: u32 = 4; - - let params = ParamsIPA::::new(K); - - let proof = create_proof::< - IPACommitmentScheme, - ProverIPA<_>, - _, - Blake2bWrite<_, _, Challenge255<_>>, - >(¶ms); - - let verifier_params = params.verifier_params(); - - verify::< - IPACommitmentScheme, - VerifierIPA<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); - - verify::< - IPACommitmentScheme, - VerifierIPA<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); - } - - #[test] - fn test_roundtrip_ipa_keccak() { - use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA}; - use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA}; - use crate::poly::ipa::strategy::AccumulatorStrategy; - use halo2curves::pasta::EqAffine; - - const K: u32 = 4; - - let params = ParamsIPA::::new(K); - - let proof = create_proof::< - IPACommitmentScheme, - ProverIPA<_>, - _, - Keccak256Write<_, _, Challenge255<_>>, - >(¶ms); - - let verifier_params = params.verifier_params(); - - verify::< - IPACommitmentScheme, - VerifierIPA<_>, - _, - Keccak256Read<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); - - verify::< - IPACommitmentScheme, - VerifierIPA<_>, - _, - Keccak256Read<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); - } - - #[test] - fn test_roundtrip_gwc() { - use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; - use crate::poly::kzg::multiopen::{ProverGWC, VerifierGWC}; - use crate::poly::kzg::strategy::AccumulatorStrategy; - use halo2curves::bn256::Bn256; - - const K: u32 = 4; - - let params = ParamsKZG::::new(K); - - let proof = - create_proof::<_, ProverGWC<_>, _, Blake2bWrite<_, _, Challenge255<_>>>(¶ms); - - let verifier_params = params.verifier_params(); - - verify::<_, VerifierGWC<_>, _, Blake2bRead<_, _, Challenge255<_>>, 
AccumulatorStrategy<_>>( - verifier_params, - &proof[..], - false, - ); - - verify::< - KZGCommitmentScheme, - VerifierGWC<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); - } - - #[test] - fn test_roundtrip_shplonk() { - use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; - use crate::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; - use crate::poly::kzg::strategy::AccumulatorStrategy; - use halo2curves::bn256::Bn256; - - const K: u32 = 4; - - let params = ParamsKZG::::new(K); - - let proof = create_proof::< - KZGCommitmentScheme, - ProverSHPLONK<_>, - _, - Blake2bWrite<_, _, Challenge255<_>>, - >(¶ms); - - let verifier_params = params.verifier_params(); - - verify::< - KZGCommitmentScheme, - VerifierSHPLONK<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); - - verify::< - KZGCommitmentScheme, - VerifierSHPLONK<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); - } - - fn verify< - 'a, - 'params, - Scheme: CommitmentScheme, - V: Verifier<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptReadBuffer<&'a [u8], Scheme::Curve, E>, - Strategy: VerificationStrategy<'params, Scheme, V, Output = Strategy>, - >( - params: &'params Scheme::ParamsVerifier, - proof: &'a [u8], - should_fail: bool, - ) { - let verifier = V::new(params); - - let mut transcript = T::init(proof); - - let a = transcript.read_point().unwrap(); - let b = transcript.read_point().unwrap(); - let c = transcript.read_point().unwrap(); - - let x = transcript.squeeze_challenge(); - let y = transcript.squeeze_challenge(); - - let avx = transcript.read_scalar().unwrap(); - let bvx = transcript.read_scalar().unwrap(); - let cvy = transcript.read_scalar().unwrap(); - - let valid_queries = std::iter::empty() - .chain(Some(VerifierQuery::new_commitment(&a, x.get_scalar(), avx))) - 
.chain(Some(VerifierQuery::new_commitment(&b, x.get_scalar(), bvx))) - .chain(Some(VerifierQuery::new_commitment(&c, y.get_scalar(), cvy))); - - let invalid_queries = std::iter::empty() - .chain(Some(VerifierQuery::new_commitment(&a, x.get_scalar(), avx))) - .chain(Some(VerifierQuery::new_commitment(&b, x.get_scalar(), avx))) - .chain(Some(VerifierQuery::new_commitment(&c, y.get_scalar(), cvy))); - - let queries = if should_fail { - invalid_queries.clone() - } else { - valid_queries.clone() - }; - - { - let strategy = Strategy::new(params); - let strategy = strategy - .process(|msm_accumulator| { - verifier - .verify_proof(&mut transcript, queries.clone(), msm_accumulator) - .map_err(|_| Error::Opening) - }) - .unwrap(); - - assert_eq!(strategy.finalize(), !should_fail); - } - } - - fn create_proof< - 'params, - Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, - E: EncodedChallenge, - T: TranscriptWriterBuffer, Scheme::Curve, E>, - >( - params: &'params Scheme::ParamsProver, - ) -> Vec - where - Scheme::Scalar: WithSmallOrderMulGroup<3>, - { - let domain = EvaluationDomain::new(1, params.k()); - - let mut ax = domain.empty_coeff(); - for (i, a) in ax.iter_mut().enumerate() { - *a = <::Scalar>::from(10 + i as u64); - } - - let mut bx = domain.empty_coeff(); - for (i, a) in bx.iter_mut().enumerate() { - *a = <::Scalar>::from(100 + i as u64); - } - - let mut cx = domain.empty_coeff(); - for (i, a) in cx.iter_mut().enumerate() { - *a = <::Scalar>::from(100 + i as u64); - } - - let mut transcript = T::init(vec![]); - - let blind = Blind::new(&mut OsRng); - let a = params.commit(&ax, blind).to_affine(); - let b = params.commit(&bx, blind).to_affine(); - let c = params.commit(&cx, blind).to_affine(); - - transcript.write_point(a).unwrap(); - transcript.write_point(b).unwrap(); - transcript.write_point(c).unwrap(); - - let x = transcript.squeeze_challenge(); - let y = transcript.squeeze_challenge(); - - let avx = eval_polynomial(&ax, x.get_scalar()); - let bvx = 
eval_polynomial(&bx, x.get_scalar()); - let cvy = eval_polynomial(&cx, y.get_scalar()); - - transcript.write_scalar(avx).unwrap(); - transcript.write_scalar(bvx).unwrap(); - transcript.write_scalar(cvy).unwrap(); - - let queries = [ - ProverQuery { - point: x.get_scalar(), - poly: &ax, - blind, - }, - ProverQuery { - point: x.get_scalar(), - poly: &bx, - blind, - }, - ProverQuery { - point: y.get_scalar(), - poly: &cx, - blind, - }, - ] - .to_vec(); - - let prover = P::new(params); - prover - .create_proof(&mut OsRng, &mut transcript, queries) - .unwrap(); - - transcript.finalize() - } -} diff --git a/halo2_proofs_rm/src/poly/query.rs b/halo2_proofs_rm/src/poly/query.rs deleted file mode 100644 index bc7a20c240..0000000000 --- a/halo2_proofs_rm/src/poly/query.rs +++ /dev/null @@ -1,160 +0,0 @@ -use std::fmt::Debug; - -use super::commitment::{Blind, MSM}; -use crate::{ - arithmetic::eval_polynomial, - poly::{Coeff, Polynomial}, -}; -use halo2curves::CurveAffine; - -pub trait Query: Sized + Clone + Send + Sync { - type Commitment: PartialEq + Copy + Send + Sync; - type Eval: Clone + Default + Debug; - - fn get_point(&self) -> F; - fn get_eval(&self) -> Self::Eval; - fn get_commitment(&self) -> Self::Commitment; -} - -/// A polynomial query at a point -#[derive(Debug, Clone, Copy)] -pub struct ProverQuery<'com, C: CurveAffine> { - /// Point at which polynomial is queried - pub(crate) point: C::Scalar, - /// Coefficients of polynomial - pub(crate) poly: &'com Polynomial, - /// Blinding factor of polynomial - pub(crate) blind: Blind, -} - -impl<'com, C> ProverQuery<'com, C> -where - C: CurveAffine, -{ - /// Create a new prover query based on a polynomial - pub fn new( - point: C::Scalar, - poly: &'com Polynomial, - blind: Blind, - ) -> Self { - ProverQuery { point, poly, blind } - } -} - -#[doc(hidden)] -#[derive(Copy, Clone)] -pub struct PolynomialPointer<'com, C: CurveAffine> { - pub(crate) poly: &'com Polynomial, - pub(crate) blind: Blind, -} - -impl<'com, C: 
CurveAffine> PartialEq for PolynomialPointer<'com, C> { - fn eq(&self, other: &Self) -> bool { - std::ptr::eq(self.poly, other.poly) - } -} - -impl<'com, C: CurveAffine> Query for ProverQuery<'com, C> { - type Commitment = PolynomialPointer<'com, C>; - type Eval = C::Scalar; - - fn get_point(&self) -> C::Scalar { - self.point - } - fn get_eval(&self) -> Self::Eval { - eval_polynomial(&self.poly[..], self.get_point()) - } - fn get_commitment(&self) -> Self::Commitment { - PolynomialPointer { - poly: self.poly, - blind: self.blind, - } - } -} - -impl<'com, C: CurveAffine, M: MSM> VerifierQuery<'com, C, M> { - /// Create a new verifier query based on a commitment - pub fn new_commitment(commitment: &'com C, point: C::Scalar, eval: C::Scalar) -> Self { - VerifierQuery { - point, - eval, - commitment: CommitmentReference::Commitment(commitment), - } - } - - /// Create a new verifier query based on a linear combination of commitments - pub fn new_msm(msm: &'com M, point: C::Scalar, eval: C::Scalar) -> VerifierQuery<'com, C, M> { - VerifierQuery { - point, - eval, - commitment: CommitmentReference::MSM(msm), - } - } -} - -/// A polynomial query at a point -#[derive(Debug, Clone, Copy)] -pub struct VerifierQuery<'com, C: CurveAffine, M: MSM> { - /// Point at which polynomial is queried - pub(crate) point: C::Scalar, - /// Commitment to polynomial - pub(crate) commitment: CommitmentReference<'com, C, M>, - /// Evaluation of polynomial at query point - pub(crate) eval: C::Scalar, -} - -impl<'com, C, M> VerifierQuery<'com, C, M> -where - C: CurveAffine, - M: MSM, -{ - /// Create a new verifier query based on a commitment - pub fn new( - point: C::Scalar, - commitment: CommitmentReference<'com, C, M>, - eval: C::Scalar, - ) -> Self { - VerifierQuery { - point, - commitment, - eval, - } - } -} - -#[allow(clippy::upper_case_acronyms)] -#[derive(Clone, Debug)] -pub enum CommitmentReference<'r, C: CurveAffine, M: MSM> { - Commitment(&'r C), - MSM(&'r M), -} - -impl<'r, C: 
CurveAffine, M: MSM> Copy for CommitmentReference<'r, C, M> {} - -impl<'r, C: CurveAffine, M: MSM> PartialEq for CommitmentReference<'r, C, M> { - #![allow(clippy::vtable_address_comparisons)] - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (&CommitmentReference::Commitment(a), &CommitmentReference::Commitment(b)) => { - std::ptr::eq(a, b) - } - (&CommitmentReference::MSM(a), &CommitmentReference::MSM(b)) => std::ptr::eq(a, b), - _ => false, - } - } -} - -impl<'com, C: CurveAffine, M: MSM> Query for VerifierQuery<'com, C, M> { - type Eval = C::Scalar; - type Commitment = CommitmentReference<'com, C, M>; - - fn get_point(&self) -> C::Scalar { - self.point - } - fn get_eval(&self) -> C::Scalar { - self.eval - } - fn get_commitment(&self) -> Self::Commitment { - self.commitment - } -} diff --git a/halo2_proofs_rm/src/poly/strategy.rs b/halo2_proofs_rm/src/poly/strategy.rs deleted file mode 100644 index 850f95e6c9..0000000000 --- a/halo2_proofs_rm/src/poly/strategy.rs +++ /dev/null @@ -1,31 +0,0 @@ -use super::commitment::{CommitmentScheme, Verifier}; -use crate::plonk::Error; - -/// Guards is unfinished verification result. Implement this to construct various -/// verification strategies such as aggregation and recursion. -pub trait Guard { - /// Multi scalar engine which is not evaluated yet. - type MSMAccumulator; -} - -/// Trait representing a strategy for verifying Halo 2 proofs. -pub trait VerificationStrategy<'params, Scheme: CommitmentScheme, V: Verifier<'params, Scheme>> { - /// The output type of this verification strategy after processing a proof. - type Output; - - /// Creates new verification strategy instance - fn new(params: &'params Scheme::ParamsVerifier) -> Self; - - /// Obtains an MSM from the verifier strategy and yields back the strategy's - /// output. - fn process( - self, - f: impl FnOnce(V::MSMAccumulator) -> Result, - ) -> Result; - - /// Finalizes the batch and checks its validity. 
- /// - /// Returns `false` if *some* proof was invalid. If the caller needs to identify - /// specific failing proofs, it must re-process the proofs separately. - fn finalize(self) -> bool; -} diff --git a/halo2_proofs_rm/src/transcript.rs b/halo2_proofs_rm/src/transcript.rs deleted file mode 100644 index 6e4f812bdf..0000000000 --- a/halo2_proofs_rm/src/transcript.rs +++ /dev/null @@ -1,554 +0,0 @@ -//! This module contains utilities and traits for dealing with Fiat-Shamir -//! transcripts. - -use blake2b_simd::{Params as Blake2bParams, State as Blake2bState}; -use group::ff::{FromUniformBytes, PrimeField}; -use sha3::{Digest, Keccak256}; -use std::convert::TryInto; - -use halo2curves::{Coordinates, CurveAffine}; - -use std::io::{self, Read, Write}; -use std::marker::PhantomData; - -/// Prefix to a prover's message soliciting a challenge -const BLAKE2B_PREFIX_CHALLENGE: u8 = 0; - -/// Prefix to a prover's message containing a curve point -const BLAKE2B_PREFIX_POINT: u8 = 1; - -/// Prefix to a prover's message containing a scalar -const BLAKE2B_PREFIX_SCALAR: u8 = 2; - -/// Prefix to a prover's message soliciting a challenge -const KECCAK256_PREFIX_CHALLENGE: u8 = 0; - -/// First prefix to a prover's message soliciting a challenge -/// Not included in the growing state! -const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10; - -/// Second prefix to a prover's message soliciting a challenge -/// Not included in the growing state! -const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11; - -/// Prefix to a prover's message containing a curve point -const KECCAK256_PREFIX_POINT: u8 = 1; - -/// Prefix to a prover's message containing a scalar -const KECCAK256_PREFIX_SCALAR: u8 = 2; - -/// Generic transcript view (from either the prover or verifier's perspective) -pub trait Transcript> { - /// Squeeze an encoded verifier challenge from the transcript. - fn squeeze_challenge(&mut self) -> E; - - /// Squeeze a typed challenge (in the scalar field) from the transcript. 
- fn squeeze_challenge_scalar(&mut self) -> ChallengeScalar { - ChallengeScalar { - inner: self.squeeze_challenge().get_scalar(), - _marker: PhantomData, - } - } - - /// Writing the point to the transcript without writing it to the proof, - /// treating it as a common input. - fn common_point(&mut self, point: C) -> io::Result<()>; - - /// Writing the scalar to the transcript without writing it to the proof, - /// treating it as a common input. - fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>; -} - -/// Transcript view from the perspective of a verifier that has access to an -/// input stream of data from the prover to the verifier. -pub trait TranscriptRead>: Transcript { - /// Read a curve point from the prover. - fn read_point(&mut self) -> io::Result; - - /// Read a curve scalar from the prover. - fn read_scalar(&mut self) -> io::Result; -} - -/// Transcript view from the perspective of a prover that has access to an -/// output stream of messages from the prover to the verifier. -pub trait TranscriptWrite>: Transcript { - /// Write a curve point to the proof and the transcript. - fn write_point(&mut self, point: C) -> io::Result<()>; - - /// Write a scalar to the proof and the transcript. - fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>; -} - -/// Initializes transcript at verifier side. -pub trait TranscriptReadBuffer>: - TranscriptRead -{ - /// Initialize a transcript given an input buffer. - fn init(reader: R) -> Self; -} - -/// Manages beginning and finishing of transcript pipeline. -pub trait TranscriptWriterBuffer>: - TranscriptWrite -{ - /// Initialize a transcript given an output buffer. - fn init(writer: W) -> Self; - - /// Conclude the interaction and return the output buffer (writer). - fn finalize(self) -> W; -} - -/// We will replace BLAKE2b with an algebraic hash function in a later version. 
-#[derive(Debug, Clone)] -pub struct Blake2bRead> { - state: Blake2bState, - reader: R, - _marker: PhantomData<(C, E)>, -} - -/// Keccak256 hash function reader for EVM compatibility -#[derive(Debug, Clone)] -pub struct Keccak256Read> { - state: Keccak256, - reader: R, - _marker: PhantomData<(C, E)>, -} - -impl TranscriptReadBuffer> - for Blake2bRead> -where - C::Scalar: FromUniformBytes<64>, -{ - /// Initialize a transcript given an input buffer. - fn init(reader: R) -> Self { - Blake2bRead { - state: Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Transcript") - .to_state(), - reader, - _marker: PhantomData, - } - } -} - -impl TranscriptReadBuffer> - for Keccak256Read> -where - C::Scalar: FromUniformBytes<64>, -{ - /// Initialize a transcript given an input buffer. - fn init(reader: R) -> Self { - let mut state = Keccak256::new(); - state.update(b"Halo2-Transcript"); - Keccak256Read { - state, - reader, - _marker: PhantomData, - } - } -} - -impl TranscriptRead> - for Blake2bRead> -where - C::Scalar: FromUniformBytes<64>, -{ - fn read_point(&mut self) -> io::Result { - let mut compressed = C::Repr::default(); - self.reader.read_exact(compressed.as_mut())?; - let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof") - })?; - self.common_point(point)?; - - Ok(point) - } - - fn read_scalar(&mut self) -> io::Result { - let mut data = ::Repr::default(); - self.reader.read_exact(data.as_mut())?; - let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "invalid field element encoding in proof", - ) - })?; - self.common_scalar(scalar)?; - - Ok(scalar) - } -} - -impl TranscriptRead> - for Keccak256Read> -where - C::Scalar: FromUniformBytes<64>, -{ - fn read_point(&mut self) -> io::Result { - let mut compressed = C::Repr::default(); - self.reader.read_exact(compressed.as_mut())?; - let point: C = 
Option::from(C::from_bytes(&compressed)).ok_or_else(|| { - io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof") - })?; - self.common_point(point)?; - - Ok(point) - } - - fn read_scalar(&mut self) -> io::Result { - let mut data = ::Repr::default(); - self.reader.read_exact(data.as_mut())?; - let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "invalid field element encoding in proof", - ) - })?; - self.common_scalar(scalar)?; - - Ok(scalar) - } -} - -impl Transcript> for Blake2bRead> -where - C::Scalar: FromUniformBytes<64>, -{ - fn squeeze_challenge(&mut self) -> Challenge255 { - self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]); - let hasher = self.state.clone(); - let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap(); - Challenge255::::new(&result) - } - - fn common_point(&mut self, point: C) -> io::Result<()> { - self.state.update(&[BLAKE2B_PREFIX_POINT]); - let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "cannot write points at infinity to the transcript", - ) - })?; - self.state.update(coords.x().to_repr().as_ref()); - self.state.update(coords.y().to_repr().as_ref()); - - Ok(()) - } - - fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { - self.state.update(&[BLAKE2B_PREFIX_SCALAR]); - self.state.update(scalar.to_repr().as_ref()); - - Ok(()) - } -} - -impl Transcript> - for Keccak256Read> -where - C::Scalar: FromUniformBytes<64>, -{ - fn squeeze_challenge(&mut self) -> Challenge255 { - self.state.update([KECCAK256_PREFIX_CHALLENGE]); - - let mut state_lo = self.state.clone(); - let mut state_hi = self.state.clone(); - state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]); - state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]); - let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap(); - let result_hi: [u8; 32] = 
state_hi.finalize().as_slice().try_into().unwrap(); - - let mut t = result_lo.to_vec(); - t.extend_from_slice(&result_hi[..]); - let result: [u8; 64] = t.as_slice().try_into().unwrap(); - - Challenge255::::new(&result) - } - - fn common_point(&mut self, point: C) -> io::Result<()> { - self.state.update([KECCAK256_PREFIX_POINT]); - let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "cannot write points at infinity to the transcript", - ) - })?; - self.state.update(coords.x().to_repr().as_ref()); - self.state.update(coords.y().to_repr().as_ref()); - - Ok(()) - } - - fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { - self.state.update([KECCAK256_PREFIX_SCALAR]); - self.state.update(scalar.to_repr().as_ref()); - - Ok(()) - } -} - -/// We will replace BLAKE2b with an algebraic hash function in a later version. -#[derive(Debug, Clone)] -pub struct Blake2bWrite> { - state: Blake2bState, - writer: W, - _marker: PhantomData<(C, E)>, -} - -/// Keccak256 hash function writer for EVM compatibility -#[derive(Debug, Clone)] -pub struct Keccak256Write> { - state: Keccak256, - writer: W, - _marker: PhantomData<(C, E)>, -} - -impl TranscriptWriterBuffer> - for Blake2bWrite> -where - C::Scalar: FromUniformBytes<64>, -{ - /// Initialize a transcript given an output buffer. - fn init(writer: W) -> Self { - Blake2bWrite { - state: Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Transcript") - .to_state(), - writer, - _marker: PhantomData, - } - } - - fn finalize(self) -> W { - // TODO: handle outstanding scalars? see issue #138 - self.writer - } -} - -impl TranscriptWriterBuffer> - for Keccak256Write> -where - C::Scalar: FromUniformBytes<64>, -{ - /// Initialize a transcript given an output buffer. 
- fn init(writer: W) -> Self { - let mut state = Keccak256::new(); - state.update(b"Halo2-Transcript"); - Keccak256Write { - state, - writer, - _marker: PhantomData, - } - } - - /// Conclude the interaction and return the output buffer (writer). - fn finalize(self) -> W { - // TODO: handle outstanding scalars? see issue #138 - self.writer - } -} - -impl TranscriptWrite> - for Blake2bWrite> -where - C::Scalar: FromUniformBytes<64>, -{ - fn write_point(&mut self, point: C) -> io::Result<()> { - self.common_point(point)?; - let compressed = point.to_bytes(); - self.writer.write_all(compressed.as_ref()) - } - fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { - self.common_scalar(scalar)?; - let data = scalar.to_repr(); - self.writer.write_all(data.as_ref()) - } -} - -impl TranscriptWrite> - for Keccak256Write> -where - C::Scalar: FromUniformBytes<64>, -{ - fn write_point(&mut self, point: C) -> io::Result<()> { - self.common_point(point)?; - let compressed = point.to_bytes(); - self.writer.write_all(compressed.as_ref()) - } - fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { - self.common_scalar(scalar)?; - let data = scalar.to_repr(); - self.writer.write_all(data.as_ref()) - } -} - -impl Transcript> - for Blake2bWrite> -where - C::Scalar: FromUniformBytes<64>, -{ - fn squeeze_challenge(&mut self) -> Challenge255 { - self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]); - let hasher = self.state.clone(); - let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap(); - Challenge255::::new(&result) - } - - fn common_point(&mut self, point: C) -> io::Result<()> { - self.state.update(&[BLAKE2B_PREFIX_POINT]); - let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "cannot write points at infinity to the transcript", - ) - })?; - self.state.update(coords.x().to_repr().as_ref()); - self.state.update(coords.y().to_repr().as_ref()); - - Ok(()) - } - - fn common_scalar(&mut 
self, scalar: C::Scalar) -> io::Result<()> { - self.state.update(&[BLAKE2B_PREFIX_SCALAR]); - self.state.update(scalar.to_repr().as_ref()); - - Ok(()) - } -} - -impl Transcript> - for Keccak256Write> -where - C::Scalar: FromUniformBytes<64>, -{ - fn squeeze_challenge(&mut self) -> Challenge255 { - self.state.update([KECCAK256_PREFIX_CHALLENGE]); - - let mut state_lo = self.state.clone(); - let mut state_hi = self.state.clone(); - state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]); - state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]); - let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap(); - let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap(); - - let mut t = result_lo.to_vec(); - t.extend_from_slice(&result_hi[..]); - let result: [u8; 64] = t.as_slice().try_into().unwrap(); - - Challenge255::::new(&result) - } - - fn common_point(&mut self, point: C) -> io::Result<()> { - self.state.update([KECCAK256_PREFIX_POINT]); - let coords: Coordinates = Option::from(point.coordinates()).ok_or_else(|| { - io::Error::new( - io::ErrorKind::Other, - "cannot write points at infinity to the transcript", - ) - })?; - self.state.update(coords.x().to_repr().as_ref()); - self.state.update(coords.y().to_repr().as_ref()); - - Ok(()) - } - - fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> { - self.state.update([KECCAK256_PREFIX_SCALAR]); - self.state.update(scalar.to_repr().as_ref()); - - Ok(()) - } -} - -/// The scalar representation of a verifier challenge. -/// -/// The `Type` type can be used to scope the challenge to a specific context, or -/// set to `()` if no context is required. 
-#[derive(Copy, Clone, Debug)] -pub struct ChallengeScalar { - inner: C::Scalar, - _marker: PhantomData, -} - -impl std::ops::Deref for ChallengeScalar { - type Target = C::Scalar; - - fn deref(&self) -> &Self::Target { - &self.inner - } -} - -/// `EncodedChallenge` defines a challenge encoding with a [`Self::Input`] -/// that is used to derive the challenge encoding and `get_challenge` obtains -/// the _real_ `C::Scalar` that the challenge encoding represents. -pub trait EncodedChallenge { - /// The Input type used to derive the challenge encoding. For example, - /// an input from the Poseidon hash would be a base field element; - /// an input from the Blake2b hash would be a [u8; 64]. - type Input; - - /// Get an encoded challenge from a given input challenge. - fn new(challenge_input: &Self::Input) -> Self; - - /// Get a scalar field element from an encoded challenge. - fn get_scalar(&self) -> C::Scalar; - - /// Cast an encoded challenge as a typed `ChallengeScalar`. - fn as_challenge_scalar(&self) -> ChallengeScalar { - ChallengeScalar { - inner: self.get_scalar(), - _marker: PhantomData, - } - } -} - -/// A 255-bit challenge. 
-#[derive(Copy, Clone, Debug)] -pub struct Challenge255([u8; 32], PhantomData); - -impl std::ops::Deref for Challenge255 { - type Target = [u8; 32]; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl EncodedChallenge for Challenge255 -where - C::Scalar: FromUniformBytes<64>, -{ - type Input = [u8; 64]; - - fn new(challenge_input: &[u8; 64]) -> Self { - Challenge255( - C::Scalar::from_uniform_bytes(challenge_input) - .to_repr() - .as_ref() - .try_into() - .expect("Scalar fits into 256 bits"), - PhantomData, - ) - } - fn get_scalar(&self) -> C::Scalar { - let mut repr = ::Repr::default(); - repr.as_mut().copy_from_slice(&self.0); - C::Scalar::from_repr(repr).unwrap() - } -} - -pub(crate) fn read_n_points, T: TranscriptRead>( - transcript: &mut T, - n: usize, -) -> io::Result> { - (0..n).map(|_| transcript.read_point()).collect() -} - -pub(crate) fn read_n_scalars, T: TranscriptRead>( - transcript: &mut T, - n: usize, -) -> io::Result> { - (0..n).map(|_| transcript.read_scalar()).collect() -} From f2c9074bdb0f91be24b7caa682e2f20d89589c81 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Thu, 25 Jan 2024 15:03:06 +0000 Subject: [PATCH 55/79] Cleanup unused dependencies in common, backend --- backend/src/lib.rs | 3 --- backend/src/plonk.rs | 7 +----- backend/src/plonk/permutation.rs | 3 --- backend/src/plonk/permutation/keygen.rs | 17 +------------- backend/src/plonk/prover.rs | 5 +---- backend/src/plonk/verifier.rs | 4 ++-- common/src/lib.rs | 2 -- common/src/plonk.rs | 30 +++---------------------- common/src/plonk/assigned.rs | 2 -- common/src/plonk/circuit.rs | 14 +++++------- common/src/plonk/error.rs | 2 ++ common/src/plonk/keygen.rs | 18 ++++----------- common/src/plonk/lookup.rs | 1 - common/src/plonk/permutation.rs | 13 +---------- common/src/plonk/shuffle.rs | 1 - 15 files changed, 20 insertions(+), 102 deletions(-) delete mode 100644 common/src/plonk/assigned.rs diff --git a/backend/src/lib.rs b/backend/src/lib.rs index 6058e4980d..5973dcf661 100644 
--- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -1,6 +1,3 @@ -#![allow(dead_code)] // TODO: Remove -#![allow(unused_imports)] // TODO: Remove - pub mod plonk; // Internal re-exports diff --git a/backend/src/plonk.rs b/backend/src/plonk.rs index daa4e64aa5..63159df4c8 100644 --- a/backend/src/plonk.rs +++ b/backend/src/plonk.rs @@ -12,13 +12,8 @@ use halo2_common::helpers::{ SerdeCurveAffine, SerdePrimeField, }; use halo2_common::plonk::{Circuit, ConstraintSystem, PinnedConstraintSystem}; -use halo2_common::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +use halo2_common::transcript::{EncodedChallenge, Transcript}; use halo2_common::SerdeFormat; -use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, - Instance, InstanceQueryMid, PreprocessingV2, -}; -use halo2_middleware::poly::Rotation; use std::io; diff --git a/backend/src/plonk/permutation.rs b/backend/src/plonk/permutation.rs index 12f6291398..78f6abf8e7 100644 --- a/backend/src/plonk/permutation.rs +++ b/backend/src/plonk/permutation.rs @@ -6,13 +6,10 @@ use crate::{ polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, SerdePrimeField, }, - plonk::Error, poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, SerdeFormat, }; pub use halo2_common::plonk::permutation::Argument; -use halo2_middleware::circuit::{Any, Column}; -use halo2_middleware::permutation::{ArgumentV2, Cell}; use std::io; diff --git a/backend/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs index 3a83e6889c..f44a45a991 100644 --- a/backend/src/plonk/permutation/keygen.rs +++ b/backend/src/plonk/permutation/keygen.rs @@ -11,14 +11,11 @@ use crate::{ }, }; use halo2_middleware::circuit::{Any, Column}; -use halo2_middleware::permutation::{ArgumentV2, AssemblyMid, Cell}; +use halo2_middleware::permutation::{ArgumentV2, AssemblyMid}; #[cfg(feature = "thread-safe-region")] use 
crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; -#[cfg(not(feature = "thread-safe-region"))] -use crate::multicore::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; - #[cfg(feature = "thread-safe-region")] use std::collections::{BTreeSet, HashMap}; @@ -144,18 +141,6 @@ impl Assembly { ) -> ProvingKey { build_pk(params, domain, p, |i, j| self.mapping[i][j]) } - - /// Returns columns that participate in the permutation argument. - pub fn columns(&self) -> &[Column] { - &self.columns - } - - /// Returns mappings of the copies. - pub fn mapping( - &self, - ) -> impl Iterator + '_> { - self.mapping.iter().map(|c| c.par_iter().copied()) - } } #[cfg(feature = "thread-safe-region")] diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index 0fd17bbd99..7ebb022e9c 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -9,15 +9,12 @@ use crate::plonk::permutation::prover::permutation_commit; use crate::plonk::shuffle::prover::shuffle_commit_product; use crate::plonk::{lookup, permutation, shuffle, vanishing, ProvingKey}; use halo2_common::plonk::{ - circuit::{sealed, Assignment, Circuit, Selector}, - ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, + circuit::sealed, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use group::prime::PrimeCurveAffine; use halo2_common::{ arithmetic::{eval_polynomial, CurveAffine}, - circuit::Value, poly::{ commitment::{Blind, CommitmentScheme, Params, Prover}, Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs index 919530fb9c..02b3bae87a 100644 --- a/backend/src/plonk/verifier.rs +++ b/backend/src/plonk/verifier.rs @@ -153,7 +153,7 @@ where vk.cs .lookups .iter() - .map(|argument| lookup_read_permuted_commitments(transcript)) 
+ .map(|_argument| lookup_read_permuted_commitments(transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -188,7 +188,7 @@ where vk.cs .shuffles .iter() - .map(|argument| shuffle_read_product_commitment(transcript)) + .map(|_argument| shuffle_read_product_commitment(transcript)) .collect::, _>>() }) .collect::, _>>()?; diff --git a/common/src/lib.rs b/common/src/lib.rs index 75fbee1c0a..5016057fa6 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,7 +1,5 @@ //! # halo2_proofs -#![allow(dead_code)] // TODO: Remove -#![allow(unused_imports)] // TODO: Remove #![cfg_attr(docsrs, feature(doc_cfg))] // The actual lints we want to disable. #![allow(clippy::op_ref, clippy::many_single_char_names)] diff --git a/common/src/plonk.rs b/common/src/plonk.rs index 76b23665bc..fb3284e0b9 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -5,45 +5,21 @@ //! [halo]: https://eprint.iacr.org/2019/1021 //! [plonk]: https://eprint.iacr.org/2019/953 -use blake2b_simd::Params as Blake2bParams; -use group::ff::{Field, FromUniformBytes, PrimeField}; - -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, - SerdePrimeField, -}; -use crate::poly::{ - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, -}; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; -use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Challenge, Column, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, - Instance, InstanceQueryMid, PreprocessingV2, -}; +use crate::poly::{LagrangeCoeff, Polynomial}; +use crate::transcript::ChallengeScalar; +use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; use halo2_middleware::poly::Rotation; -pub mod assigned; pub mod circuit; pub mod error; -// pub mod evaluation; pub mod keygen; pub mod lookup; pub mod permutation; pub mod shuffle; -// pub 
mod vanishing; -// pub mod verifier; - -pub use assigned::*; pub use circuit::*; pub use error::*; pub use keygen::*; -// pub use verifier::*; - -use std::io; /// List of queries (columns and rotations) used by a circuit #[derive(Debug, Clone)] diff --git a/common/src/plonk/assigned.rs b/common/src/plonk/assigned.rs deleted file mode 100644 index b1f7c7f73b..0000000000 --- a/common/src/plonk/assigned.rs +++ /dev/null @@ -1,2 +0,0 @@ -use group::ff::Field; -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index b1bce1305b..4186a8222e 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -1,26 +1,20 @@ use super::{lookup, permutation, shuffle, Error, Queries}; use crate::circuit::layouter::SyncDeps; -use crate::{ - circuit::{Layouter, Region, Value}, - poly::{batch_invert_assigned, Polynomial}, -}; +use crate::circuit::{Layouter, Region, Value}; use core::cmp::max; use core::ops::{Add, Mul}; use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, CompiledCircuitV2, ConstraintSystemV2Backend, - ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, - PreprocessingV2, + Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, + Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, }; use halo2_middleware::ff::Field; use halo2_middleware::metadata; use halo2_middleware::plonk::Assigned; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; -use std::collections::BTreeSet; use std::collections::HashMap; use std::fmt::Debug; use std::iter::{Product, Sum}; -use std::ops::RangeTo; use std::{ convert::TryFrom, ops::{Neg, Sub}, @@ -454,6 +448,8 @@ pub trait Circuit { fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; } +// TODO: Create two types from this, one with selector for the frontend (this way we can move the +// 
Layouter traits, Region and Selector to frontend). And one without selector for the backend. /// Low-degree expression representing an identity that must hold over the committed columns. #[derive(Clone, PartialEq, Eq)] pub enum Expression { diff --git a/common/src/plonk/error.rs b/common/src/plonk/error.rs index a1b4ac30eb..eec130cfd7 100644 --- a/common/src/plonk/error.rs +++ b/common/src/plonk/error.rs @@ -5,6 +5,8 @@ use std::io; use super::TableColumn; use halo2_middleware::circuit::{Any, Column}; +// TODO: Consider splitting this Error into a frontend and backend version? + /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up #[derive(Debug)] diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 0c51e1519d..5f584310e5 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -1,23 +1,13 @@ use std::ops::Range; -use group::Curve; -use halo2_middleware::ff::{Field, FromUniformBytes}; +use halo2_middleware::ff::Field; use super::{ - circuit::{Assignment, Circuit, ConstraintSystem, Selector}, + circuit::{Assignment, Selector}, permutation, Error, LagrangeCoeff, Polynomial, }; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - circuit::Value, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; -use halo2_middleware::circuit::{ - Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, -}; +use crate::circuit::Value; +use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Assembly to be used in circuit synthesis. 
diff --git a/common/src/plonk/lookup.rs b/common/src/plonk/lookup.rs index 80ef82624e..3116b79f5e 100644 --- a/common/src/plonk/lookup.rs +++ b/common/src/plonk/lookup.rs @@ -1,5 +1,4 @@ use super::circuit::Expression; -use halo2_middleware::circuit::ExpressionMid; use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 2c6a7416b0..2e4f7dba6e 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -1,20 +1,9 @@ //! Implementation of permutation argument. -use crate::{ - arithmetic::CurveAffine, - helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, - SerdeCurveAffine, SerdePrimeField, - }, - plonk::Error, - poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, - SerdeFormat, -}; +use crate::plonk::Error; use halo2_middleware::circuit::{Any, Column}; use halo2_middleware::permutation::{ArgumentV2, Cell}; -use std::io; - /// A permutation argument. 
#[derive(Debug, Clone)] pub struct Argument { diff --git a/common/src/plonk/shuffle.rs b/common/src/plonk/shuffle.rs index 68fc375cac..cdc773efef 100644 --- a/common/src/plonk/shuffle.rs +++ b/common/src/plonk/shuffle.rs @@ -1,5 +1,4 @@ use super::circuit::Expression; -use halo2_middleware::circuit::ExpressionMid; use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; From 2293d619c0062d6be56588d2b55dcdaebac476bc Mon Sep 17 00:00:00 2001 From: Eduard S Date: Thu, 25 Jan 2024 16:56:32 +0000 Subject: [PATCH 56/79] Clean unused imports --- frontend/src/circuit.rs | 18 +++--------------- frontend/src/dev.rs | 3 +-- frontend/src/lib.rs | 3 --- halo2_proofs/src/lib.rs | 3 --- halo2_proofs/src/plonk/keygen.rs | 11 ++--------- halo2_proofs/src/plonk/prover.rs | 27 +++++---------------------- 6 files changed, 11 insertions(+), 54 deletions(-) diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index b76062ac50..0f9786fa72 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -1,35 +1,23 @@ //! Traits and structs for implementing circuit components. 
-use core::cmp::max; -use core::ops::{Add, Mul}; -use halo2_common::circuit::layouter::SyncDeps; use halo2_common::plonk::sealed::{self, SealedPhase}; use halo2_common::plonk::FloorPlanner; -use halo2_common::plonk::{lookup, permutation, shuffle, Error, Queries}; +use halo2_common::plonk::{permutation, Error}; use halo2_common::plonk::{Assignment, FirstPhase, SecondPhase, Selector, ThirdPhase}; use halo2_common::plonk::{Circuit, ConstraintSystem}; use halo2_common::{ - circuit::{Layouter, Region, Value}, + circuit::Value, poly::{batch_invert_assigned, Polynomial}, }; use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, CompiledCircuitV2, ConstraintSystemV2Backend, - ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, - PreprocessingV2, + Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, PreprocessingV2, }; use halo2_middleware::ff::Field; -use halo2_middleware::metadata; use halo2_middleware::plonk::Assigned; -use halo2_middleware::poly::Rotation; use std::collections::BTreeSet; use std::collections::HashMap; use std::fmt::Debug; -use std::iter::{Product, Sum}; use std::ops::RangeTo; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, -}; /// Compile a circuit. 
Runs configure and synthesize on the circuit in order to materialize the /// circuit into its columns and the column configuration; as well as doing the fixed column and diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index 4e93f36882..b502cc8d3e 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -22,8 +22,7 @@ use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance} use halo2_middleware::plonk::Assigned; use halo2_common::multicore::{ - IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, - ParallelSliceMut, + IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut, }; pub mod metadata; diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index 39952d73aa..8fa079cfaa 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -1,5 +1,2 @@ -#![allow(dead_code)] // TODO: Remove -#![allow(unused_imports)] // TODO: Remove - pub mod circuit; pub mod dev; diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index 76b89a91e3..9dddaa980d 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -3,9 +3,6 @@ //! module structure so that projects depending on halo2 can update their dependency towards it //! without breaking. 
-#![allow(dead_code)] // TODO: Remove -#![allow(unused_imports)] // TODO: Remove - pub mod plonk; pub mod circuit { diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index a00e824650..b395d041f2 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -1,16 +1,9 @@ use halo2_backend::plonk::{ - circuit::{Assignment, Circuit, ConstraintSystem, Selector}, + circuit::Circuit, keygen::{keygen_pk_v2, keygen_vk_v2}, ProvingKey, VerifyingKey, }; -use halo2_backend::{ - arithmetic::{parallelize, CurveAffine}, - circuit::Value, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; +use halo2_backend::{arithmetic::CurveAffine, poly::commitment::Params}; use halo2_common::plonk::Error; use halo2_frontend::circuit::compile_circuit; use halo2_middleware::ff::FromUniformBytes; diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 014eaf4469..9163f795e9 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -1,28 +1,11 @@ -use halo2_backend::plonk::{ - circuit::{Assignment, Circuit, ConstraintSystem, Selector}, - keygen::{keygen_pk_v2, keygen_vk_v2}, - prover::ProverV2, - ProvingKey, VerifyingKey, -}; -use halo2_backend::{arithmetic::parallelize, poly::EvaluationDomain}; +use halo2_backend::plonk::{circuit::Circuit, prover::ProverV2, ProvingKey}; use halo2_common::plonk::Error; -use halo2_common::{ - arithmetic::{eval_polynomial, CurveAffine}, - circuit::Value, - poly::{ - commitment::{Blind, CommitmentScheme, Params, Prover}, - Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, - }, -}; -use halo2_common::{ - poly::batch_invert_assigned, - transcript::{EncodedChallenge, TranscriptWrite}, -}; +use halo2_common::poly::commitment::{CommitmentScheme, Params, Prover}; +use halo2_common::transcript::{EncodedChallenge, TranscriptWrite}; use halo2_frontend::circuit::{compile_circuit, WitnessCalculator}; -use halo2_middleware::ff::{Field, 
FromUniformBytes, WithSmallOrderMulGroup}; -use halo2_middleware::plonk::Assigned; +use halo2_middleware::ff::{FromUniformBytes, WithSmallOrderMulGroup}; use rand_core::RngCore; -use std::{collections::HashMap, iter}; +use std::collections::HashMap; /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was From cd6972a473775ac2fccfbbf7754e92f894391648 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 11:42:36 +0000 Subject: [PATCH 57/79] Pass frontend tests --- common/src/circuit/floor_planner.rs | 4 +- .../src/circuit/floor_planner/single_pass.rs | 57 ---- common/src/circuit/floor_planner/v1.rs | 58 +--- .../src/circuit/floor_planner/v1/strategy.rs | 42 +-- common/src/circuit/layouter.rs | 6 +- common/src/circuit/table_layouter.rs | 261 ------------------ frontend/src/circuit.rs | 12 +- frontend/src/circuit/floor_planner.rs | 4 + .../src/circuit/floor_planner/single_pass.rs | 56 ++++ frontend/src/circuit/floor_planner/v1.rs | 58 ++++ .../src/circuit/floor_planner/v1/strategy.rs | 41 +++ frontend/src/circuit/table_layouter.rs | 259 +++++++++++++++++ frontend/src/dev.rs | 53 +--- 13 files changed, 441 insertions(+), 470 deletions(-) create mode 100644 frontend/src/circuit/floor_planner.rs create mode 100644 frontend/src/circuit/floor_planner/single_pass.rs create mode 100644 frontend/src/circuit/floor_planner/v1.rs create mode 100644 frontend/src/circuit/floor_planner/v1/strategy.rs create mode 100644 frontend/src/circuit/table_layouter.rs diff --git a/common/src/circuit/floor_planner.rs b/common/src/circuit/floor_planner.rs index 1b629034e6..c3ba7d85b7 100644 --- a/common/src/circuit/floor_planner.rs +++ b/common/src/circuit/floor_planner.rs @@ -1,6 +1,6 @@ //! Implementations of common circuit floor planners. 
-pub(super) mod single_pass; +pub mod single_pass; -mod v1; +pub mod v1; pub use v1::{V1Pass, V1}; diff --git a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index dca8fa5b4c..9905208a79 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -374,60 +374,3 @@ impl<'r, 'a, F: Field, CS: Assignment + 'a + SyncDeps> RegionLayouter Ok(()) } } - -#[cfg(test)] -mod tests { - use halo2curves::pasta::vesta; - - use super::SimpleFloorPlanner; - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; - - #[test] - fn not_enough_columns_for_constants() { - struct MyCircuit {} - - impl Circuit for MyCircuit { - type Config = Column; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - MyCircuit {} - } - - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { - meta.advice_column() - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { - layouter.assign_region( - || "assign constant", - |mut region| { - region.assign_advice_from_constant( - || "one", - config, - 0, - vesta::Scalar::one(), - ) - }, - )?; - - Ok(()) - } - } - - let circuit = MyCircuit {}; - assert!(matches!( - MockProver::run(3, &circuit, vec![]).unwrap_err(), - Error::NotEnoughColumnsForConstants, - )); - } -} diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index fe93680d45..e0ae13e82e 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -13,7 +13,7 @@ use crate::{ use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; use halo2_middleware::plonk::Assigned; -mod strategy; +pub mod strategy; /// The version 1 [`FloorPlanner`] provided by `halo2`. 
/// @@ -490,59 +490,3 @@ impl<'r, 'a, F: Field, CS: Assignment + SyncDeps> RegionLayouter for V1Reg Ok(()) } } - -#[cfg(test)] -mod tests { - use halo2curves::pasta::vesta; - - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; - - #[test] - fn not_enough_columns_for_constants() { - struct MyCircuit {} - - impl Circuit for MyCircuit { - type Config = Column; - type FloorPlanner = super::V1; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - MyCircuit {} - } - - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { - meta.advice_column() - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { - layouter.assign_region( - || "assign constant", - |mut region| { - region.assign_advice_from_constant( - || "one", - config, - 0, - vesta::Scalar::one(), - ) - }, - )?; - - Ok(()) - } - } - - let circuit = MyCircuit {}; - assert!(matches!( - MockProver::run(3, &circuit, vec![]).unwrap_err(), - Error::NotEnoughColumnsForConstants, - )); - } -} diff --git a/common/src/circuit/floor_planner/v1/strategy.rs b/common/src/circuit/floor_planner/v1/strategy.rs index bb73c48e4c..c711b7cec5 100644 --- a/common/src/circuit/floor_planner/v1/strategy.rs +++ b/common/src/circuit/floor_planner/v1/strategy.rs @@ -163,7 +163,7 @@ fn first_fit_region( /// Positions the regions starting at the earliest row for which none of the columns are /// in use, taking into account gaps between earlier regions. -fn slot_in( +pub fn slot_in( region_shapes: Vec, ) -> (Vec<(RegionStart, RegionShape)>, CircuitAllocations) { // Tracks the empty regions for each column. 
@@ -241,43 +241,3 @@ pub fn slot_in_biggest_advice_first( (regions, column_allocations) } - -#[test] -fn test_slot_in() { - use crate::plonk::Column; - - let regions = vec![ - RegionShape { - region_index: 0.into(), - columns: vec![Column::new(0, Any::advice()), Column::new(1, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 15, - }, - RegionShape { - region_index: 1.into(), - columns: vec![Column::new(2, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 10, - }, - RegionShape { - region_index: 2.into(), - columns: vec![Column::new(2, Any::advice()), Column::new(0, Any::advice())] - .into_iter() - .map(|a| a.into()) - .collect(), - row_count: 10, - }, - ]; - assert_eq!( - slot_in(regions) - .0 - .into_iter() - .map(|(i, _)| i) - .collect::>(), - vec![0.into(), 0.into(), 15.into()] - ); -} diff --git a/common/src/circuit/layouter.rs b/common/src/circuit/layouter.rs index 578825734c..e70606ac9b 100644 --- a/common/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -140,9 +140,9 @@ pub trait RegionLayouter: fmt::Debug + SyncDeps { /// the set of columns it uses as well as the number of rows it uses. #[derive(Clone, Debug)] pub struct RegionShape { - pub(super) region_index: RegionIndex, - pub(super) columns: HashSet, - pub(super) row_count: usize, + pub region_index: RegionIndex, + pub columns: HashSet, + pub row_count: usize, } /// The virtual column involved in a region. 
This includes concrete columns, diff --git a/common/src/circuit/table_layouter.rs b/common/src/circuit/table_layouter.rs index b3455675c7..ae7e7551f3 100644 --- a/common/src/circuit/table_layouter.rs +++ b/common/src/circuit/table_layouter.rs @@ -151,264 +151,3 @@ pub fn compute_table_lengths( }) .map(|col_len| col_len.1) } - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use crate::{ - circuit::{Layouter, SimpleFloorPlanner}, - dev::MockProver, - plonk::{Circuit, ConstraintSystem}, - }; - use halo2_middleware::poly::Rotation; - - use super::*; - - #[test] - fn table_no_default() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = meta.lookup_table_column(); - - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "duplicate assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 1, - || Value::known(Fp::zero()), - ) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } not fully assigned. Help: assign a value at offset 0." 
- ); - } - - #[test] - fn table_overwrite_default() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = meta.lookup_table_column(); - - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "duplicate assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 0, - || Value::known(Fp::zero()), - )?; - table.assign_cell( - || "duplicate", - config.table, - 0, - || Value::known(Fp::zero()), - ) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "Attempted to overwrite default value Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } with Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } in TableColumn { inner: Column { index: 0, column_type: Fixed } }" - ); - } - - #[test] - fn table_reuse_column() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: TableColumn, - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = 
meta.lookup_table_column(); - - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - vec![(a, table)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "first assignment", - |mut table| { - table.assign_cell( - || "default", - config.table, - 0, - || Value::known(Fp::zero()), - ) - }, - )?; - - layouter.assign_table( - || "reuse", - |mut table| { - table.assign_cell(|| "reuse", config.table, 1, || Value::known(Fp::zero())) - }, - ) - } - } - - let prover = MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } has already been used" - ); - } - - #[test] - fn table_uneven_columns() { - const K: u32 = 4; - - #[derive(Clone)] - struct FaultyCircuitConfig { - table: (TableColumn, TableColumn), - } - - struct FaultyCircuit; - - impl Circuit for FaultyCircuit { - type Config = FaultyCircuitConfig; - type FloorPlanner = SimpleFloorPlanner; - #[cfg(feature = "circuit-params")] - type Params = (); - - fn without_witnesses(&self) -> Self { - Self - } - - fn configure(meta: &mut ConstraintSystem) -> Self::Config { - let a = meta.advice_column(); - let table = (meta.lookup_table_column(), meta.lookup_table_column()); - meta.lookup("", |cells| { - let a = cells.query_advice(a, Rotation::cur()); - - vec![(a.clone(), table.0), (a, table.1)] - }); - - Self::Config { table } - } - - fn synthesize( - &self, - config: Self::Config, - mut layouter: impl Layouter, - ) -> Result<(), Error> { - layouter.assign_table( - || "table with uneven columns", - |mut table| { - table.assign_cell(|| "", config.table.0, 0, || Value::known(Fp::zero()))?; - table.assign_cell(|| "", config.table.0, 1, || Value::known(Fp::zero()))?; - - table.assign_cell(|| "", config.table.1, 0, || Value::known(Fp::zero())) - }, - ) - } - } - - let prover = 
MockProver::run(K, &FaultyCircuit, vec![]); - assert_eq!( - format!("{}", prover.unwrap_err()), - "TableColumn { inner: Column { index: 0, column_type: Fixed } } has length 2 while TableColumn { inner: Column { index: 1, column_type: Fixed } } has length 1" - ); - } -} diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index 0f9786fa72..d4663438f2 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -5,10 +5,7 @@ use halo2_common::plonk::FloorPlanner; use halo2_common::plonk::{permutation, Error}; use halo2_common::plonk::{Assignment, FirstPhase, SecondPhase, Selector, ThirdPhase}; use halo2_common::plonk::{Circuit, ConstraintSystem}; -use halo2_common::{ - circuit::Value, - poly::{batch_invert_assigned, Polynomial}, -}; +use halo2_common::poly::{batch_invert_assigned, Polynomial}; use halo2_middleware::circuit::{ Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, PreprocessingV2, }; @@ -19,6 +16,13 @@ use std::collections::HashMap; use std::fmt::Debug; use std::ops::RangeTo; +mod floor_planner; +mod table_layouter; + +// Re-exports from common +pub use halo2_common::circuit::floor_planner::single_pass::SimpleFloorPlanner; +pub use halo2_common::circuit::{layouter, Layouter, Value}; + /// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the /// circuit into its columns and the column configuration; as well as doing the fixed column and /// copy constraints assignments. 
The output of this function can then be used for the key diff --git a/frontend/src/circuit/floor_planner.rs b/frontend/src/circuit/floor_planner.rs new file mode 100644 index 0000000000..cd63de8a67 --- /dev/null +++ b/frontend/src/circuit/floor_planner.rs @@ -0,0 +1,4 @@ +mod single_pass; +mod v1; + +pub use halo2_common::circuit::floor_planner::*; diff --git a/frontend/src/circuit/floor_planner/single_pass.rs b/frontend/src/circuit/floor_planner/single_pass.rs new file mode 100644 index 0000000000..ec2aa18ede --- /dev/null +++ b/frontend/src/circuit/floor_planner/single_pass.rs @@ -0,0 +1,56 @@ +pub use halo2_common::circuit::floor_planner::single_pass::*; +#[cfg(test)] +mod tests { + use halo2curves::pasta::vesta; + + use super::SimpleFloorPlanner; + use crate::dev::MockProver; + use halo2_common::plonk::{Circuit, ConstraintSystem, Error}; + use halo2_middleware::circuit::{Advice, Column}; + + #[test] + fn not_enough_columns_for_constants() { + struct MyCircuit {} + + impl Circuit for MyCircuit { + type Config = Column; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + MyCircuit {} + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + meta.advice_column() + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl crate::circuit::Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "assign constant", + |mut region| { + region.assign_advice_from_constant( + || "one", + config, + 0, + vesta::Scalar::one(), + ) + }, + )?; + + Ok(()) + } + } + + let circuit = MyCircuit {}; + assert!(matches!( + MockProver::run(3, &circuit, vec![]).unwrap_err(), + Error::NotEnoughColumnsForConstants, + )); + } +} diff --git a/frontend/src/circuit/floor_planner/v1.rs b/frontend/src/circuit/floor_planner/v1.rs new file mode 100644 index 0000000000..6a78b58eb6 --- /dev/null +++ b/frontend/src/circuit/floor_planner/v1.rs @@ -0,0 +1,58 @@ +mod 
strategy; + +pub use halo2_common::circuit::floor_planner::V1; + +#[cfg(test)] +mod tests { + use halo2curves::pasta::vesta; + + use crate::dev::MockProver; + use halo2_common::plonk::{Circuit, ConstraintSystem, Error}; + use halo2_middleware::circuit::{Advice, Column}; + + #[test] + fn not_enough_columns_for_constants() { + struct MyCircuit {} + + impl Circuit for MyCircuit { + type Config = Column; + type FloorPlanner = super::V1; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + MyCircuit {} + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + meta.advice_column() + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl crate::circuit::Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "assign constant", + |mut region| { + region.assign_advice_from_constant( + || "one", + config, + 0, + vesta::Scalar::one(), + ) + }, + )?; + + Ok(()) + } + } + + let circuit = MyCircuit {}; + assert!(matches!( + MockProver::run(3, &circuit, vec![]).unwrap_err(), + Error::NotEnoughColumnsForConstants, + )); + } +} diff --git a/frontend/src/circuit/floor_planner/v1/strategy.rs b/frontend/src/circuit/floor_planner/v1/strategy.rs new file mode 100644 index 0000000000..7b05709dff --- /dev/null +++ b/frontend/src/circuit/floor_planner/v1/strategy.rs @@ -0,0 +1,41 @@ +#[test] +fn test_slot_in() { + use crate::circuit::layouter::RegionShape; + use halo2_common::circuit::floor_planner::v1::strategy::slot_in; + use halo2_middleware::circuit::{Any, Column}; + + let regions = vec![ + RegionShape { + region_index: 0.into(), + columns: vec![Column::new(0, Any::advice()), Column::new(1, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 15, + }, + RegionShape { + region_index: 1.into(), + columns: vec![Column::new(2, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 10, + }, + RegionShape { + region_index: 2.into(), + columns: 
vec![Column::new(2, Any::advice()), Column::new(0, Any::advice())] + .into_iter() + .map(|a| a.into()) + .collect(), + row_count: 10, + }, + ]; + assert_eq!( + slot_in(regions) + .0 + .into_iter() + .map(|(i, _)| i) + .collect::>(), + vec![0.into(), 0.into(), 15.into()] + ); +} diff --git a/frontend/src/circuit/table_layouter.rs b/frontend/src/circuit/table_layouter.rs new file mode 100644 index 0000000000..f1c29c5214 --- /dev/null +++ b/frontend/src/circuit/table_layouter.rs @@ -0,0 +1,259 @@ +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use crate::{ + circuit::{Layouter, SimpleFloorPlanner}, + dev::MockProver, + }; + use halo2_common::circuit::Value; + use halo2_common::plonk::{Circuit, ConstraintSystem, Error, TableColumn}; + use halo2_middleware::poly::Rotation; + + #[test] + fn table_no_default() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = meta.lookup_table_column(); + + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "duplicate assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 1, + || Value::known(Fp::zero()), + ) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } not fully assigned. Help: assign a value at offset 0." 
+ ); + } + + #[test] + fn table_overwrite_default() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = meta.lookup_table_column(); + + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "duplicate assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 0, + || Value::known(Fp::zero()), + )?; + table.assign_cell( + || "duplicate", + config.table, + 0, + || Value::known(Fp::zero()), + ) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "Attempted to overwrite default value Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } with Value { inner: Some(Trivial(0x0000000000000000000000000000000000000000000000000000000000000000)) } in TableColumn { inner: Column { index: 0, column_type: Fixed } }" + ); + } + + #[test] + fn table_reuse_column() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: TableColumn, + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = 
meta.lookup_table_column(); + + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + vec![(a, table)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "first assignment", + |mut table| { + table.assign_cell( + || "default", + config.table, + 0, + || Value::known(Fp::zero()), + ) + }, + )?; + + layouter.assign_table( + || "reuse", + |mut table| { + table.assign_cell(|| "reuse", config.table, 1, || Value::known(Fp::zero())) + }, + ) + } + } + + let prover = MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } has already been used" + ); + } + + #[test] + fn table_uneven_columns() { + const K: u32 = 4; + + #[derive(Clone)] + struct FaultyCircuitConfig { + table: (TableColumn, TableColumn), + } + + struct FaultyCircuit; + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let table = (meta.lookup_table_column(), meta.lookup_table_column()); + meta.lookup("", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + + vec![(a.clone(), table.0), (a, table.1)] + }); + + Self::Config { table } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "table with uneven columns", + |mut table| { + table.assign_cell(|| "", config.table.0, 0, || Value::known(Fp::zero()))?; + table.assign_cell(|| "", config.table.0, 1, || Value::known(Fp::zero()))?; + + table.assign_cell(|| "", config.table.1, 0, || Value::known(Fp::zero())) + }, + ) + } + } + + let prover = 
MockProver::run(K, &FaultyCircuit, vec![]); + assert_eq!( + format!("{}", prover.unwrap_err()), + "TableColumn { inner: Column { index: 0, column_type: Fixed } } has length 2 while TableColumn { inner: Column { index: 1, column_type: Fixed } } has length 1" + ); + } +} diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index b502cc8d3e..96804d4491 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -1249,13 +1249,11 @@ mod tests { use halo2curves::pasta::Fp; use super::{FailureLocation, MockProver, VerifyFailure}; - use crate::{ - circuit::{Layouter, SimpleFloorPlanner, Value}, - plonk::{ - sealed::SealedPhase, Advice, Any, Circuit, Column, ConstraintSystem, Error, Expression, - FirstPhase, Fixed, Instance, Selector, TableColumn, - }, + use crate::circuit::{Layouter, SimpleFloorPlanner, Value}; + use halo2_common::plonk::{ + Circuit, ConstraintSystem, Error, Expression, Selector, TableColumn, }; + use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; use halo2_middleware::poly::Rotation; #[test] @@ -1334,12 +1332,7 @@ mod tests { gate: (0, "Equality check").into(), region: (0, "Faulty synthesis".to_owned()).into(), gate_offset: 1, - column: Column::new( - 1, - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }) - ), + column: Column::new(1, Any::Advice(Advice { phase: 0 })), offset: 1, }]) ); @@ -1787,45 +1780,15 @@ mod tests { }, cell_values: vec![ ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 0 - ) - .into(), - 0 - ) - .into(), + ((Any::Advice(Advice { phase: 0 }), 0).into(), 0).into(), "1".to_string() ), ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 1 - ) - .into(), - 0 - ) - .into(), + ((Any::Advice(Advice { phase: 0 }), 1).into(), 0).into(), "0".to_string() ), ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 2 - ) - .into(), - 0 - ) - .into(), + ((Any::Advice(Advice { phase: 0 }), 2).into(), 0).into(), "0x5".to_string() ), (((Any::Fixed, 
0).into(), 0).into(), "0x7".to_string()), From 79d6e110a589373476a947a0468b766412bb0ae7 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 11:44:43 +0000 Subject: [PATCH 58/79] Pass tests in halo2_proofs --- halo2_proofs/src/plonk/prover.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 9163f795e9..0e9ba97a81 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -64,6 +64,7 @@ fn test_create_proof() { }, transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, }; + use halo2_middleware::ff::Field; use halo2curves::bn256::Bn256; use rand_core::OsRng; From 130e8e174251416655c8defaeccfd0512d0cf829 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 13:13:36 +0000 Subject: [PATCH 59/79] Move individual tests to halo2_proofs --- halo2_proofs/src/plonk.rs | 4 ++ .../tests/frontend_backend_split.rs | 31 +++++++++------ .../tests/plonk_api.rs | 38 ++++++++++++------- 3 files changed, 48 insertions(+), 25 deletions(-) rename {halo2_proofs_rm => halo2_proofs}/tests/frontend_backend_split.rs (97%) rename {halo2_proofs_rm => halo2_proofs}/tests/plonk_api.rs (97%) diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 837a5032bd..a16c9899ed 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -6,3 +6,7 @@ pub mod verifier { pub use halo2_common::plonk::ConstraintSystem; pub use keygen::{keygen_pk, keygen_vk}; + +pub use keygen::*; +pub use prover::*; +pub use verifier::*; diff --git a/halo2_proofs_rm/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs similarity index 97% rename from halo2_proofs_rm/tests/frontend_backend_split.rs rename to halo2_proofs/tests/frontend_backend_split.rs index ef96971a10..baffd33693 100644 --- a/halo2_proofs_rm/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -5,19 +5,28 @@ #[global_allocator] static 
ALLOC: dhat::Alloc = dhat::Alloc; -use halo2_proofs::arithmetic::Field; -use halo2_proofs::circuit::{AssignedCell, Layouter, Region, SimpleFloorPlanner, Value}; -use halo2_proofs::dev::MockProver; -use halo2_proofs::plonk::{ - compile_circuit, keygen_pk_v2, keygen_vk_v2, verify_proof, verify_proof_single, Advice, - Challenge, Circuit, Column, ConstraintSystem, Error, Expression, FirstPhase, Fixed, Instance, - ProverV2Single, SecondPhase, Selector, WitnessCalculator, +use halo2_backend::plonk::{ + keygen::{keygen_pk_v2, keygen_vk_v2}, + prover::ProverV2Single, + verifier::{verify_proof, verify_proof_single}, }; -use halo2_proofs::poly::commitment::ParamsProver; -use halo2_proofs::poly::Rotation; -use halo2_proofs::transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, +use halo2_common::{ + circuit::{AssignedCell, Layouter, Region, SimpleFloorPlanner, Value}, + plonk::{Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector}, + transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, + }, +}; +use halo2_frontend::{ + circuit::{compile_circuit, WitnessCalculator}, + dev::MockProver, }; +use halo2_middleware::{ + circuit::{Advice, Challenge, Column, Fixed, Instance}, + ff::Field, + poly::Rotation, +}; +use halo2_proofs::poly::commitment::ParamsProver; use std::collections::HashMap; #[derive(Clone)] diff --git a/halo2_proofs_rm/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs similarity index 97% rename from halo2_proofs_rm/tests/plonk_api.rs rename to halo2_proofs/tests/plonk_api.rs index ef595811f9..81ed46d3aa 100644 --- a/halo2_proofs_rm/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -2,21 +2,31 @@ #![allow(clippy::op_ref)] use assert_matches::assert_matches; -use ff::{FromUniformBytes, WithSmallOrderMulGroup}; -use halo2_proofs::arithmetic::Field; -use halo2_proofs::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; -use 
halo2_proofs::dev::MockProver; -use halo2_proofs::plonk::{ - create_proof as create_plonk_proof, keygen_pk, keygen_vk, verify_proof as verify_plonk_proof, - Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Fixed, ProvingKey, TableColumn, - VerifyingKey, +// TODO: Only import from halo2_proofs to show that we re-export the legacy API correctly +use halo2_backend::plonk::{ProvingKey, VerifyingKey, VerifyingKey}; +use halo2_common::{ + circuit::{Cell, Layouter, SimpleFloorPlanner, Value}, + plonk::{Circuit, ConstraintSystem, Error, TableColumn}, + poly::commitment::{CommitmentScheme, Prover, Verifier}, + transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, + }, +}; +use halo2_frontend::dev::MockProver; +use halo2_middleware::{ + circuit::{Advice, Column, Fixed}, + ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}, + plonk::Assigned, + poly::Rotation, }; -use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; -use halo2_proofs::poly::Rotation; -use halo2_proofs::poly::VerificationStrategy; -use halo2_proofs::transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, - TranscriptWriterBuffer, +use halo2_proofs::{ + plonk::{ + create_proof as create_plonk_proof, keygen_pk, keygen_vk, + verify_proof as verify_plonk_proof, + }, + poly::commitment::ParamsProver, + poly::VerificationStrategy, + transcript::EncodedChallenge, }; use rand_core::{OsRng, RngCore}; use std::marker::PhantomData; From 36e82a19d1fcb7fe0519ef3eba6ba6ce2e12b134 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 13:28:06 +0000 Subject: [PATCH 60/79] Add reexports in halo2_proofs for plonk_api.rs --- .../proptest-regressions/plonk/assigned.txt | 0 .../plonk/circuit/compress_selectors.txt | 0 halo2_proofs/src/lib.rs | 23 +++++++++-- halo2_proofs/src/plonk.rs | 11 ++++-- halo2_proofs/tests/plonk_api.rs | 38 +++++++------------ 5 files changed, 41 insertions(+), 
31 deletions(-) rename {halo2_proofs_rm => halo2_proofs}/proptest-regressions/plonk/assigned.txt (100%) rename {halo2_proofs_rm => halo2_proofs}/proptest-regressions/plonk/circuit/compress_selectors.txt (100%) diff --git a/halo2_proofs_rm/proptest-regressions/plonk/assigned.txt b/halo2_proofs/proptest-regressions/plonk/assigned.txt similarity index 100% rename from halo2_proofs_rm/proptest-regressions/plonk/assigned.txt rename to halo2_proofs/proptest-regressions/plonk/assigned.txt diff --git a/halo2_proofs_rm/proptest-regressions/plonk/circuit/compress_selectors.txt b/halo2_proofs/proptest-regressions/plonk/circuit/compress_selectors.txt similarity index 100% rename from halo2_proofs_rm/proptest-regressions/plonk/circuit/compress_selectors.txt rename to halo2_proofs/proptest-regressions/plonk/circuit/compress_selectors.txt diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index 9dddaa980d..690dbd7fc3 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -6,7 +6,24 @@ pub mod plonk; pub mod circuit { - pub use halo2_common::circuit::{Layouter, SimpleFloorPlanner}; + pub use halo2_common::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; +} +pub mod arithmetic { + pub use halo2_common::arithmetic::Field; +} +pub mod dev { + pub use halo2_frontend::dev::MockProver; +} +pub mod poly { + pub use halo2_backend::poly::VerificationStrategy; + pub use halo2_common::poly::commitment; + pub use halo2_common::poly::ipa; + pub use halo2_common::poly::kzg; + pub use halo2_middleware::poly::Rotation; +} +pub mod transcript { + pub use halo2_common::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, + TranscriptWriterBuffer, + }; } -pub use halo2_common::poly; -pub use halo2_common::transcript; diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index a16c9899ed..a6f61787e6 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -4,9 +4,12 @@ pub mod verifier { pub use 
halo2_backend::plonk::verifier::verify_proof; } -pub use halo2_common::plonk::ConstraintSystem; pub use keygen::{keygen_pk, keygen_vk}; -pub use keygen::*; -pub use prover::*; -pub use verifier::*; +pub use prover::create_proof; +pub use verifier::verify_proof; + +pub use halo2_backend::plonk::{ProvingKey, VerifyingKey}; +pub use halo2_common::plonk::{Circuit, ConstraintSystem, Error, TableColumn}; +pub use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; +pub use halo2_middleware::plonk::Assigned; diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index 81ed46d3aa..ef595811f9 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -2,31 +2,21 @@ #![allow(clippy::op_ref)] use assert_matches::assert_matches; -// TODO: Only import from halo2_proofs to show that we re-export the legacy API correctly -use halo2_backend::plonk::{ProvingKey, VerifyingKey, VerifyingKey}; -use halo2_common::{ - circuit::{Cell, Layouter, SimpleFloorPlanner, Value}, - plonk::{Circuit, ConstraintSystem, Error, TableColumn}, - poly::commitment::{CommitmentScheme, Prover, Verifier}, - transcript::{ - Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, - }, -}; -use halo2_frontend::dev::MockProver; -use halo2_middleware::{ - circuit::{Advice, Column, Fixed}, - ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}, - plonk::Assigned, - poly::Rotation, +use ff::{FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_proofs::arithmetic::Field; +use halo2_proofs::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; +use halo2_proofs::dev::MockProver; +use halo2_proofs::plonk::{ + create_proof as create_plonk_proof, keygen_pk, keygen_vk, verify_proof as verify_plonk_proof, + Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Fixed, ProvingKey, TableColumn, + VerifyingKey, }; -use halo2_proofs::{ - plonk::{ - create_proof as create_plonk_proof, keygen_pk, keygen_vk, - verify_proof as 
verify_plonk_proof, - }, - poly::commitment::ParamsProver, - poly::VerificationStrategy, - transcript::EncodedChallenge, +use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; +use halo2_proofs::poly::Rotation; +use halo2_proofs::poly::VerificationStrategy; +use halo2_proofs::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, + TranscriptWriterBuffer, }; use rand_core::{OsRng, RngCore}; use std::marker::PhantomData; From ea8cd592682f66b0a2f4427a0b176be4650e6d1c Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 13:59:01 +0000 Subject: [PATCH 61/79] Move examples to halo2_proofs. Breaks --- halo2_proofs/Cargo.toml | 8 ++++++++ .../examples/circuit-layout.rs | 0 .../examples/proof-size.rs | 0 .../examples/serialization.rs | 0 .../examples/shuffle.rs | 0 .../examples/shuffle_api.rs | 0 .../examples/simple-example.rs | 0 .../examples/two-chip.rs | 0 .../examples/vector-mul.rs | 0 .../examples/vector-ops-unblinded.rs | 0 halo2_proofs/src/lib.rs | 20 +++++++++++++------ halo2_proofs/src/plonk.rs | 7 +++++-- halo2_proofs_rm/Cargo.toml | 8 -------- middleware/src/circuit.rs | 5 +++++ 14 files changed, 32 insertions(+), 16 deletions(-) rename {halo2_proofs_rm => halo2_proofs}/examples/circuit-layout.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/proof-size.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/serialization.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/shuffle.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/shuffle_api.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/simple-example.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/two-chip.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/vector-mul.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/examples/vector-ops-unblinded.rs (100%) diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 40bd92a923..6888c86d81 100644 --- 
a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -82,3 +82,11 @@ derive_serde = ["halo2curves/derive_serde"] [lib] bench = false + +[[example]] +name = "circuit-layout" +required-features = ["test-dev-graph"] + +[[example]] +name = "proof-size" +required-features = ["cost-estimator"] diff --git a/halo2_proofs_rm/examples/circuit-layout.rs b/halo2_proofs/examples/circuit-layout.rs similarity index 100% rename from halo2_proofs_rm/examples/circuit-layout.rs rename to halo2_proofs/examples/circuit-layout.rs diff --git a/halo2_proofs_rm/examples/proof-size.rs b/halo2_proofs/examples/proof-size.rs similarity index 100% rename from halo2_proofs_rm/examples/proof-size.rs rename to halo2_proofs/examples/proof-size.rs diff --git a/halo2_proofs_rm/examples/serialization.rs b/halo2_proofs/examples/serialization.rs similarity index 100% rename from halo2_proofs_rm/examples/serialization.rs rename to halo2_proofs/examples/serialization.rs diff --git a/halo2_proofs_rm/examples/shuffle.rs b/halo2_proofs/examples/shuffle.rs similarity index 100% rename from halo2_proofs_rm/examples/shuffle.rs rename to halo2_proofs/examples/shuffle.rs diff --git a/halo2_proofs_rm/examples/shuffle_api.rs b/halo2_proofs/examples/shuffle_api.rs similarity index 100% rename from halo2_proofs_rm/examples/shuffle_api.rs rename to halo2_proofs/examples/shuffle_api.rs diff --git a/halo2_proofs_rm/examples/simple-example.rs b/halo2_proofs/examples/simple-example.rs similarity index 100% rename from halo2_proofs_rm/examples/simple-example.rs rename to halo2_proofs/examples/simple-example.rs diff --git a/halo2_proofs_rm/examples/two-chip.rs b/halo2_proofs/examples/two-chip.rs similarity index 100% rename from halo2_proofs_rm/examples/two-chip.rs rename to halo2_proofs/examples/two-chip.rs diff --git a/halo2_proofs_rm/examples/vector-mul.rs b/halo2_proofs/examples/vector-mul.rs similarity index 100% rename from halo2_proofs_rm/examples/vector-mul.rs rename to halo2_proofs/examples/vector-mul.rs diff 
--git a/halo2_proofs_rm/examples/vector-ops-unblinded.rs b/halo2_proofs/examples/vector-ops-unblinded.rs similarity index 100% rename from halo2_proofs_rm/examples/vector-ops-unblinded.rs rename to halo2_proofs/examples/vector-ops-unblinded.rs diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index 690dbd7fc3..f94ce586f3 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -6,19 +6,20 @@ pub mod plonk; pub mod circuit { - pub use halo2_common::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; + pub use halo2_common::circuit::floor_planner; + pub use halo2_common::circuit::{ + AssignedCell, Cell, Chip, Layouter, Region, SimpleFloorPlanner, Value, + }; } pub mod arithmetic { - pub use halo2_common::arithmetic::Field; + pub use halo2_common::arithmetic::{CurveAffine, Field}; } pub mod dev { - pub use halo2_frontend::dev::MockProver; + pub use halo2_frontend::dev::{metadata, FailureLocation, MockProver, VerifyFailure}; } pub mod poly { pub use halo2_backend::poly::VerificationStrategy; - pub use halo2_common::poly::commitment; - pub use halo2_common::poly::ipa; - pub use halo2_common::poly::kzg; + pub use halo2_common::poly::{commitment, ipa, kzg}; pub use halo2_middleware::poly::Rotation; } pub mod transcript { @@ -27,3 +28,10 @@ pub mod transcript { TranscriptWriterBuffer, }; } +pub mod helpers { + pub use halo2_common::helpers::SerdeFormat; +} + +pub use crate::helpers::SerdeFormat; + +pub use halo2curves; diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index a6f61787e6..ffde7d076e 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -10,6 +10,9 @@ pub use prover::create_proof; pub use verifier::verify_proof; pub use halo2_backend::plonk::{ProvingKey, VerifyingKey}; -pub use halo2_common::plonk::{Circuit, ConstraintSystem, Error, TableColumn}; -pub use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; +pub use halo2_common::plonk::{ + Circuit, ConstraintSystem, Error, Expression, 
FirstPhase, SecondPhase, Selector, TableColumn, + ThirdPhase, +}; +pub use halo2_middleware::circuit::{Advice, Challenge, Column, Fixed, Instance}; pub use halo2_middleware::plonk::Assigned; diff --git a/halo2_proofs_rm/Cargo.toml b/halo2_proofs_rm/Cargo.toml index 1a746ad414..23f2b40057 100644 --- a/halo2_proofs_rm/Cargo.toml +++ b/halo2_proofs_rm/Cargo.toml @@ -102,11 +102,3 @@ derive_serde = ["halo2curves/derive_serde"] [lib] bench = false - -[[example]] -name = "circuit-layout" -required-features = ["test-dev-graph"] - -[[example]] -name = "proof-size" -required-features = ["cost-estimator"] diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 3a58f59cc5..996dba7927 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -50,6 +50,11 @@ impl Challenge { pub fn phase(&self) -> u8 { self.phase } + + /// Return Expression + pub fn expr(&self) -> ExpressionMid { + ExpressionMid::Challenge(*self) + } } /// Low-degree expression representing an identity that must hold over the committed columns. 
From 28722f3cbe793e1f1cdd0996368b08fe8ea02601 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 26 Jan 2024 16:37:48 +0000 Subject: [PATCH 62/79] WIP (Broken): ColumnMid implementation --- backend/src/lib.rs | 2 + backend/src/plonk/keygen.rs | 158 +------------ backend/src/plonk/permutation/keygen.rs | 22 +- common/src/circuit.rs | 4 +- .../src/circuit/floor_planner/single_pass.rs | 4 +- common/src/circuit/floor_planner/v1.rs | 4 +- common/src/circuit/layouter.rs | 4 +- common/src/plonk.rs | 3 +- common/src/plonk/circuit.rs | 219 +++++++++++++++--- common/src/plonk/error.rs | 3 +- common/src/plonk/keygen.rs | 4 +- common/src/plonk/permutation.rs | 10 +- frontend/src/circuit.rs | 14 +- frontend/src/dev.rs | 18 +- frontend/src/dev/cost.rs | 6 +- frontend/src/dev/failure.rs | 4 +- frontend/src/dev/tfp.rs | 6 +- frontend/src/dev/util.rs | 6 +- middleware/src/circuit.rs | 131 +---------- middleware/src/metadata.rs | 8 +- middleware/src/permutation.rs | 6 +- 21 files changed, 255 insertions(+), 381 deletions(-) diff --git a/backend/src/lib.rs b/backend/src/lib.rs index 5973dcf661..36afe4c86d 100644 --- a/backend/src/lib.rs +++ b/backend/src/lib.rs @@ -1,3 +1,5 @@ +#![allow(unused_imports)] // TODO: Remove + pub mod plonk; // Internal re-exports diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 14ac9c3a3b..0af7d7048b 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -18,9 +18,7 @@ use crate::{ EvaluationDomain, }, }; -use halo2_middleware::circuit::{ - Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, -}; +use halo2_middleware::circuit::{Advice, Any, Challenge, CompiledCircuitV2, Fixed, Instance}; use halo2_middleware::plonk::Assigned; pub(crate) fn create_domain( @@ -48,160 +46,6 @@ where (domain, cs, config) } -/// Assembly to be used in circuit synthesis. 
-#[derive(Debug)] -pub(crate) struct Assembly { - pub(crate) k: u32, - pub(crate) fixed: Vec, LagrangeCoeff>>, - pub(crate) permutation: halo2_common::plonk::permutation::AssemblyFront, - pub(crate) selectors: Vec>, - // A range of available rows for assignment and copies. - pub(crate) usable_rows: Range, - pub(crate) _marker: std::marker::PhantomData, -} - -impl Assignment for Assembly { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.selectors[selector.0][row] = true; - - Ok(()) - } - - fn query_instance(&self, _: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - // There is no instance in this context. - Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about fixed columns here - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .fixed - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; - - Ok(()) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.permutation - .copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - column: Column, - from_row: usize, - to: Value>, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&from_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - let col = self - .fixed - .get_mut(column.index()) - .ok_or(Error::BoundsFailure)?; - - let filler = to.assign()?; - for row in self.usable_rows.clone().skip(from_row) { - col[row] = filler; - } - - Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} - /// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. 
pub fn keygen_vk_v2<'params, C, P>( params: &P, diff --git a/backend/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs index f44a45a991..f5b11afcec 100644 --- a/backend/src/plonk/permutation/keygen.rs +++ b/backend/src/plonk/permutation/keygen.rs @@ -10,7 +10,7 @@ use crate::{ EvaluationDomain, }, }; -use halo2_middleware::circuit::{Any, Column}; +use halo2_middleware::circuit::{Any, ColumnMid}; use halo2_middleware::permutation::{ArgumentV2, AssemblyMid}; #[cfg(feature = "thread-safe-region")] @@ -24,7 +24,7 @@ use std::collections::{BTreeSet, HashMap}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct Assembly { /// Columns that participate on the copy permutation argument. - columns: Vec>, + columns: Vec>, /// Mapping of the actual copies done. mapping: Vec>, /// Some aux data used to swap positions directly when sorting. @@ -60,7 +60,7 @@ impl Assembly { // in a 1-cycle; therefore mapping and aux are identical, because every cell is // its own distinguished element. 
Assembly { - columns: p.columns.clone(), + columns: p.columns.clone().into_iter().map(|c| c.into()).collect(), mapping: columns.clone(), aux: columns, sizes: vec![vec![1usize; n]; p.columns.len()], @@ -69,21 +69,21 @@ impl Assembly { pub(crate) fn copy( &mut self, - left_column: Column, + left_column: ColumnMid, left_row: usize, - right_column: Column, + right_column: ColumnMid, right_row: usize, ) -> Result<(), Error> { let left_column = self .columns .iter() .position(|c| c == &left_column) - .ok_or(Error::ColumnNotInPermutation(left_column))?; + .ok_or(Error::ColumnNotInPermutation(left_column.into()))?; let right_column = self .columns .iter() .position(|c| c == &right_column) - .ok_or(Error::ColumnNotInPermutation(right_column))?; + .ok_or(Error::ColumnNotInPermutation(right_column.into()))?; // Check bounds if left_row >= self.mapping[left_column].len() @@ -148,7 +148,7 @@ impl Assembly { #[derive(Clone, Debug, PartialEq, Eq)] pub struct Assembly { /// Columns that participate on the copy permutation argument. - columns: Vec>, + columns: Vec>, /// Mapping of the actual copies done. cycles: Vec>, /// Mapping of the actual copies done. @@ -188,9 +188,9 @@ impl Assembly { pub(crate) fn copy( &mut self, - left_column: Column, + left_column: ColumnMid, left_row: usize, - right_column: Column, + right_column: ColumnMid, right_row: usize, ) -> Result<(), Error> { let left_column = self @@ -316,7 +316,7 @@ impl Assembly { } /// Returns columns that participate in the permutation argument. 
- pub fn columns(&self) -> &[Column] { + pub fn columns(&self) -> &[ColumnMid] { &self.columns } diff --git a/common/src/circuit.rs b/common/src/circuit.rs index 85a42b430c..598267676c 100644 --- a/common/src/circuit.rs +++ b/common/src/circuit.rs @@ -4,8 +4,8 @@ use std::{fmt, marker::PhantomData}; use halo2_middleware::ff::Field; -use crate::plonk::{Error, Selector, TableColumn}; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use crate::plonk::{circuit::Column, Error, Selector, TableColumn}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; mod value; diff --git a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index 9905208a79..8a1b9db45f 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -9,11 +9,11 @@ use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// A simple [`FloorPlanner`] that performs minimal optimizations. 
diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index e0ae13e82e..1557fe2db1 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -6,11 +6,11 @@ use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; pub mod strategy; diff --git a/common/src/circuit/layouter.rs b/common/src/circuit/layouter.rs index e70606ac9b..37cbdb6744 100644 --- a/common/src/circuit/layouter.rs +++ b/common/src/circuit/layouter.rs @@ -8,8 +8,8 @@ use halo2_middleware::ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; -use crate::plonk::{Error, Selector}; -use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; +use crate::plonk::{circuit::Column, Error, Selector}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. diff --git a/common/src/plonk.rs b/common/src/plonk.rs index fb3284e0b9..49f894b208 100644 --- a/common/src/plonk.rs +++ b/common/src/plonk.rs @@ -5,9 +5,10 @@ //! [halo]: https://eprint.iacr.org/2019/1021 //! 
[plonk]: https://eprint.iacr.org/2019/953 +use crate::plonk::circuit::Column; use crate::poly::{LagrangeCoeff, Polynomial}; use crate::transcript::ChallengeScalar; -use halo2_middleware::circuit::{Advice, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Fixed, Instance}; use halo2_middleware::poly::Rotation; pub mod circuit; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 4186a8222e..137002d582 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -4,8 +4,8 @@ use crate::circuit::{Layouter, Region, Value}; use core::cmp::max; use core::ops::{Add, Mul}; use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, Column, ConstraintSystemV2Backend, ExpressionMid, - Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, + Advice, AdviceQueryMid, Any, Challenge, ColumnMid, ColumnType, ConstraintSystemV2Backend, + ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, }; use halo2_middleware::ff::Field; use halo2_middleware::metadata; @@ -22,6 +22,160 @@ use std::{ mod compress_selectors; +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Column { + pub index: usize, + pub column_type: C, +} + +// TODO: Remove all these methods, and directly access the fields? +impl Column { + pub fn new(index: usize, column_type: C) -> Self { + Column { index, column_type } + } + + /// Index of this column. + pub fn index(&self) -> usize { + self.index + } + + /// Type of this column. 
+ pub fn column_type(&self) -> &C { + &self.column_type + } + + /// Return expression from column at a relative position + pub fn query_cell(&self, at: Rotation) -> ExpressionMid { + self.column_type.query_cell(self.index, at) + } + + /// Return expression from column at the current row + pub fn cur(&self) -> ExpressionMid { + self.query_cell(Rotation::cur()) + } + + /// Return expression from column at the next row + pub fn next(&self) -> ExpressionMid { + self.query_cell(Rotation::next()) + } + + /// Return expression from column at the previous row + pub fn prev(&self) -> ExpressionMid { + self.query_cell(Rotation::prev()) + } + + /// Return expression from column at the specified rotation + pub fn rot(&self, rotation: i32) -> ExpressionMid { + self.query_cell(Rotation(rotation)) + } +} + +impl Ord for Column { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match self.column_type.into().cmp(&other.column_type.into()) { + // Indices are assigned within column types. 
+ std::cmp::Ordering::Equal => self.index.cmp(&other.index), + order => order, + } + } +} + +impl PartialOrd for Column { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl From> for Column { + fn from(column: ColumnMid) -> Column { + Column { + index: column.index, + column_type: column.column_type, + } + } +} + +impl Into> for Column { + fn into(self) -> ColumnMid { + ColumnMid { + index: self.index(), + column_type: *self.column_type(), + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Advice(advice.column_type), + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Fixed, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Instance, + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Advice(advice) => Ok(Column { + index: any.index(), + column_type: *advice, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Fixed => Ok(Column { + index: any.index(), + column_type: Fixed, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Instance => Ok(Column { + index: any.index(), + column_type: Instance, + }), + _ => Err("Cannot convert into Column"), + } + } +} + // TODO: Move sealed phase to frontend, and always use u8 in middleware and backend pub mod sealed { /// Phase of advice column @@ -1336,17 +1490,19 @@ impl From> for ConstraintSystemV2Backend { num_advice_columns: cs.num_advice_columns, num_instance_columns: 
cs.num_instance_columns, num_challenges: cs.num_challenges, - unblinded_advice_columns: cs.unblinded_advice_columns.clone(), + unblinded_advice_columns: cs.unblinded_advice_columns, advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), gates: cs .gates - .iter() - .map(|g| { - g.polys.clone().into_iter().enumerate().map(|(i, e)| { - let name = match g.constraint_name(i) { - "" => g.name.clone(), - constraint_name => format!("{}:{}", g.name, constraint_name), + .into_iter() + .map(|mut g| { + let constraint_names = std::mem::take(&mut g.constraint_names); + let gate_name = g.name.clone(); + g.polys.into_iter().enumerate().map(move |(i, e)| { + let name = match constraint_names[i].as_str() { + "" => gate_name.clone(), + constraint_name => format!("{}:{}", gate_name, constraint_name), }; GateV2Backend { name, @@ -1357,47 +1513,36 @@ impl From> for ConstraintSystemV2Backend { .flatten() .collect(), permutation: halo2_middleware::permutation::ArgumentV2 { - columns: cs.permutation.columns.clone(), + columns: cs + .permutation + .columns + .into_iter() + .map(|c| c.into()) + .collect(), }, lookups: cs .lookups - .iter() + .into_iter() .map(|l| halo2_middleware::lookup::ArgumentV2 { - name: l.name.clone(), - input_expressions: l - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - table_expressions: l - .table_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), + name: l.name, + input_expressions: l.input_expressions.into_iter().map(|e| e.into()).collect(), + table_expressions: l.table_expressions.into_iter().map(|e| e.into()).collect(), }) .collect(), shuffles: cs .shuffles - .iter() + .into_iter() .map(|s| halo2_middleware::shuffle::ArgumentV2 { - name: s.name.clone(), - input_expressions: s - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), + name: s.name, + input_expressions: 
s.input_expressions.into_iter().map(|e| e.into()).collect(), shuffle_expressions: s .shuffle_expressions - .clone() .into_iter() .map(|e| e.into()) .collect(), }) .collect(), - general_column_annotations: cs.general_column_annotations.clone(), + general_column_annotations: cs.general_column_annotations, } } } @@ -1569,11 +1714,11 @@ pub fn collect_queries( for column in &cs2.permutation.columns { match column.column_type { Any::Instance => { - queries.add_instance(Column::new(column.index(), Instance), Rotation::cur()) + queries.add_instance(Column::new(column.index, Instance), Rotation::cur()) } - Any::Fixed => queries.add_fixed(Column::new(column.index(), Fixed), Rotation::cur()), + Any::Fixed => queries.add_fixed(Column::new(column.index, Fixed), Rotation::cur()), Any::Advice(advice) => { - queries.add_advice(Column::new(column.index(), advice), Rotation::cur()) + queries.add_advice(Column::new(column.index, advice), Rotation::cur()) } }; } diff --git a/common/src/plonk/error.rs b/common/src/plonk/error.rs index eec130cfd7..6b43695009 100644 --- a/common/src/plonk/error.rs +++ b/common/src/plonk/error.rs @@ -3,7 +3,8 @@ use std::fmt; use std::io; use super::TableColumn; -use halo2_middleware::circuit::{Any, Column}; +use crate::plonk::circuit::Column; +use halo2_middleware::circuit::Any; // TODO: Consider splitting this Error into a frontend and backend version? 
diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index 5f584310e5..cd7a835b23 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -3,11 +3,11 @@ use std::ops::Range; use halo2_middleware::ff::Field; use super::{ - circuit::{Assignment, Selector}, + circuit::{Assignment, Column, Selector}, permutation, Error, LagrangeCoeff, Polynomial, }; use crate::circuit::Value; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Assembly to be used in circuit synthesis. diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index 2e4f7dba6e..ffe8435641 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -1,7 +1,7 @@ //! Implementation of permutation argument. -use crate::plonk::Error; -use halo2_middleware::circuit::{Any, Column}; +use crate::plonk::{Column, Error}; +use halo2_middleware::circuit::Any; use halo2_middleware::permutation::{ArgumentV2, Cell}; /// A permutation argument. @@ -14,7 +14,7 @@ pub struct Argument { impl From for Argument { fn from(arg: ArgumentV2) -> Self { Self { - columns: arg.columns.clone(), + columns: arg.columns.into_iter().map(|c| c.into()).collect(), } } } @@ -110,11 +110,11 @@ impl AssemblyFront { } self.copies.push(( Cell { - column: left_column, + column: left_column.into(), row: left_row, }, Cell { - column: right_column, + column: right_column.into(), row: right_row, }, )); diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index d4663438f2..9483b0ea30 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -1,13 +1,15 @@ //! Traits and structs for implementing circuit components. 
-use halo2_common::plonk::sealed::{self, SealedPhase}; -use halo2_common::plonk::FloorPlanner; -use halo2_common::plonk::{permutation, Error}; -use halo2_common::plonk::{Assignment, FirstPhase, SecondPhase, Selector, ThirdPhase}; -use halo2_common::plonk::{Circuit, ConstraintSystem}; +use halo2_common::plonk::{ + circuit::Column, + permutation, + sealed::{self, SealedPhase}, + Assignment, Circuit, ConstraintSystem, Error, FirstPhase, FloorPlanner, SecondPhase, Selector, + ThirdPhase, +}; use halo2_common::poly::{batch_invert_assigned, Polynomial}; use halo2_middleware::circuit::{ - Advice, Any, Challenge, Column, CompiledCircuitV2, Fixed, Instance, PreprocessingV2, + Advice, Any, Challenge, CompiledCircuitV2, Fixed, Instance, PreprocessingV2, }; use halo2_middleware::ff::Field; use halo2_middleware::plonk::Assigned; diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index 96804d4491..d20abb83e2 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -12,13 +12,14 @@ use halo2_middleware::ff::FromUniformBytes; use halo2_common::{ circuit, plonk::{ + circuit::Column, permutation, sealed::{self, SealedPhase}, Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, FloorPlanner, Phase, Selector, }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Challenge, ColumnMid, Fixed, Instance}; use halo2_middleware::plonk::Assigned; use halo2_common::multicore::{ @@ -381,9 +382,10 @@ impl Assignment for MockProver { } if let Some(region) = self.current_region.as_mut() { - region - .annotations - .insert(ColumnMetadata::from(column), annotation().into()); + region.annotations.insert( + ColumnMetadata::from(column.into().into()), + annotation().into(), + ); } } @@ -1112,11 +1114,11 @@ impl + Ord> MockProver { // Check that permutations preserve the original values of the cells. // Original values of columns involved in the permutation. 
- let original = |column: Column, row: usize| match column.column_type() { - Any::Advice(_) => self.advice[column.index()][row], - Any::Fixed => self.fixed[column.index()][row], + let original = |column: ColumnMid, row: usize| match column.column_type { + Any::Advice(_) => self.advice[column.index][row], + Any::Fixed => self.fixed[column.index][row], Any::Instance => { - let cell: &InstanceValue = &self.instance[column.index()][row]; + let cell: &InstanceValue = &self.instance[column.index][row]; CellValue::Assigned(cell.value()) } }; diff --git a/frontend/src/dev/cost.rs b/frontend/src/dev/cost.rs index 4fb13a39fe..36c0f686e6 100644 --- a/frontend/src/dev/cost.rs +++ b/frontend/src/dev/cost.rs @@ -14,9 +14,11 @@ use halo2_middleware::poly::Rotation; use halo2_common::{ circuit::{layouter::RegionColumn, Value}, - plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}, + plonk::{ + circuit::Column, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, + }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Measures a circuit to determine its costs, and explain what contributes to them. 
diff --git a/frontend/src/dev/failure.rs b/frontend/src/dev/failure.rs index 150fdbfbb5..81339f353f 100644 --- a/frontend/src/dev/failure.rs +++ b/frontend/src/dev/failure.rs @@ -12,8 +12,8 @@ use super::{ }; use crate::dev::metadata::Constraint; use crate::dev::{Instance, Value}; -use halo2_common::plonk::{ConstraintSystem, Expression, Gate}; -use halo2_middleware::circuit::{Any, Column}; +use halo2_common::plonk::{circuit::Column, ConstraintSystem, Expression, Gate}; +use halo2_middleware::circuit::Any; mod emitter; diff --git a/frontend/src/dev/tfp.rs b/frontend/src/dev/tfp.rs index 8daa95e9b0..72c3ea2776 100644 --- a/frontend/src/dev/tfp.rs +++ b/frontend/src/dev/tfp.rs @@ -7,8 +7,10 @@ use halo2_common::circuit::{ layouter::{RegionLayouter, SyncDeps}, AssignedCell, Cell, Layouter, Region, Table, Value, }; -use halo2_common::plonk::{Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector}; -use halo2_middleware::circuit::{Advice, Any, Challenge, Column, Fixed, Instance}; +use halo2_common::plonk::{ + circuit::Column, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, +}; +use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. 
diff --git a/frontend/src/dev/util.rs b/frontend/src/dev/util.rs index dbfb6eb32e..c3f3d7e494 100644 --- a/frontend/src/dev/util.rs +++ b/frontend/src/dev/util.rs @@ -2,8 +2,10 @@ use group::ff::Field; use std::collections::BTreeMap; use super::{metadata, CellValue, InstanceValue, Value}; -use halo2_common::plonk::{AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell}; -use halo2_middleware::circuit::{Advice, Any, Column, ColumnType}; +use halo2_common::plonk::{ + circuit::Column, AdviceQuery, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell, +}; +use halo2_middleware::circuit::{Advice, Any, ColumnType}; use halo2_middleware::poly::Rotation; pub struct AnyQuery { diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 996dba7927..1f7f5875a6 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -176,71 +176,11 @@ pub trait ColumnType: /// A column with an index and type #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Column { +pub struct ColumnMid { pub index: usize, pub column_type: C, } -// TODO: Remove all these methods, and directly access the fields? -impl Column { - pub fn new(index: usize, column_type: C) -> Self { - Column { index, column_type } - } - - /// Index of this column. - pub fn index(&self) -> usize { - self.index - } - - /// Type of this column. 
- pub fn column_type(&self) -> &C { - &self.column_type - } - - /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> ExpressionMid { - self.column_type.query_cell(self.index, at) - } - - /// Return expression from column at the current row - pub fn cur(&self) -> ExpressionMid { - self.query_cell(Rotation::cur()) - } - - /// Return expression from column at the next row - pub fn next(&self) -> ExpressionMid { - self.query_cell(Rotation::next()) - } - - /// Return expression from column at the previous row - pub fn prev(&self) -> ExpressionMid { - self.query_cell(Rotation::prev()) - } - - /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> ExpressionMid { - self.query_cell(Rotation(rotation)) - } -} - -impl Ord for Column { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match self.column_type.into().cmp(&other.column_type.into()) { - // Indices are assigned within column types. 
- std::cmp::Ordering::Equal => self.index.cmp(&other.index), - order => order, - } - } -} - -impl PartialOrd for Column { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - /// An advice column #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct Advice { @@ -410,72 +350,3 @@ impl From for Any { Any::Instance } } - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Advice(advice.column_type), - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Fixed, - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Instance, - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Advice(advice) => Ok(Column { - index: any.index(), - column_type: *advice, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Fixed => Ok(Column { - index: any.index(), - column_type: Fixed, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Instance => Ok(Column { - index: any.index(), - column_type: Instance, - }), - _ => Err("Cannot convert into Column"), - } - } -} diff --git a/middleware/src/metadata.rs b/middleware/src/metadata.rs index de3dd40e88..d876423cf8 100644 --- a/middleware/src/metadata.rs +++ b/middleware/src/metadata.rs @@ -34,11 +34,11 @@ impl From<(Any, usize)> for Column { } } -impl From> for Column { - fn from(column: circuit::Column) -> Self { +impl From> for Column { + fn from(column: circuit::ColumnMid) -> Self { Column { - column_type: 
*column.column_type(), - index: column.index(), + column_type: column.column_type, + index: column.index, } } } diff --git a/middleware/src/permutation.rs b/middleware/src/permutation.rs index 5cc20cd586..5be94e4dcc 100644 --- a/middleware/src/permutation.rs +++ b/middleware/src/permutation.rs @@ -1,9 +1,9 @@ -use crate::circuit::{Any, Column}; +use crate::circuit::{Any, ColumnMid}; // TODO: Dedup with other Cell definition, or move this to a higher level #[derive(Clone, Debug)] pub struct Cell { - pub column: Column, + pub column: ColumnMid, pub row: usize, } @@ -16,5 +16,5 @@ pub struct AssemblyMid { #[derive(Debug, Clone)] pub struct ArgumentV2 { /// A sequence of columns involved in the argument. - pub columns: Vec>, + pub columns: Vec>, } From 3291b3c4294b1f916c6ea23af18debc388de6aa5 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 11:27:25 +0000 Subject: [PATCH 63/79] Checkpoint --- backend/src/plonk/permutation/keygen.rs | 6 ++--- common/src/plonk/circuit.rs | 27 +++++++++++++------- frontend/src/dev.rs | 11 ++++---- halo2_proofs/src/plonk.rs | 6 ++--- halo2_proofs/tests/frontend_backend_split.rs | 7 +++-- middleware/src/circuit.rs | 6 +++-- middleware/src/metadata.rs | 4 +-- middleware/src/permutation.rs | 6 ++--- 8 files changed, 43 insertions(+), 30 deletions(-) diff --git a/backend/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs index f5b11afcec..a22c87f08c 100644 --- a/backend/src/plonk/permutation/keygen.rs +++ b/backend/src/plonk/permutation/keygen.rs @@ -24,7 +24,7 @@ use std::collections::{BTreeSet, HashMap}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct Assembly { /// Columns that participate on the copy permutation argument. - columns: Vec>, + columns: Vec, /// Mapping of the actual copies done. mapping: Vec>, /// Some aux data used to swap positions directly when sorting. 
@@ -69,9 +69,9 @@ impl Assembly { pub(crate) fn copy( &mut self, - left_column: ColumnMid, + left_column: ColumnMid, left_row: usize, - right_column: ColumnMid, + right_column: ColumnMid, right_row: usize, ) -> Result<(), Error> { let left_column = self diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 137002d582..6e11a99bff 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -29,6 +29,15 @@ pub struct Column { pub column_type: C, } +impl Into for Column { + fn into(self) -> metadata::Column { + metadata::Column { + index: self.index(), + column_type: *self.column_type(), + } + } +} + // TODO: Remove all these methods, and directly access the fields? impl Column { pub fn new(index: usize, column_type: C) -> Self { @@ -46,27 +55,27 @@ impl Column { } /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> ExpressionMid { + pub fn query_cell(&self, at: Rotation) -> Expression { self.column_type.query_cell(self.index, at) } /// Return expression from column at the current row - pub fn cur(&self) -> ExpressionMid { + pub fn cur(&self) -> Expression { self.query_cell(Rotation::cur()) } /// Return expression from column at the next row - pub fn next(&self) -> ExpressionMid { + pub fn next(&self) -> Expression { self.query_cell(Rotation::next()) } /// Return expression from column at the previous row - pub fn prev(&self) -> ExpressionMid { + pub fn prev(&self) -> Expression { self.query_cell(Rotation::prev()) } /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> ExpressionMid { + pub fn rot(&self, rotation: i32) -> Expression { self.query_cell(Rotation(rotation)) } } @@ -89,8 +98,8 @@ impl PartialOrd for Column { } } -impl From> for Column { - fn from(column: ColumnMid) -> Column { +impl From for Column { + fn from(column: ColumnMid) -> Column { Column { index: column.index, column_type: column.column_type, @@ -98,8 +107,8 @@ impl 
From> for Column { } } -impl Into> for Column { - fn into(self) -> ColumnMid { +impl Into for Column { + fn into(self) -> ColumnMid { ColumnMid { index: self.index(), column_type: *self.column_type(), diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index d20abb83e2..6707a99f12 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -382,10 +382,9 @@ impl Assignment for MockProver { } if let Some(region) = self.current_region.as_mut() { - region.annotations.insert( - ColumnMetadata::from(column.into().into()), - annotation().into(), - ); + region + .annotations + .insert(column.into(), annotation().into()); } } @@ -1114,7 +1113,7 @@ impl + Ord> MockProver { // Check that permutations preserve the original values of the cells. // Original values of columns involved in the permutation. - let original = |column: ColumnMid, row: usize| match column.column_type { + let original = |column: ColumnMid, row: usize| match column.column_type { Any::Advice(_) => self.advice[column.index][row], Any::Fixed => self.fixed[column.index][row], Any::Instance => { @@ -1136,7 +1135,7 @@ impl + Ord> MockProver { location: FailureLocation::find( &self.regions, cell_a.row, - Some(&cell_a.column).into_iter().cloned().collect(), + Some(&cell_a.column.into()).into_iter().cloned().collect(), ), }) } diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index ffde7d076e..bc47730815 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -11,8 +11,8 @@ pub use verifier::verify_proof; pub use halo2_backend::plonk::{ProvingKey, VerifyingKey}; pub use halo2_common::plonk::{ - Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector, TableColumn, - ThirdPhase, + circuit::Column, Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, + Selector, TableColumn, ThirdPhase, }; -pub use halo2_middleware::circuit::{Advice, Challenge, Column, Fixed, Instance}; +pub use halo2_middleware::circuit::{Advice, Challenge, Fixed, 
Instance}; pub use halo2_middleware::plonk::Assigned; diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index baffd33693..fb97416ebe 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -12,7 +12,10 @@ use halo2_backend::plonk::{ }; use halo2_common::{ circuit::{AssignedCell, Layouter, Region, SimpleFloorPlanner, Value}, - plonk::{Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector}, + plonk::{ + circuit::Column, Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, + Selector, + }, transcript::{ Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, }, @@ -22,7 +25,7 @@ use halo2_frontend::{ dev::MockProver, }; use halo2_middleware::{ - circuit::{Advice, Challenge, Column, Fixed, Instance}, + circuit::{Advice, Challenge, Fixed, Instance}, ff::Field, poly::Rotation, }; diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 1f7f5875a6..918b307ba7 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -176,9 +176,11 @@ pub trait ColumnType: /// A column with an index and type #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct ColumnMid { +pub struct ColumnMid { + /// The index of the column. pub index: usize, - pub column_type: C, + /// The type of the column. 
+ pub column_type: Any, } /// An advice column diff --git a/middleware/src/metadata.rs b/middleware/src/metadata.rs index d876423cf8..ebc105985d 100644 --- a/middleware/src/metadata.rs +++ b/middleware/src/metadata.rs @@ -34,8 +34,8 @@ impl From<(Any, usize)> for Column { } } -impl From> for Column { - fn from(column: circuit::ColumnMid) -> Self { +impl From for Column { + fn from(column: circuit::ColumnMid) -> Self { Column { column_type: column.column_type, index: column.index, diff --git a/middleware/src/permutation.rs b/middleware/src/permutation.rs index 5be94e4dcc..349b55bbe9 100644 --- a/middleware/src/permutation.rs +++ b/middleware/src/permutation.rs @@ -1,9 +1,9 @@ -use crate::circuit::{Any, ColumnMid}; +use crate::circuit::ColumnMid; // TODO: Dedup with other Cell definition, or move this to a higher level #[derive(Clone, Debug)] pub struct Cell { - pub column: ColumnMid, + pub column: ColumnMid, pub row: usize, } @@ -16,5 +16,5 @@ pub struct AssemblyMid { #[derive(Debug, Clone)] pub struct ArgumentV2 { /// A sequence of columns involved in the argument. 
- pub columns: Vec>, + pub columns: Vec, } From 34d35a84f98397ff323551bd65201c38bec83bcc Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 13:00:11 +0000 Subject: [PATCH 64/79] Add ChallengeMid in middleware --- backend/src/plonk/keygen.rs | 2 +- common/src/circuit.rs | 7 +- .../src/circuit/floor_planner/single_pass.rs | 4 +- common/src/circuit/floor_planner/v1.rs | 4 +- common/src/plonk/circuit.rs | 69 +++++++++++++++++-- common/src/plonk/keygen.rs | 4 +- frontend/src/circuit.rs | 6 +- .../src/circuit/floor_planner/single_pass.rs | 4 +- frontend/src/circuit/floor_planner/v1.rs | 4 +- .../src/circuit/floor_planner/v1/strategy.rs | 3 +- frontend/src/dev.rs | 8 +-- frontend/src/dev/cost.rs | 5 +- frontend/src/dev/tfp.rs | 5 +- halo2_proofs/src/plonk.rs | 7 +- halo2_proofs/tests/frontend_backend_split.rs | 6 +- middleware/src/circuit.rs | 20 ++++-- 16 files changed, 115 insertions(+), 43 deletions(-) diff --git a/backend/src/plonk/keygen.rs b/backend/src/plonk/keygen.rs index 0af7d7048b..4c61e4599d 100644 --- a/backend/src/plonk/keygen.rs +++ b/backend/src/plonk/keygen.rs @@ -18,7 +18,7 @@ use crate::{ EvaluationDomain, }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, CompiledCircuitV2, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, CompiledCircuitV2, Fixed, Instance}; use halo2_middleware::plonk::Assigned; pub(crate) fn create_domain( diff --git a/common/src/circuit.rs b/common/src/circuit.rs index 598267676c..5b50a3d138 100644 --- a/common/src/circuit.rs +++ b/common/src/circuit.rs @@ -4,8 +4,11 @@ use std::{fmt, marker::PhantomData}; use halo2_middleware::ff::Field; -use crate::plonk::{circuit::Column, Error, Selector, TableColumn}; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use crate::plonk::{ + circuit::{Challenge, Column}, + Error, Selector, TableColumn, +}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; mod value; diff --git 
a/common/src/circuit/floor_planner/single_pass.rs b/common/src/circuit/floor_planner/single_pass.rs index 8a1b9db45f..18a04582f5 100644 --- a/common/src/circuit/floor_planner/single_pass.rs +++ b/common/src/circuit/floor_planner/single_pass.rs @@ -11,9 +11,9 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, + plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// A simple [`FloorPlanner`] that performs minimal optimizations. diff --git a/common/src/circuit/floor_planner/v1.rs b/common/src/circuit/floor_planner/v1.rs index 1557fe2db1..820af78c7c 100644 --- a/common/src/circuit/floor_planner/v1.rs +++ b/common/src/circuit/floor_planner/v1.rs @@ -8,9 +8,9 @@ use crate::{ table_layouter::{compute_table_lengths, SimpleTableLayouter}, Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, - plonk::{Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, + plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; pub mod strategy; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 6e11a99bff..0150468dc3 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -4,7 +4,7 @@ use crate::circuit::{Layouter, Region, Value}; use core::cmp::max; use core::ops::{Add, Mul}; use halo2_middleware::circuit::{ - Advice, AdviceQueryMid, Any, Challenge, ColumnMid, ColumnType, ConstraintSystemV2Backend, + 
Advice, AdviceQueryMid, Any, ChallengeMid, ColumnMid, ColumnType, ConstraintSystemV2Backend, ExpressionMid, Fixed, FixedQueryMid, GateV2Backend, Instance, InstanceQueryMid, }; use halo2_middleware::ff::Field; @@ -56,7 +56,26 @@ impl Column { /// Return expression from column at a relative position pub fn query_cell(&self, at: Rotation) -> Expression { - self.column_type.query_cell(self.index, at) + let expr_mid = self.column_type.query_cell::(self.index, at); + match expr_mid { + ExpressionMid::Advice(q) => Expression::Advice(AdviceQuery { + index: None, + column_index: q.column_index, + rotation: q.rotation, + phase: sealed::Phase(q.phase), + }), + ExpressionMid::Fixed(q) => Expression::Fixed(FixedQuery { + index: None, + column_index: q.column_index, + rotation: q.rotation, + }), + ExpressionMid::Instance(q) => Expression::Instance(InstanceQuery { + index: None, + column_index: q.column_index, + rotation: q.rotation, + }), + _ => unreachable!(), + } } /// Return expression from column at the current row @@ -426,6 +445,48 @@ impl TableColumn { } } +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Challenge { + pub index: usize, + pub phase: u8, +} + +impl Challenge { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. + pub fn phase(&self) -> u8 { + self.phase + } + + /// Return Expression + pub fn expr(&self) -> Expression { + Expression::Challenge(*self) + } +} + +impl Into for Challenge { + fn into(self) -> ChallengeMid { + ChallengeMid { + index: self.index, + phase: self.phase, + } + } +} + +impl From for Challenge { + fn from(c: ChallengeMid) -> Self { + Self { + index: c.index, + phase: c.phase, + } + } +} + /// This trait allows a [`Circuit`] to direct some backend to assign a witness /// for a constraint system. 
pub trait Assignment { @@ -669,7 +730,7 @@ impl Into> for Expression { column_index, rotation, }), - Expression::Challenge(c) => ExpressionMid::Challenge(c), + Expression::Challenge(c) => ExpressionMid::Challenge(c.into()), Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), Expression::Sum(lhs, rhs) => { ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) @@ -1477,7 +1538,7 @@ impl QueriesMap { rotation: query.rotation, }) } - ExpressionMid::Challenge(c) => Expression::Challenge(*c), + ExpressionMid::Challenge(c) => Expression::Challenge(c.clone().into()), ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), ExpressionMid::Sum(lhs, rhs) => Expression::Sum( Box::new(self.as_expression(lhs)), diff --git a/common/src/plonk/keygen.rs b/common/src/plonk/keygen.rs index cd7a835b23..77507589cb 100644 --- a/common/src/plonk/keygen.rs +++ b/common/src/plonk/keygen.rs @@ -3,11 +3,11 @@ use std::ops::Range; use halo2_middleware::ff::Field; use super::{ - circuit::{Assignment, Column, Selector}, + circuit::{Assignment, Challenge, Column, Selector}, permutation, Error, LagrangeCoeff, Polynomial, }; use crate::circuit::Value; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Assembly to be used in circuit synthesis. diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index 9483b0ea30..582ec3949d 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -1,16 +1,14 @@ //! Traits and structs for implementing circuit components. 
use halo2_common::plonk::{ - circuit::Column, + circuit::{Challenge, Column}, permutation, sealed::{self, SealedPhase}, Assignment, Circuit, ConstraintSystem, Error, FirstPhase, FloorPlanner, SecondPhase, Selector, ThirdPhase, }; use halo2_common::poly::{batch_invert_assigned, Polynomial}; -use halo2_middleware::circuit::{ - Advice, Any, Challenge, CompiledCircuitV2, Fixed, Instance, PreprocessingV2, -}; +use halo2_middleware::circuit::{Advice, Any, CompiledCircuitV2, Fixed, Instance, PreprocessingV2}; use halo2_middleware::ff::Field; use halo2_middleware::plonk::Assigned; use std::collections::BTreeSet; diff --git a/frontend/src/circuit/floor_planner/single_pass.rs b/frontend/src/circuit/floor_planner/single_pass.rs index ec2aa18ede..39d0a6a001 100644 --- a/frontend/src/circuit/floor_planner/single_pass.rs +++ b/frontend/src/circuit/floor_planner/single_pass.rs @@ -5,8 +5,8 @@ mod tests { use super::SimpleFloorPlanner; use crate::dev::MockProver; - use halo2_common::plonk::{Circuit, ConstraintSystem, Error}; - use halo2_middleware::circuit::{Advice, Column}; + use halo2_common::plonk::{circuit::Column, Circuit, ConstraintSystem, Error}; + use halo2_middleware::circuit::Advice; #[test] fn not_enough_columns_for_constants() { diff --git a/frontend/src/circuit/floor_planner/v1.rs b/frontend/src/circuit/floor_planner/v1.rs index 6a78b58eb6..591cab81f8 100644 --- a/frontend/src/circuit/floor_planner/v1.rs +++ b/frontend/src/circuit/floor_planner/v1.rs @@ -7,8 +7,8 @@ mod tests { use halo2curves::pasta::vesta; use crate::dev::MockProver; - use halo2_common::plonk::{Circuit, ConstraintSystem, Error}; - use halo2_middleware::circuit::{Advice, Column}; + use halo2_common::plonk::{circuit::Column, Circuit, ConstraintSystem, Error}; + use halo2_middleware::circuit::Advice; #[test] fn not_enough_columns_for_constants() { diff --git a/frontend/src/circuit/floor_planner/v1/strategy.rs b/frontend/src/circuit/floor_planner/v1/strategy.rs index 7b05709dff..29d8f431d5 100644 --- 
a/frontend/src/circuit/floor_planner/v1/strategy.rs +++ b/frontend/src/circuit/floor_planner/v1/strategy.rs @@ -2,7 +2,8 @@ fn test_slot_in() { use crate::circuit::layouter::RegionShape; use halo2_common::circuit::floor_planner::v1::strategy::slot_in; - use halo2_middleware::circuit::{Any, Column}; + use halo2_common::plonk::circuit::Column; + use halo2_middleware::circuit::Any; let regions = vec![ RegionShape { diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index 6707a99f12..23d56c3f9d 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -12,14 +12,14 @@ use halo2_middleware::ff::FromUniformBytes; use halo2_common::{ circuit, plonk::{ - circuit::Column, + circuit::{Challenge, Column}, permutation, sealed::{self, SealedPhase}, Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, FloorPlanner, Phase, Selector, }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, ColumnMid, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, ColumnMid, Fixed, Instance}; use halo2_middleware::plonk::Assigned; use halo2_common::multicore::{ @@ -1252,9 +1252,9 @@ mod tests { use super::{FailureLocation, MockProver, VerifyFailure}; use crate::circuit::{Layouter, SimpleFloorPlanner, Value}; use halo2_common::plonk::{ - Circuit, ConstraintSystem, Error, Expression, Selector, TableColumn, + circuit::Column, Circuit, ConstraintSystem, Error, Expression, Selector, TableColumn, }; - use halo2_middleware::circuit::{Advice, Any, Column, Fixed, Instance}; + use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::poly::Rotation; #[test] diff --git a/frontend/src/dev/cost.rs b/frontend/src/dev/cost.rs index 36c0f686e6..9452975c17 100644 --- a/frontend/src/dev/cost.rs +++ b/frontend/src/dev/cost.rs @@ -15,10 +15,11 @@ use halo2_middleware::poly::Rotation; use halo2_common::{ circuit::{layouter::RegionColumn, Value}, plonk::{ - circuit::Column, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, + 
circuit::{Challenge, Column}, + Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, }, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// Measures a circuit to determine its costs, and explain what contributes to them. diff --git a/frontend/src/dev/tfp.rs b/frontend/src/dev/tfp.rs index 72c3ea2776..8729d33afc 100644 --- a/frontend/src/dev/tfp.rs +++ b/frontend/src/dev/tfp.rs @@ -8,9 +8,10 @@ use halo2_common::circuit::{ AssignedCell, Cell, Layouter, Region, Table, Value, }; use halo2_common::plonk::{ - circuit::Column, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, + circuit::{Challenge, Column}, + Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, }; -use halo2_middleware::circuit::{Advice, Any, Challenge, Fixed, Instance}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::plonk::Assigned; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. 
diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index bc47730815..6fbaf2631a 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -11,8 +11,9 @@ pub use verifier::verify_proof; pub use halo2_backend::plonk::{ProvingKey, VerifyingKey}; pub use halo2_common::plonk::{ - circuit::Column, Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, - Selector, TableColumn, ThirdPhase, + circuit::{Challenge, Column}, + Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector, TableColumn, + ThirdPhase, }; -pub use halo2_middleware::circuit::{Advice, Challenge, Fixed, Instance}; +pub use halo2_middleware::circuit::{Advice, Fixed, Instance}; pub use halo2_middleware::plonk::Assigned; diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index fb97416ebe..c647fa2e63 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -13,8 +13,8 @@ use halo2_backend::plonk::{ use halo2_common::{ circuit::{AssignedCell, Layouter, Region, SimpleFloorPlanner, Value}, plonk::{ - circuit::Column, Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, - Selector, + circuit::{Challenge, Column}, + Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector, }, transcript::{ Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, @@ -25,7 +25,7 @@ use halo2_frontend::{ dev::MockProver, }; use halo2_middleware::{ - circuit::{Advice, Challenge, Fixed, Instance}, + circuit::{Advice, Fixed, Instance}, ff::Field, poly::Rotation, }; diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 918b307ba7..004e86d4fe 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -35,12 +35,12 @@ pub struct InstanceQueryMid { /// A challenge squeezed from transcript after advice columns at the phase have been committed. 
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Challenge { +pub struct ChallengeMid { pub index: usize, pub phase: u8, } -impl Challenge { +impl ChallengeMid { /// Index of this challenge. pub fn index(&self) -> usize { self.index @@ -51,10 +51,10 @@ impl Challenge { self.phase } - /// Return Expression - pub fn expr(&self) -> ExpressionMid { - ExpressionMid::Challenge(*self) - } + // /// Return Expression + // pub fn expr(&self) -> ExpressionMid { + // ExpressionMid::Challenge(*self) + // } } /// Low-degree expression representing an identity that must hold over the committed columns. @@ -69,7 +69,7 @@ pub enum ExpressionMid { /// This is an instance (external) column queried at a certain relative location Instance(InstanceQueryMid), /// This is a challenge - Challenge(Challenge), + Challenge(ChallengeMid), /// This is a negated polynomial Negated(Box>), /// This is the sum of two polynomials @@ -166,6 +166,12 @@ pub struct CompiledCircuitV2 { pub cs: ConstraintSystemV2Backend, } +// TODO: The query_cell method is only used in the frontend, which uses Expression. By having this +// trait implemented here we can only return ExpressionMid, which requires conversion to Expression +// when used. On the other hand, it's difficult to move ColumnType to the frontend because this +// trait is implemented for Any which is used in the backend. It would be great to find a way to +// move all the `query_cell` implementations to the frontend and have them return `Expression`, +// while keeping `Any` in the middleware. 
/// A column type pub trait ColumnType: 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into From 0fb30077b4a09a1f5f78ef74f09acd952c78c9cb Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 13:03:25 +0000 Subject: [PATCH 65/79] Remove halo2_proofs_rm; compile all tests --- .../CHANGELOG.md | 0 halo2_proofs/Cargo.toml | 30 ++++- .../benches/arithmetic.rs | 0 .../benches/commit_zk.rs | 0 .../benches/dev_lookup.rs | 0 .../benches/fft.rs | 0 .../benches/hashtocurve.rs | 0 .../benches/plonk.rs | 0 halo2_proofs/src/lib.rs | 4 +- halo2_proofs_rm/Cargo.toml | 104 ------------------ halo2_proofs_rm/README.md | 37 ------- halo2_proofs_rm/src/lib.rs | 0 12 files changed, 30 insertions(+), 145 deletions(-) rename {halo2_proofs_rm => halo2_proofs}/CHANGELOG.md (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/arithmetic.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/commit_zk.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/dev_lookup.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/fft.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/hashtocurve.rs (100%) rename {halo2_proofs_rm => halo2_proofs}/benches/plonk.rs (100%) delete mode 100644 halo2_proofs_rm/Cargo.toml delete mode 100644 halo2_proofs_rm/README.md delete mode 100644 halo2_proofs_rm/src/lib.rs diff --git a/halo2_proofs_rm/CHANGELOG.md b/halo2_proofs/CHANGELOG.md similarity index 100% rename from halo2_proofs_rm/CHANGELOG.md rename to halo2_proofs/CHANGELOG.md diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 6888c86d81..5223129900 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -TODO +Fast PLONK-based zero-knowledge proving system with no trusted setup """ license = "MIT OR Apache-2.0" -repository = "TODO" -documentation = "TODO" +repository = "https://github.com/zcash/halo2" +documentation = 
"https://docs.rs/halo2_proofs" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -24,6 +24,30 @@ keywords = ["halo", "proofs", "zkp", "zkSNARKs"] all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] +[[bench]] +name = "arithmetic" +harness = false + +[[bench]] +name = "commit_zk" +harness = false + +[[bench]] +name = "hashtocurve" +harness = false + +[[bench]] +name = "plonk" +harness = false + +[[bench]] +name = "dev_lookup" +harness = false + +[[bench]] +name = "fft" +harness = false + [dependencies] backtrace = { version = "0.3", optional = true } ff = "0.13" diff --git a/halo2_proofs_rm/benches/arithmetic.rs b/halo2_proofs/benches/arithmetic.rs similarity index 100% rename from halo2_proofs_rm/benches/arithmetic.rs rename to halo2_proofs/benches/arithmetic.rs diff --git a/halo2_proofs_rm/benches/commit_zk.rs b/halo2_proofs/benches/commit_zk.rs similarity index 100% rename from halo2_proofs_rm/benches/commit_zk.rs rename to halo2_proofs/benches/commit_zk.rs diff --git a/halo2_proofs_rm/benches/dev_lookup.rs b/halo2_proofs/benches/dev_lookup.rs similarity index 100% rename from halo2_proofs_rm/benches/dev_lookup.rs rename to halo2_proofs/benches/dev_lookup.rs diff --git a/halo2_proofs_rm/benches/fft.rs b/halo2_proofs/benches/fft.rs similarity index 100% rename from halo2_proofs_rm/benches/fft.rs rename to halo2_proofs/benches/fft.rs diff --git a/halo2_proofs_rm/benches/hashtocurve.rs b/halo2_proofs/benches/hashtocurve.rs similarity index 100% rename from halo2_proofs_rm/benches/hashtocurve.rs rename to halo2_proofs/benches/hashtocurve.rs diff --git a/halo2_proofs_rm/benches/plonk.rs b/halo2_proofs/benches/plonk.rs similarity index 100% rename from halo2_proofs_rm/benches/plonk.rs rename to halo2_proofs/benches/plonk.rs diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index f94ce586f3..3f3a55acbd 100644 --- a/halo2_proofs/src/lib.rs +++ 
b/halo2_proofs/src/lib.rs @@ -12,7 +12,9 @@ pub mod circuit { }; } pub mod arithmetic { - pub use halo2_common::arithmetic::{CurveAffine, Field}; + pub use halo2_common::arithmetic::{ + best_fft, parallelize, small_multiexp, CurveAffine, CurveExt, Field, + }; } pub mod dev { pub use halo2_frontend::dev::{metadata, FailureLocation, MockProver, VerifyFailure}; diff --git a/halo2_proofs_rm/Cargo.toml b/halo2_proofs_rm/Cargo.toml deleted file mode 100644 index 23f2b40057..0000000000 --- a/halo2_proofs_rm/Cargo.toml +++ /dev/null @@ -1,104 +0,0 @@ -[package] -name = "halo2_proofs_rm" -version = "0.3.0" -authors = [ - "Sean Bowe ", - "Ying Tong Lai ", - "Daira Hopwood ", - "Jack Grigg ", - "Privacy Scaling Explorations team", -] -edition = "2021" -rust-version = "1.66.0" -description = """ -Fast PLONK-based zero-knowledge proving system with no trusted setup -""" -license = "MIT OR Apache-2.0" -repository = "https://github.com/zcash/halo2" -documentation = "https://docs.rs/halo2_proofs" -readme = "README.md" -categories = ["cryptography"] -keywords = ["halo", "proofs", "zkp", "zkSNARKs"] - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] - -[[bench]] -name = "arithmetic" -harness = false - -[[bench]] -name = "commit_zk" -harness = false - -[[bench]] -name = "hashtocurve" -harness = false - -[[bench]] -name = "plonk" -harness = false - -[[bench]] -name = "dev_lookup" -harness = false - -[[bench]] -name = "fft" -harness = false - -[dependencies] -backtrace = { version = "0.3", optional = true } -ff = "0.13" -group = "0.13" -halo2curves = { version = "0.6.0", default-features = false } -rand_core = { version = "0.6", default-features = false } -tracing = "0.1" -blake2b_simd = "1" # MSRV 1.66.0 -sha3 = "0.9.1" -rand_chacha = "0.3" -serde = { version = "1", optional = true, features = ["derive"] } -serde_derive = { version = "1", optional = true} -rayon = "1.8" - -# Developer tooling dependencies 
-plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } - -[dev-dependencies] -assert_matches = "1.5" -criterion = "0.3" -gumdrop = "0.8" -proptest = "1" -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -dhat = "0.3.2" -serde_json = "1" - -[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] -getrandom = { version = "0.2", features = ["js"] } - -[features] -default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] -test-dev-graph = [ - "dev-graph", - "plotters/bitmap_backend", - "plotters/bitmap_encoder", - "plotters/ttf", -] -bits = ["halo2curves/bits"] -gadget-traces = ["backtrace"] -thread-safe-region = [] -sanity-checks = [] -batch = ["rand_core/getrandom"] -circuit-params = [] -heap-profiling = [] -cost-estimator = ["serde", "serde_derive"] -derive_serde = ["halo2curves/derive_serde"] - -[lib] -bench = false diff --git a/halo2_proofs_rm/README.md b/halo2_proofs_rm/README.md deleted file mode 100644 index bdb9a63639..0000000000 --- a/halo2_proofs_rm/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# halo2_proofs [![Crates.io](https://img.shields.io/crates/v/halo2_proofs.svg)](https://crates.io/crates/halo2_proofs) # - -## [Documentation](https://docs.rs/halo2_proofs) - -## Minimum Supported Rust Version - -Requires Rust **1.65.0** or higher. - -Minimum supported Rust version can be changed in the future, but it will be done with a -minor version bump. - -## Controlling parallelism - -`halo2_proofs` currently uses [rayon](https://github.com/rayon-rs/rayon) for parallel -computation. The `RAYON_NUM_THREADS` environment variable can be used to set the number of -threads. 
- -When compiling to WASM-targets, notice that since version `1.7`, `rayon` will fallback automatically (with no need to handle features) to require `getrandom` in order to be able to work. For more info related to WASM-compilation. - -See: [Rayon: Usage with WebAssembly](https://github.com/rayon-rs/rayon#usage-with-webassembly) for more - -## License - -Licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally -submitted for inclusion in the work by you, as defined in the Apache-2.0 -license, shall be dual licensed as above, without any additional terms or -conditions. diff --git a/halo2_proofs_rm/src/lib.rs b/halo2_proofs_rm/src/lib.rs deleted file mode 100644 index e69de29bb2..0000000000 From 1fce6f327b05c11302ddc1e554a5cc28d4bf887b Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 13:36:46 +0000 Subject: [PATCH 66/79] Clean up middleware deps --- Cargo.toml | 1 - middleware/Cargo.toml | 39 ++------------------------------------- 2 files changed, 2 insertions(+), 38 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d7e905a8f0..0b5d9a1ccf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,6 @@ members = [ "halo2", "halo2_proofs", - "halo2_proofs_rm", # TODO: Remove "frontend", "middleware", "backend", diff --git a/middleware/Cargo.toml b/middleware/Cargo.toml index 55df462101..e66e8f6592 100644 --- a/middleware/Cargo.toml +++ b/middleware/Cargo.toml @@ -25,56 +25,21 @@ all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] -backtrace = { version = "0.3", optional = true } ff = "0.13" -group = "0.13" -halo2curves = { version = "0.6.0", default-features = false } -rand_core = { version = "0.6", default-features = 
false } -tracing = "0.1" -blake2b_simd = "1" # MSRV 1.66.0 -sha3 = "0.9.1" -rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" -# Developer tooling dependencies -plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - # Legacy circuit compatibility halo2_legacy_pdqsort = { version = "0.1.0", optional = true } [dev-dependencies] -assert_matches = "1.5" -criterion = "0.3" -gumdrop = "0.8" proptest = "1" -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -dhat = "0.3.2" -serde_json = "1" +group = "0.13" +halo2curves = { version = "0.6.0", default-features = false } [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } -[features] -default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] -test-dev-graph = [ - "dev-graph", - "plotters/bitmap_backend", - "plotters/bitmap_encoder", - "plotters/ttf", -] -bits = ["halo2curves/bits"] -gadget-traces = ["backtrace"] -thread-safe-region = [] -sanity-checks = [] -batch = ["rand_core/getrandom"] -circuit-params = [] -heap-profiling = [] -cost-estimator = ["serde", "serde_derive"] -derive_serde = ["halo2curves/derive_serde"] - [lib] bench = false From da9d34e78b2a9ca63ef18ae3c72f834802eb4859 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 14:20:21 +0000 Subject: [PATCH 67/79] Clean up and config lints for halo2_proofs --- backend/Cargo.toml | 22 +++------------------ common/Cargo.toml | 26 +++---------------------- frontend/Cargo.toml | 20 ++++---------------- halo2_proofs/Cargo.toml | 40 +++++++++++---------------------------- halo2_proofs/src/lib.rs | 19 ++++++++++++++++++- halo2_proofs/src/plonk.rs | 13 ++++++++++--- middleware/Cargo.toml | 3 --- 7 files changed, 49 insertions(+), 94 deletions(-) 
diff --git a/backend/Cargo.toml b/backend/Cargo.toml index e97451c686..64c7dd2788 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -TODO +Halo2 backend implementation. This package implements the halo2 proof system which includes setup (key generation), proving and verifying. """ license = "MIT OR Apache-2.0" -repository = "TODO" -documentation = "TODO" +repository = "https://github.com/zcash/halo2" +documentation = "https://docs.rs/halo2_proofs" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -40,20 +40,12 @@ rayon = "1.8" halo2_middleware = { path = "../middleware" } halo2_common = { path = "../common" } -# Developer tooling dependencies -plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } - [dev-dependencies] assert_matches = "1.5" criterion = "0.3" gumdrop = "0.8" proptest = "1" rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -dhat = "0.3.2" serde_json = "1" [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] @@ -61,20 +53,12 @@ getrandom = { version = "0.2", features = ["js"] } [features] default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] -test-dev-graph = [ - "dev-graph", - "plotters/bitmap_backend", - "plotters/bitmap_encoder", - "plotters/ttf", -] bits = ["halo2curves/bits"] gadget-traces = ["backtrace"] thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] -heap-profiling = [] cost-estimator = ["serde", "serde_derive"] derive_serde = ["halo2curves/derive_serde"] diff --git a/common/Cargo.toml b/common/Cargo.toml index 6f6a128099..2e33c56537 100644 --- a/common/Cargo.toml +++ 
b/common/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -TODO +Halo2 frontend-backend common functions and types. This package is meant for internal usage only. """ license = "MIT OR Apache-2.0" -repository = "TODO" -documentation = "TODO" +repository = "https://github.com/zcash/halo2" +documentation = "https://docs.rs/halo2_proofs" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -29,29 +29,19 @@ backtrace = { version = "0.3", optional = true } group = "0.13" halo2curves = { version = "0.6.0", default-features = false } rand_core = { version = "0.6", default-features = false } -tracing = "0.1" blake2b_simd = "1" # MSRV 1.66.0 sha3 = "0.9.1" -rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" halo2_middleware = { path = "../middleware" } -# Developer tooling dependencies -plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - # Legacy circuit compatibility halo2_legacy_pdqsort = { version = "0.1.0", optional = true } [dev-dependencies] -assert_matches = "1.5" -criterion = "0.3" -gumdrop = "0.8" proptest = "1" rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -dhat = "0.3.2" serde_json = "1" [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] @@ -59,21 +49,11 @@ getrandom = { version = "0.2", features = ["js"] } [features] default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] -test-dev-graph = [ - "dev-graph", - "plotters/bitmap_backend", - "plotters/bitmap_encoder", - "plotters/ttf", -] bits = ["halo2curves/bits"] gadget-traces = ["backtrace"] thread-safe-region = [] -sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] -heap-profiling = [] -cost-estimator = ["serde", 
"serde_derive"] derive_serde = ["halo2curves/derive_serde"] [lib] diff --git a/frontend/Cargo.toml b/frontend/Cargo.toml index ada6ac6c30..e3c728b04b 100644 --- a/frontend/Cargo.toml +++ b/frontend/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -TODO +Halo2 frontend implementation. This package implements an API to write circuits, handles witness generation and contains the MockProver. """ license = "MIT OR Apache-2.0" -repository = "TODO" -documentation = "TODO" +repository = "https://github.com/zcash/halo2" +documentation = "https://docs.rs/halo2_proofs" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -29,14 +29,10 @@ backtrace = { version = "0.3", optional = true } ff = "0.13" group = "0.13" halo2curves = { version = "0.6.0", default-features = false } -rand_core = { version = "0.6", default-features = false } tracing = "0.1" blake2b_simd = "1" # MSRV 1.66.0 -sha3 = "0.9.1" -rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} -rayon = "1.8" halo2_middleware = { path = "../middleware" } halo2_common = { path = "../common" } @@ -44,23 +40,16 @@ halo2_common = { path = "../common" } plotters = { version = "0.3.0", default-features = false, optional = true } tabbycat = { version = "0.1", features = ["attributes"], optional = true } -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } - [dev-dependencies] -assert_matches = "1.5" -criterion = "0.3" -gumdrop = "0.8" proptest = "1" rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -dhat = "0.3.2" serde_json = "1" [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } [features] -default = ["batch", "bits"] +default = ["bits"] dev-graph = ["plotters", "tabbycat"] test-dev-graph 
= [ "dev-graph", @@ -72,7 +61,6 @@ bits = ["halo2curves/bits"] gadget-traces = ["backtrace"] thread-safe-region = [] sanity-checks = [] -batch = ["rand_core/getrandom"] circuit-params = [] heap-profiling = [] cost-estimator = ["serde", "serde_derive"] diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 5223129900..df0160596e 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -49,36 +49,23 @@ name = "fft" harness = false [dependencies] -backtrace = { version = "0.3", optional = true } -ff = "0.13" -group = "0.13" -halo2curves = { version = "0.6.0", default-features = false } -rand_core = { version = "0.6", default-features = false } -tracing = "0.1" -blake2b_simd = "1" # MSRV 1.66.0 -sha3 = "0.9.1" -rand_chacha = "0.3" -serde = { version = "1", optional = true, features = ["derive"] } -serde_derive = { version = "1", optional = true} -rayon = "1.8" halo2_middleware = { path = "../middleware" } halo2_common = { path = "../common" } halo2_backend = { path = "../backend" } halo2_frontend = { path = "../frontend" } - -# Developer tooling dependencies -plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } +halo2curves = { version = "0.6.0", default-features = false } +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } [dev-dependencies] +ff = "0.13" +group = "0.13" +tracing = "0.1" +rand_chacha = "0.3" +rayon = "1.8" assert_matches = "1.5" criterion = "0.3" gumdrop = "0.8" proptest = "1" -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } dhat = "0.3.2" serde_json = "1" @@ -87,21 +74,16 @@ getrandom = { version = "0.2", features = ["js"] } [features] default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] -test-dev-graph = [ - "dev-graph", - "plotters/bitmap_backend", - 
"plotters/bitmap_encoder", - "plotters/ttf", -] +dev-graph = ["halo2_frontend/dev-graph"] +test-dev-graph = ["halo2_frontend/test-dev-graph"] bits = ["halo2curves/bits"] -gadget-traces = ["backtrace"] +gadget-traces = ["halo2_common/gadget-traces"] thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] heap-profiling = [] -cost-estimator = ["serde", "serde_derive"] +cost-estimator = ["halo2_frontend/cost-estimator"] derive_serde = ["halo2curves/derive_serde"] [lib] diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index 3f3a55acbd..78ecf3d435 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -3,34 +3,51 @@ //! module structure so that projects depending on halo2 can update their dependency towards it //! without breaking. +#![cfg_attr(docsrs, feature(doc_cfg))] +// The actual lints we want to disable. +#![allow(clippy::op_ref, clippy::many_single_char_names)] +#![deny(rustdoc::broken_intra_doc_links)] +#![deny(missing_debug_implementations)] +#![deny(missing_docs)] +#![deny(unsafe_code)] + pub mod plonk; +/// Traits and structs for implementing circuit components. pub mod circuit { pub use halo2_common::circuit::floor_planner; pub use halo2_common::circuit::{ AssignedCell, Cell, Chip, Layouter, Region, SimpleFloorPlanner, Value, }; } +///! This module provides common utilities, traits and structures for group, +///! field and polynomial arithmetic. pub mod arithmetic { pub use halo2_common::arithmetic::{ best_fft, parallelize, small_multiexp, CurveAffine, CurveExt, Field, }; } +/// Tools for developing circuits. pub mod dev { pub use halo2_frontend::dev::{metadata, FailureLocation, MockProver, VerifyFailure}; } +/// Contains utilities for performing arithmetic over univariate polynomials in +/// various forms, including computing commitments to them and provably opening +/// the committed polynomials at arbitrary points. 
pub mod poly { pub use halo2_backend::poly::VerificationStrategy; pub use halo2_common::poly::{commitment, ipa, kzg}; pub use halo2_middleware::poly::Rotation; } +/// This module contains utilities and traits for dealing with Fiat-Shamir +/// transcripts. pub mod transcript { pub use halo2_common::transcript::{ Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, TranscriptWriterBuffer, }; } -pub mod helpers { +mod helpers { pub use halo2_common::helpers::SerdeFormat; } diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 6fbaf2631a..201a342ce7 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -1,6 +1,13 @@ -pub mod keygen; -pub mod prover; -pub mod verifier { +//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] +//! that is designed specifically for the polynomial commitment scheme described +//! in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 +//! [plonk]: https://eprint.iacr.org/2019/953 + +mod keygen; +mod prover; +mod verifier { pub use halo2_backend::plonk::verifier::verify_proof; } diff --git a/middleware/Cargo.toml b/middleware/Cargo.toml index e66e8f6592..a443aa0809 100644 --- a/middleware/Cargo.toml +++ b/middleware/Cargo.toml @@ -30,9 +30,6 @@ serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } - [dev-dependencies] proptest = "1" group = "0.13" From 4c155be610f0f3df0fdbc416be1da1b779cc29e8 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Mon, 29 Jan 2024 16:24:43 +0000 Subject: [PATCH 68/79] Temporarily disable thread-safe-region fetature, fix clippy warnings --- backend/Cargo.toml | 1 - backend/src/plonk/evaluation.rs | 4 +- backend/src/plonk/lookup/prover.rs | 8 ++-- backend/src/plonk/lookup/verifier.rs | 30 ++++++------- 
backend/src/plonk/permutation/keygen.rs | 43 ++++++++++++------- backend/src/plonk/permutation/prover.rs | 6 +-- backend/src/plonk/permutation/verifier.rs | 18 ++++---- backend/src/plonk/prover.rs | 5 ++- backend/src/plonk/shuffle/verifier.rs | 18 ++++---- backend/src/plonk/vanishing/verifier.rs | 2 +- backend/src/plonk/verifier.rs | 6 +-- common/src/plonk/circuit.rs | 40 ++++++++--------- common/src/plonk/permutation.rs | 6 +-- common/src/poly/kzg/msm.rs | 10 ++++- .../poly/kzg/multiopen/shplonk/verifier.rs | 2 +- frontend/src/circuit.rs | 1 + frontend/src/dev.rs | 4 +- frontend/src/dev/cost_model.rs | 2 +- frontend/src/dev/graph.rs | 16 +++---- frontend/src/dev/graph/layout.rs | 8 ++-- halo2_proofs/Cargo.toml | 1 + halo2_proofs/src/lib.rs | 6 +++ halo2_proofs/tests/frontend_backend_split.rs | 5 ++- middleware/src/circuit.rs | 8 +--- 24 files changed, 133 insertions(+), 117 deletions(-) diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 64c7dd2788..5f7fbfc22f 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -55,7 +55,6 @@ getrandom = { version = "0.2", features = ["js"] } default = ["batch", "bits"] bits = ["halo2curves/bits"] gadget-traces = ["backtrace"] -thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] circuit-params = [] diff --git a/backend/src/plonk/evaluation.rs b/backend/src/plonk/evaluation.rs index 2cd00a5f7c..109523ecef 100644 --- a/backend/src/plonk/evaluation.rs +++ b/backend/src/plonk/evaluation.rs @@ -390,7 +390,7 @@ impl Evaluator { let blinding_factors = pk.vk.cs.blinding_factors(); let last_rotation = Rotation(-((blinding_factors + 1) as i32)); let chunk_len = pk.vk.cs.degree() - 2; - let delta_start = beta * &C::Scalar::ZETA; + let delta_start = beta * C::Scalar::ZETA; let first_set = sets.first().unwrap(); let last_set = sets.last().unwrap(); @@ -863,7 +863,7 @@ pub fn evaluate( }, &|challenge| challenges[challenge.index()], &|a| -a, - &|a, b| a + &b, + &|a, b| a + b, &|a, b| a * b, &|a, scalar| a * 
scalar, ); diff --git a/backend/src/plonk/lookup/prover.rs b/backend/src/plonk/lookup/prover.rs index b0124ca494..3af77a7f2c 100644 --- a/backend/src/plonk/lookup/prover.rs +++ b/backend/src/plonk/lookup/prover.rs @@ -199,7 +199,7 @@ impl Permuted { .zip(self.permuted_input_expression[start..].iter()) .zip(self.permuted_table_expression[start..].iter()) { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); + *lookup_product = (*beta + permuted_input_value) * (*gamma + permuted_table_value); } }); @@ -214,8 +214,8 @@ impl Permuted { for (i, product) in product.iter_mut().enumerate() { let i = i + start; - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); + *product *= &(self.compressed_input_expression[i] + *beta); + *product *= &(self.compressed_table_expression[i] + *gamma); } }); @@ -276,7 +276,7 @@ impl Permuted { input_term += &(*beta); table_term += &(*gamma); - right *= &(input_term * &table_term); + right *= &(input_term * table_term); assert_eq!(left, right); } diff --git a/backend/src/plonk/lookup/verifier.rs b/backend/src/plonk/lookup/verifier.rs index 8639077a07..b394d39898 100644 --- a/backend/src/plonk/lookup/verifier.rs +++ b/backend/src/plonk/lookup/verifier.rs @@ -109,8 +109,8 @@ impl Evaluated { // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) let left = self.product_next_eval - * &(self.permuted_input_eval + &*beta) - * &(self.permuted_table_eval + &*gamma); + * (self.permuted_input_eval + *beta) + * (self.permuted_table_eval + *gamma); let compress_expressions = |expressions: &[Expression]| { expressions @@ -124,28 +124,28 @@ impl Evaluated { &|query| instance_evals[query.index.unwrap()], &|challenge| challenges[challenge.index()], &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, + &|a, b| a + b, + &|a, b| a * b, + &|a, scalar| a * scalar, ) }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + .fold(C::Scalar::ZERO, |acc, eval| acc * *theta + eval) }; let right = self.product_eval - * &(compress_expressions(&argument.input_expressions) + &*beta) - * &(compress_expressions(&argument.table_expressions) + &*gamma); + * (compress_expressions(&argument.input_expressions) + *beta) + * (compress_expressions(&argument.table_expressions) + *gamma); - (left - &right) * &active_rows + (left - right) * active_rows }; std::iter::empty() .chain( // l_0(X) * (1 - z(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + Some(l_0 * (C::Scalar::ONE - self.product_eval)), ) .chain( // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), + Some(l_last * (self.product_eval.square() - self.product_eval)), ) .chain( // (1 - (l_last(X) + l_blind(X))) * ( @@ -156,13 +156,13 @@ impl Evaluated { ) .chain(Some( // l_0(X) * (a'(X) - s'(X)) = 0 - l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), + l_0 * (self.permuted_input_eval - self.permuted_table_eval), )) .chain(Some( // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - (self.permuted_input_eval - &self.permuted_table_eval) - * &(self.permuted_input_eval - &self.permuted_input_inv_eval) - * &active_rows, + (self.permuted_input_eval - self.permuted_table_eval) + * (self.permuted_input_eval - 
self.permuted_input_inv_eval) + * active_rows, )) } diff --git a/backend/src/plonk/permutation/keygen.rs b/backend/src/plonk/permutation/keygen.rs index a22c87f08c..f85f343c42 100644 --- a/backend/src/plonk/permutation/keygen.rs +++ b/backend/src/plonk/permutation/keygen.rs @@ -13,13 +13,18 @@ use crate::{ use halo2_middleware::circuit::{Any, ColumnMid}; use halo2_middleware::permutation::{ArgumentV2, AssemblyMid}; -#[cfg(feature = "thread-safe-region")] +// NOTE: Temporarily disabled thread-safe-region feature. Regions are a frontend concept, so the +// thread-safe support for them should be only in the frontend package. + +// #[cfg(feature = "thread-safe-region")] use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; +/* #[cfg(feature = "thread-safe-region")] use std::collections::{BTreeSet, HashMap}; +*/ -#[cfg(not(feature = "thread-safe-region"))] +// #[cfg(not(feature = "thread-safe-region"))] /// Struct that accumulates all the necessary data in order to construct the permutation argument. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Assembly { @@ -33,7 +38,7 @@ pub struct Assembly { sizes: Vec>, } -#[cfg(not(feature = "thread-safe-region"))] +// #[cfg(not(feature = "thread-safe-region"))] impl Assembly { pub(crate) fn new_from_assembly_mid( n: usize, @@ -143,12 +148,13 @@ impl Assembly { } } +/* #[cfg(feature = "thread-safe-region")] /// Struct that accumulates all the necessary data in order to construct the permutation argument. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Assembly { /// Columns that participate on the copy permutation argument. - columns: Vec>, + columns: Vec, /// Mapping of the actual copies done. cycles: Vec>, /// Mapping of the actual copies done. 
@@ -165,10 +171,10 @@ pub struct Assembly { impl Assembly { pub(crate) fn new_from_assembly_mid( n: usize, - p: &Argument, + p: &ArgumentV2, a: &AssemblyMid, ) -> Result { - let mut assembly = Self::new(n, p); + let mut assembly = Self::new(n, &p.clone().into()); for copy in &a.copies { assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; } @@ -176,21 +182,27 @@ impl Assembly { } pub(crate) fn new(n: usize, p: &Argument) -> Self { + // Initialize the copy vector to keep track of copy constraints in all + // the permutation arguments. + let mut columns = vec![]; + for i in 0..p.columns.len() { + // Computes [(i, 0), (i, 1), ..., (i, n - 1)] + columns.push((0..n).map(|j| (i, j)).collect()); + } + Assembly { - columns: p.columns.clone(), - cycles: Vec::with_capacity(n), - ordered_cycles: Vec::with_capacity(n), - aux: HashMap::new(), - col_len: n, - num_cols: p.columns.len(), + columns: p.columns.clone().into_iter().map(|c| c.into()).collect(), + mapping: columns.clone(), + aux: columns, + sizes: vec![vec![1usize; n]; p.columns.len()], } } pub(crate) fn copy( &mut self, - left_column: ColumnMid, + left_column: ColumnMid, left_row: usize, - right_column: ColumnMid, + right_column: ColumnMid, right_row: usize, ) -> Result<(), Error> { let left_column = self @@ -316,7 +328,7 @@ impl Assembly { } /// Returns columns that participate in the permutation argument. 
- pub fn columns(&self) -> &[ColumnMid] { + pub fn columns(&self) -> &[ColumnMid] { &self.columns } @@ -331,6 +343,7 @@ impl Assembly { }) } } +*/ pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( params: &P, diff --git a/backend/src/plonk/permutation/prover.rs b/backend/src/plonk/permutation/prover.rs index 317286b6cd..81cdddf4b4 100644 --- a/backend/src/plonk/permutation/prover.rs +++ b/backend/src/plonk/permutation/prover.rs @@ -110,7 +110,7 @@ pub(in crate::plonk) fn permutation_commit< .zip(values[column.index()][start..].iter()) .zip(permuted_column_values[start..].iter()) { - *modified_values *= &(*beta * permuted_value + &*gamma + value); + *modified_values *= *beta * permuted_value + *gamma + value; } }); } @@ -128,13 +128,13 @@ pub(in crate::plonk) fn permutation_commit< Any::Instance => instance, }; parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); + let mut deltaomega = deltaomega * omega.pow_vartime([start as u64, 0, 0, 0]); for (modified_values, value) in modified_values .iter_mut() .zip(values[column.index()][start..].iter()) { // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); + *modified_values *= deltaomega * *beta + *gamma + value; deltaomega *= ω } }); diff --git a/backend/src/plonk/permutation/verifier.rs b/backend/src/plonk/permutation/verifier.rs index 9fa98ddc2e..e0fe5439be 100644 --- a/backend/src/plonk/permutation/verifier.rs +++ b/backend/src/plonk/permutation/verifier.rs @@ -123,13 +123,13 @@ impl Evaluated { .chain( self.sets .first() - .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), + .map(|first_set| l_0 * (C::Scalar::ONE - first_set.permutation_product_eval)), ) // Enforce only for the last set. 
// l_last(X) * (z_l(X)^2 - z_l(X)) = 0 .chain(self.sets.last().map(|last_set| { - (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) - * &l_last + (last_set.permutation_product_eval.square() - last_set.permutation_product_eval) + * l_last })) // Except for the first set, enforce. // l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 @@ -144,7 +144,7 @@ impl Evaluated { last_set.permutation_product_last_eval.unwrap(), ) }) - .map(move |(set, prev_last)| (set - &prev_last) * &l_0), + .map(move |(set, prev_last)| (set - prev_last) * l_0), ) // And for all the sets we enforce: // (1 - (l_last(X) + l_blind(X))) * ( @@ -175,12 +175,12 @@ impl Evaluated { }) .zip(permutation_evals.iter()) { - left *= &(eval + &(*beta * permutation_eval) + &*gamma); + left *= eval + (*beta * permutation_eval) + *gamma; } let mut right = set.permutation_product_eval; - let mut current_delta = (*beta * &*x) - * &(::DELTA + let mut current_delta = (*beta * *x) + * (::DELTA .pow_vartime([(chunk_index * chunk_len) as u64])); for eval in columns.iter().map(|&column| match column.column_type() { Any::Advice(_) => { @@ -193,11 +193,11 @@ impl Evaluated { instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } }) { - right *= &(eval + ¤t_delta + &*gamma); + right *= eval + current_delta + *gamma; current_delta *= &C::Scalar::DELTA; } - (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) + (left - right) * (C::Scalar::ONE - (l_last + l_blind)) }), ) } diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index 7ebb022e9c..c685945a8a 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -258,6 +258,7 @@ impl< } /// Commit the `witness` at `phase` and return the challenges after `phase`. 
+ #[allow(clippy::type_complexity)] pub fn commit_phase( &mut self, phase: u8, @@ -454,7 +455,7 @@ impl< .iter() .map(|lookup| { lookup_commit_permuted( - &lookup, + lookup, pk, params, domain, @@ -527,7 +528,7 @@ impl< .iter() .map(|shuffle| { shuffle_commit_product( - &shuffle, + shuffle, pk, params, domain, diff --git a/backend/src/plonk/shuffle/verifier.rs b/backend/src/plonk/shuffle/verifier.rs index 9bbb122d9e..e989de2c7b 100644 --- a/backend/src/plonk/shuffle/verifier.rs +++ b/backend/src/plonk/shuffle/verifier.rs @@ -81,31 +81,31 @@ impl Evaluated { &|query| instance_evals[query.index.unwrap()], &|challenge| challenges[challenge.index()], &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, + &|a, b| a + b, + &|a, b| a * b, + &|a, scalar| a * scalar, ) }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + .fold(C::Scalar::ZERO, |acc, eval| acc * *theta + eval) }; // z(\omega X) (s(X) + \gamma) let left = self.product_next_eval - * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); + * (compress_expressions(&argument.shuffle_expressions) + *gamma); // z(X) (a(X) + \gamma) let right = - self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); + self.product_eval * (compress_expressions(&argument.input_expressions) + *gamma); - (left - &right) * &active_rows + (left - right) * active_rows }; std::iter::empty() .chain( // l_0(X) * (1 - z'(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + Some(l_0 * (C::Scalar::ONE - self.product_eval)), ) .chain( // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), + Some(l_last * (self.product_eval.square() - self.product_eval)), ) .chain( // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) diff --git a/backend/src/plonk/vanishing/verifier.rs b/backend/src/plonk/vanishing/verifier.rs index d570a93b59..93de57b093 100644 --- 
a/backend/src/plonk/vanishing/verifier.rs +++ b/backend/src/plonk/vanishing/verifier.rs @@ -95,7 +95,7 @@ impl PartiallyEvaluated { y: ChallengeY, xn: C::Scalar, ) -> Evaluated { - let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); + let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * *y + v); let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); let h_commitment = diff --git a/backend/src/plonk/verifier.rs b/backend/src/plonk/verifier.rs index 02b3bae87a..f86f0134db 100644 --- a/backend/src/plonk/verifier.rs +++ b/backend/src/plonk/verifier.rs @@ -327,9 +327,9 @@ where &|query| instance_evals[query.index.unwrap()], &|challenge| challenges[challenge.index()], &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, + &|a, b| a + b, + &|a, b| a * b, + &|a, scalar| a * scalar, ) }) })) diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 0150468dc3..26e4a1b5ab 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -29,11 +29,11 @@ pub struct Column { pub column_type: C, } -impl Into for Column { - fn into(self) -> metadata::Column { +impl From> for metadata::Column { + fn from(val: Column) -> Self { metadata::Column { - index: self.index(), - column_type: *self.column_type(), + index: val.index(), + column_type: *val.column_type(), } } } @@ -126,11 +126,11 @@ impl From for Column { } } -impl Into for Column { - fn into(self) -> ColumnMid { +impl From> for ColumnMid { + fn from(val: Column) -> Self { ColumnMid { - index: self.index(), - column_type: *self.column_type(), + index: val.index(), + column_type: *val.column_type(), } } } @@ -469,11 +469,11 @@ impl Challenge { } } -impl Into for Challenge { - fn into(self) -> ChallengeMid { +impl From for ChallengeMid { + fn from(val: Challenge) -> Self { ChallengeMid { - index: self.index, - phase: self.phase, + index: val.index, + phase: val.phase, } } } @@ -699,9 +699,9 
@@ pub enum Expression { Scaled(Box>, F), } -impl Into> for Expression { - fn into(self) -> ExpressionMid { - match self { +impl From> for ExpressionMid { + fn from(val: Expression) -> Self { + match val { Expression::Constant(c) => ExpressionMid::Constant(c), Expression::Selector(_) => unreachable!(), Expression::Fixed(FixedQuery { @@ -1538,7 +1538,7 @@ impl QueriesMap { rotation: query.rotation, }) } - ExpressionMid::Challenge(c) => Expression::Challenge(c.clone().into()), + ExpressionMid::Challenge(c) => Expression::Challenge((*c).into()), ExpressionMid::Negated(e) => Expression::Negated(Box::new(self.as_expression(e))), ExpressionMid::Sum(lhs, rhs) => Expression::Sum( Box::new(self.as_expression(lhs)), @@ -1566,13 +1566,13 @@ impl From> for ConstraintSystemV2Backend { gates: cs .gates .into_iter() - .map(|mut g| { + .flat_map(|mut g| { let constraint_names = std::mem::take(&mut g.constraint_names); let gate_name = g.name.clone(); g.polys.into_iter().enumerate().map(move |(i, e)| { let name = match constraint_names[i].as_str() { "" => gate_name.clone(), - constraint_name => format!("{}:{}", gate_name, constraint_name), + constraint_name => format!("{gate_name}:{constraint_name}"), }; GateV2Backend { name, @@ -1580,7 +1580,6 @@ impl From> for ConstraintSystemV2Backend { } }) }) - .flatten() .collect(), permutation: halo2_middleware::permutation::ArgumentV2 { columns: cs @@ -1759,6 +1758,7 @@ fn cs2_collect_queries_shuffles( /// Collect all queries used in the expressions of gates, lookups and shuffles. Map the /// expressions of gates, lookups and shuffles into equivalent ones with indexed query /// references. 
+#[allow(clippy::type_complexity)] pub fn collect_queries( cs2: &ConstraintSystemV2Backend, ) -> ( @@ -1972,7 +1972,7 @@ impl Default for ConstraintSystem { advice_queries: Vec::new(), num_advice_queries: Vec::new(), instance_queries: Vec::new(), - permutation: permutation::Argument::new(), + permutation: permutation::Argument::default(), lookups: Vec::new(), shuffles: Vec::new(), general_column_annotations: HashMap::new(), diff --git a/common/src/plonk/permutation.rs b/common/src/plonk/permutation.rs index ffe8435641..cd7fc611b1 100644 --- a/common/src/plonk/permutation.rs +++ b/common/src/plonk/permutation.rs @@ -5,7 +5,7 @@ use halo2_middleware::circuit::Any; use halo2_middleware::permutation::{ArgumentV2, Cell}; /// A permutation argument. -#[derive(Debug, Clone)] +#[derive(Default, Debug, Clone)] pub struct Argument { /// A sequence of columns involved in the argument. pub columns: Vec>, @@ -20,10 +20,6 @@ impl From for Argument { } impl Argument { - pub fn new() -> Self { - Argument { columns: vec![] } - } - /// Returns the minimum circuit degree required by the permutation argument. /// The argument may use larger degree gates depending on the actual /// circuit's degree and how many columns are involved in the permutation. 
diff --git a/common/src/poly/kzg/msm.rs b/common/src/poly/kzg/msm.rs index fa8359f94a..b45dfe2a99 100644 --- a/common/src/poly/kzg/msm.rs +++ b/common/src/poly/kzg/msm.rs @@ -103,17 +103,23 @@ where projectives_msms: Vec>, } -impl PreMSM +impl Default for PreMSM where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub fn new() -> Self { + fn default() -> Self { PreMSM { projectives_msms: vec![], } } +} +impl PreMSM +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ pub fn normalize(self) -> MSMKZG { let (scalars, bases) = self .projectives_msms diff --git a/common/src/poly/kzg/multiopen/shplonk/verifier.rs b/common/src/poly/kzg/multiopen/shplonk/verifier.rs index f5a4d824f6..27e33c62c7 100644 --- a/common/src/poly/kzg/multiopen/shplonk/verifier.rs +++ b/common/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -72,7 +72,7 @@ where let h2 = transcript.read_point().map_err(|_| Error::SamplingError)?; let (mut z_0_diff_inverse, mut z_0) = (E::Fr::ZERO, E::Fr::ZERO); - let (mut outer_msm, mut r_outer_acc) = (PreMSM::::new(), E::Fr::ZERO); + let (mut outer_msm, mut r_outer_acc) = (PreMSM::::default(), E::Fr::ZERO); for (i, (rotation_set, power_of_v)) in rotation_sets.iter().zip(powers(*v)).enumerate() { let diffs: Vec = super_point_set .iter() diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index 582ec3949d..c4288f560c 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -28,6 +28,7 @@ pub use halo2_common::circuit::{layouter, Layouter, Value}; /// copy constraints assignments. The output of this function can then be used for the key /// generation, and proof generation. /// If `compress_selectors` is true, multiple selector columns may be multiplexed. 
+#[allow(clippy::type_complexity)] pub fn compile_circuit>( k: u32, circuit: &ConcreteCircuit, diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index 23d56c3f9d..2dd615ccc9 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -720,8 +720,8 @@ impl + Ord> MockProver { v })); - #[cfg(feature = "thread-safe-region")] - prover.permutation.build_ordered_mapping(); + // #[cfg(feature = "thread-safe-region")] + // prover.permutation.build_ordered_mapping(); Ok(prover) } diff --git a/frontend/src/dev/cost_model.rs b/frontend/src/dev/cost_model.rs index 86ce03800a..ec0bd437c8 100644 --- a/frontend/src/dev/cost_model.rs +++ b/frontend/src/dev/cost_model.rs @@ -4,7 +4,7 @@ use std::collections::HashSet; use std::{iter, num::ParseIntError, str::FromStr}; -use crate::plonk::Circuit; +use halo2_common::plonk::circuit::Circuit; use halo2_middleware::ff::{Field, FromUniformBytes}; use serde::Deserialize; use serde_derive::Serialize; diff --git a/frontend/src/dev/graph.rs b/frontend/src/dev/graph.rs index 381cd6bd37..ed3523c553 100644 --- a/frontend/src/dev/graph.rs +++ b/frontend/src/dev/graph.rs @@ -1,13 +1,13 @@ +use halo2_common::plonk::{ + circuit::{Circuit, Column}, + Assignment, Challenge, ConstraintSystem, Error, FloorPlanner, Selector, +}; +use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::ff::Field; +use halo2_middleware::plonk::Assigned; use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; -use crate::{ - circuit::Value, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, -}; +use crate::circuit::Value; pub mod layout; @@ -154,7 +154,7 @@ impl Assignment for Graph { _: usize, _: Column, _: usize, - ) -> Result<(), crate::plonk::Error> { + ) -> Result<(), halo2_common::plonk::Error> { // Do nothing; we don't care about permutations in this context. 
Ok(()) } diff --git a/frontend/src/dev/graph/layout.rs b/frontend/src/dev/graph/layout.rs index 4777e05ff8..829479ad0d 100644 --- a/frontend/src/dev/graph/layout.rs +++ b/frontend/src/dev/graph/layout.rs @@ -6,11 +6,9 @@ use plotters::{ use std::collections::HashSet; use std::ops::Range; -use crate::{ - circuit::layouter::RegionColumn, - dev::cost::Layout, - plonk::{Any, Circuit, Column, ConstraintSystem, FloorPlanner}, -}; +use crate::{circuit::layouter::RegionColumn, dev::cost::Layout}; +use halo2_common::plonk::{circuit::Column, Circuit, ConstraintSystem, FloorPlanner}; +use halo2_middleware::circuit::Any; /// Graphical renderer for circuit layouts. /// diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index df0160596e..0fe79b99aa 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -68,6 +68,7 @@ gumdrop = "0.8" proptest = "1" dhat = "0.3.2" serde_json = "1" +plotters = { version = "0.3.0", features = ["bitmap_backend", "bitmap_encoder", "ttf"] } [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index 78ecf3d435..31c9ca41c4 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -30,6 +30,12 @@ pub mod arithmetic { /// Tools for developing circuits. 
pub mod dev { pub use halo2_frontend::dev::{metadata, FailureLocation, MockProver, VerifyFailure}; + + #[cfg(feature = "cost-estimator")] + pub use halo2_frontend::dev::cost_model; + + #[cfg(feature = "dev-graph")] + pub use halo2_frontend::dev::{circuit_dot_graph, CircuitLayout}; } /// Contains utilities for performing arithmetic over univariate polynomials in /// various forms, including computing commitments to them and provably opening diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index c647fa2e63..ffc184a40a 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -65,6 +65,7 @@ struct MyCircuitConfig { } impl MyCircuitConfig { + #[allow(clippy::type_complexity)] fn assign_gate>( &self, region: &mut Region<'_, F>, @@ -516,7 +517,7 @@ fn test_mycircuit_full_legacy() { create_proof::, ProverSHPLONK<'_, Bn256>, _, _, _, _>( ¶ms, &pk, - &[circuit.clone()], + &[circuit], &[instances_slice], &mut rng, &mut transcript, @@ -584,7 +585,7 @@ fn test_mycircuit_full_split() { .unwrap(); let mut challenges = HashMap::new(); for phase in 0..cs.phases().count() { - println!("phase {}", phase); + println!("phase {phase}"); let witness = witness_calc.calc(phase as u8, &challenges).unwrap(); challenges = prover.commit_phase(phase as u8, witness).unwrap(); } diff --git a/middleware/src/circuit.rs b/middleware/src/circuit.rs index 004e86d4fe..b5676e61f9 100644 --- a/middleware/src/circuit.rs +++ b/middleware/src/circuit.rs @@ -190,17 +190,11 @@ pub struct ColumnMid { } /// An advice column -#[derive(Clone, Copy, Eq, PartialEq, Hash)] +#[derive(Default, Clone, Copy, Eq, PartialEq, Hash)] pub struct Advice { pub phase: u8, } -impl Default for Advice { - fn default() -> Advice { - Advice { phase: 0 } - } -} - impl Advice { /// Returns `Advice` in given `Phase` pub fn new(phase: u8) -> Advice { From fe2e28ea35bb051a2d70da2d74ccba4689334923 Mon Sep 17 00:00:00 2001 From: 
Eduard S Date: Mon, 29 Jan 2024 18:22:16 +0000 Subject: [PATCH 69/79] Fix doc tests imports --- common/src/circuit/value.rs | 5 +++-- common/src/plonk/circuit.rs | 19 ++++++++++--------- frontend/src/circuit.rs | 2 +- frontend/src/dev.rs | 9 ++++++--- frontend/src/dev/gates.rs | 6 ++++-- frontend/src/dev/tfp.rs | 4 +++- 6 files changed, 27 insertions(+), 18 deletions(-) diff --git a/common/src/circuit/value.rs b/common/src/circuit/value.rs index 5ec5942c94..20f81d5307 100644 --- a/common/src/circuit/value.rs +++ b/common/src/circuit/value.rs @@ -35,7 +35,7 @@ impl Value { /// # Examples /// /// ``` - /// use halo2_proofs::circuit::Value; + /// use halo2_common::circuit::Value; /// /// let v = Value::known(37); /// ``` @@ -646,7 +646,8 @@ impl Value { /// If you have a `Value`, convert it to `Value>` first: /// ``` /// # use halo2curves::pasta::pallas::Base as F; - /// use halo2_proofs::{circuit::Value, plonk::Assigned}; + /// use halo2_common::circuit::Value; + /// use halo2_middleware::plonk::Assigned; /// /// let v = Value::known(F::from(2)); /// let v: Value> = v.into(); diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 26e4a1b5ab..43bcf75515 100644 --- a/common/src/plonk/circuit.rs +++ b/common/src/plonk/circuit.rs @@ -267,9 +267,9 @@ impl SealedPhase for super::ThirdPhase { /// /// Selectors can be used to conditionally enable (portions of) gates: /// ``` -/// use halo2_proofs::poly::Rotation; +/// use halo2_middleware::poly::Rotation; /// # use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; +/// # use halo2_common::plonk::ConstraintSystem; /// /// # let mut meta = ConstraintSystem::::default(); /// let a = meta.advice_column(); @@ -290,12 +290,12 @@ impl SealedPhase for super::ThirdPhase { /// Selectors are disabled on all rows by default, and must be explicitly enabled on each /// row when required: /// ``` -/// use halo2_proofs::{ -/// circuit::{Chip, Layouter, Value}, -/// plonk::{Advice, Column, Error, 
Selector}, -/// }; +/// use halo2_middleware::circuit::Advice; +/// use halo2_common::circuit::{Chip, Layouter, Value}; +/// use halo2_common::plonk::circuit::{Column, Selector}; +/// use halo2_common::plonk::Error; /// use halo2_middleware::ff::Field; -/// # use halo2_proofs::plonk::Fixed; +/// # use halo2_middleware::circuit::Fixed; /// /// struct Config { /// a: Column, @@ -1356,9 +1356,10 @@ impl From> for Vec> { /// A set of polynomial constraints with a common selector. /// /// ``` -/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; +/// use halo2_common::{plonk::{Constraints, Expression}}; +/// use halo2_middleware::poly::Rotation; /// use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; +/// # use halo2_common::plonk::ConstraintSystem; /// /// # let mut meta = ConstraintSystem::::default(); /// let a = meta.advice_column(); diff --git a/frontend/src/circuit.rs b/frontend/src/circuit.rs index c4288f560c..09b66da25e 100644 --- a/frontend/src/circuit.rs +++ b/frontend/src/circuit.rs @@ -16,7 +16,7 @@ use std::collections::HashMap; use std::fmt::Debug; use std::ops::RangeTo; -mod floor_planner; +pub mod floor_planner; mod table_layouter; // Re-exports from common diff --git a/frontend/src/dev.rs b/frontend/src/dev.rs index 2dd615ccc9..ba0e7d673d 100644 --- a/frontend/src/dev.rs +++ b/frontend/src/dev.rs @@ -182,12 +182,15 @@ impl Mul for Value { /// # Examples /// /// ``` -/// use halo2_proofs::{ +/// use halo2_frontend::{ /// circuit::{Layouter, SimpleFloorPlanner, Value}, /// dev::{FailureLocation, MockProver, VerifyFailure}, -/// plonk::{Advice, Any, Circuit, Column, ConstraintSystem, Error, Selector}, -/// poly::Rotation, /// }; +/// use halo2_common::{ +/// plonk::{circuit::Column, Circuit, ConstraintSystem, Error, Selector}, +/// }; +/// use halo2_middleware::circuit::{Advice, Any}; +/// use halo2_middleware::poly::Rotation; /// use halo2_middleware::ff::PrimeField; /// use halo2curves::pasta::Fp; /// const 
K: u32 = 5; diff --git a/frontend/src/dev/gates.rs b/frontend/src/dev/gates.rs index 2d6e43c10c..732ec7eeb3 100644 --- a/frontend/src/dev/gates.rs +++ b/frontend/src/dev/gates.rs @@ -27,11 +27,13 @@ struct Gate { /// /// ``` /// use halo2_middleware::ff::Field; -/// use halo2_proofs::{ +/// use halo2_middleware::poly::Rotation; +/// use halo2_frontend::{ /// circuit::{Layouter, SimpleFloorPlanner}, /// dev::CircuitGates, +/// }; +/// use halo2_common::{ /// plonk::{Circuit, ConstraintSystem, Error}, -/// poly::Rotation, /// }; /// use halo2curves::pasta::pallas; /// diff --git a/frontend/src/dev/tfp.rs b/frontend/src/dev/tfp.rs index 8729d33afc..ff3dd5b623 100644 --- a/frontend/src/dev/tfp.rs +++ b/frontend/src/dev/tfp.rs @@ -30,9 +30,11 @@ use halo2_middleware::plonk::Assigned; /// /// ``` /// use halo2_middleware::ff::Field; -/// use halo2_proofs::{ +/// use halo2_frontend::{ /// circuit::{floor_planner, Layouter, Value}, /// dev::TracingFloorPlanner, +/// }; +/// use halo2_common::{ /// plonk::{Circuit, ConstraintSystem, Error}, /// }; /// From 134143c618f41aea4805fd0cff14962c819397f1 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 09:46:05 +0000 Subject: [PATCH 70/79] Resolve feedback from @han0110 --- backend/src/plonk/prover.rs | 1 - common/src/plonk/circuit.rs | 73 -------------------- halo2_proofs/src/plonk/prover.rs | 1 - halo2_proofs/tests/frontend_backend_split.rs | 7 +- 4 files changed, 5 insertions(+), 77 deletions(-) diff --git a/backend/src/plonk/prover.rs b/backend/src/plonk/prover.rs index c685945a8a..c4561e6b36 100644 --- a/backend/src/plonk/prover.rs +++ b/backend/src/plonk/prover.rs @@ -733,7 +733,6 @@ impl< .chain(vanishing.open(x)); let prover = P::new(params); - println!("DBG create_proof"); prover .create_proof(rng, self.transcript, instances) .map_err(|_| Error::ConstraintSystemFailure)?; diff --git a/common/src/plonk/circuit.rs b/common/src/plonk/circuit.rs index 43bcf75515..3e1363b919 100644 --- a/common/src/plonk/circuit.rs 
+++ b/common/src/plonk/circuit.rs @@ -1617,79 +1617,6 @@ impl From> for ConstraintSystemV2Backend { } } -/* -impl Into> for ConstraintSystem { - fn into(self) -> ConstraintSystemV2Backend { - ConstraintSystemV2Backend { - num_fixed_columns: self.num_fixed_columns, - num_advice_columns: self.num_advice_columns, - num_instance_columns: self.num_instance_columns, - num_challenges: self.num_challenges, - unblinded_advice_columns: self.unblinded_advice_columns.clone(), - advice_column_phase: self.advice_column_phase.iter().map(|p| p.0).collect(), - challenge_phase: self.challenge_phase.iter().map(|p| p.0).collect(), - gates: self - .gates - .iter() - .map(|g| { - g.polys.clone().into_iter().enumerate().map(|(i, e)| { - let name = match g.constraint_name(i) { - "" => g.name.clone(), - constraint_name => format!("{}:{}", g.name, constraint_name), - }; - GateV2Backend { - name, - poly: e.into(), - } - }) - }) - .flatten() - .collect(), - permutation: self.permutation.clone(), - lookups: self - .lookups - .iter() - .map(|l| halo2_middleware::lookup::ArgumentV2 { - name: l.name.clone(), - input_expressions: l - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - table_expressions: l - .table_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - shuffles: self - .shuffles - .iter() - .map(|s| halo2_middleware::shuffle::ArgumentV2 { - name: s.name.clone(), - input_expressions: s - .input_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - shuffle_expressions: s - .shuffle_expressions - .clone() - .into_iter() - .map(|e| e.into()) - .collect(), - }) - .collect(), - general_column_annotations: self.general_column_annotations.clone(), - } - } -} -*/ - /// Collect queries used in gates while mapping those gates to equivalent ones with indexed /// query references in the expressions. 
fn cs2_collect_queries_gates( diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index 0e9ba97a81..a6a5e7195b 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -43,7 +43,6 @@ where let mut challenges = HashMap::new(); let phases = prover.phases.clone(); for phase in &phases { - println!("DBG phase {}", phase.0); let mut witnesses = Vec::with_capacity(circuits.len()); for witness_calc in witness_calcs.iter_mut() { witnesses.push(witness_calc.calc(phase.0, &challenges)?); diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index ffc184a40a..4b7621b9ea 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -423,8 +423,11 @@ impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit, mut layouter: impl Layouter, ) -> Result<(), Error> { - // 2 queries from first gate, 3 for permutation argument, 1 for multipoen, 1 for off-by-one - // errors, 1 for off-by-two errors? 
+ // - 2 queries from first gate + // - 3 for permutation argument + // - 1 for multipoen + // - 1 for the last row of grand product poly to check that the product result is 1 + // - 1 for off-by-one errors let unusable_rows = 2 + 3 + 1 + 1 + 1; let max_rows = 2usize.pow(self.k) - unusable_rows; for config in &config { From 7eda496db87a8b41d079d6e5b87c42cde4434c93 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 10:53:16 +0000 Subject: [PATCH 71/79] Address feedback from @adria0 --- Cargo.toml | 8 ++++---- {backend => halo2_backend}/Cargo.toml | 4 ++-- {backend => halo2_backend}/src/lib.rs | 0 {backend => halo2_backend}/src/plonk.rs | 0 {backend => halo2_backend}/src/plonk/evaluation.rs | 0 {backend => halo2_backend}/src/plonk/keygen.rs | 0 {backend => halo2_backend}/src/plonk/lookup.rs | 0 {backend => halo2_backend}/src/plonk/lookup/prover.rs | 0 {backend => halo2_backend}/src/plonk/lookup/verifier.rs | 0 {backend => halo2_backend}/src/plonk/permutation.rs | 0 .../src/plonk/permutation/keygen.rs | 0 .../src/plonk/permutation/prover.rs | 0 .../src/plonk/permutation/verifier.rs | 0 {backend => halo2_backend}/src/plonk/prover.rs | 0 {backend => halo2_backend}/src/plonk/shuffle.rs | 0 {backend => halo2_backend}/src/plonk/shuffle/prover.rs | 0 {backend => halo2_backend}/src/plonk/shuffle/verifier.rs | 0 {backend => halo2_backend}/src/plonk/vanishing.rs | 0 {backend => halo2_backend}/src/plonk/vanishing/prover.rs | 0 .../src/plonk/vanishing/verifier.rs | 0 {backend => halo2_backend}/src/plonk/verifier.rs | 0 {backend => halo2_backend}/src/plonk/verifier/batch.rs | 0 {common => halo2_common}/Cargo.toml | 2 +- {common => halo2_common}/src/arithmetic.rs | 0 {common => halo2_common}/src/circuit.rs | 0 {common => halo2_common}/src/circuit/floor_planner.rs | 0 .../src/circuit/floor_planner/single_pass.rs | 0 {common => halo2_common}/src/circuit/floor_planner/v1.rs | 0 .../src/circuit/floor_planner/v1/strategy.rs | 0 {common => 
halo2_common}/src/circuit/layouter.rs | 0 {common => halo2_common}/src/circuit/table_layouter.rs | 0 {common => halo2_common}/src/circuit/value.rs | 0 {common => halo2_common}/src/helpers.rs | 0 {common => halo2_common}/src/lib.rs | 0 {common => halo2_common}/src/multicore.rs | 0 {common => halo2_common}/src/plonk.rs | 0 {common => halo2_common}/src/plonk/circuit.rs | 0 .../src/plonk/circuit/compress_selectors.rs | 0 {common => halo2_common}/src/plonk/error.rs | 0 {common => halo2_common}/src/plonk/keygen.rs | 0 {common => halo2_common}/src/plonk/lookup.rs | 0 {common => halo2_common}/src/plonk/permutation.rs | 0 {common => halo2_common}/src/plonk/shuffle.rs | 0 {common => halo2_common}/src/poly.rs | 0 {common => halo2_common}/src/poly/commitment.rs | 0 {common => halo2_common}/src/poly/domain.rs | 0 {common => halo2_common}/src/poly/ipa/commitment.rs | 0 .../src/poly/ipa/commitment/prover.rs | 0 .../src/poly/ipa/commitment/verifier.rs | 0 {common => halo2_common}/src/poly/ipa/mod.rs | 0 {common => halo2_common}/src/poly/ipa/msm.rs | 0 {common => halo2_common}/src/poly/ipa/multiopen.rs | 0 {common => halo2_common}/src/poly/ipa/multiopen/prover.rs | 0 .../src/poly/ipa/multiopen/verifier.rs | 0 {common => halo2_common}/src/poly/ipa/strategy.rs | 0 {common => halo2_common}/src/poly/kzg/commitment.rs | 0 {common => halo2_common}/src/poly/kzg/mod.rs | 0 {common => halo2_common}/src/poly/kzg/msm.rs | 0 {common => halo2_common}/src/poly/kzg/multiopen.rs | 0 {common => halo2_common}/src/poly/kzg/multiopen/gwc.rs | 0 .../src/poly/kzg/multiopen/gwc/prover.rs | 0 .../src/poly/kzg/multiopen/gwc/verifier.rs | 0 .../src/poly/kzg/multiopen/shplonk.rs | 0 .../src/poly/kzg/multiopen/shplonk/prover.rs | 0 .../src/poly/kzg/multiopen/shplonk/verifier.rs | 0 {common => halo2_common}/src/poly/kzg/strategy.rs | 0 {common => halo2_common}/src/poly/multiopen_test.rs | 0 {common => halo2_common}/src/poly/query.rs | 0 {common => halo2_common}/src/poly/strategy.rs | 0 {common => 
halo2_common}/src/transcript.rs | 0 {frontend => halo2_frontend}/Cargo.toml | 4 ++-- {frontend => halo2_frontend}/src/circuit.rs | 0 {frontend => halo2_frontend}/src/circuit/floor_planner.rs | 0 .../src/circuit/floor_planner/single_pass.rs | 0 .../src/circuit/floor_planner/v1.rs | 0 .../src/circuit/floor_planner/v1/strategy.rs | 0 .../src/circuit/table_layouter.rs | 0 {frontend => halo2_frontend}/src/dev.rs | 0 {frontend => halo2_frontend}/src/dev/cost.rs | 0 {frontend => halo2_frontend}/src/dev/cost_model.rs | 0 {frontend => halo2_frontend}/src/dev/failure.rs | 0 {frontend => halo2_frontend}/src/dev/failure/emitter.rs | 0 {frontend => halo2_frontend}/src/dev/gates.rs | 0 {frontend => halo2_frontend}/src/dev/graph.rs | 0 {frontend => halo2_frontend}/src/dev/graph/layout.rs | 0 {frontend => halo2_frontend}/src/dev/metadata.rs | 0 {frontend => halo2_frontend}/src/dev/tfp.rs | 0 {frontend => halo2_frontend}/src/dev/util.rs | 0 {frontend => halo2_frontend}/src/lib.rs | 0 {middleware => halo2_middleware}/Cargo.toml | 0 {middleware => halo2_middleware}/src/circuit.rs | 5 ----- {middleware => halo2_middleware}/src/lib.rs | 0 {middleware => halo2_middleware}/src/lookup.rs | 0 {middleware => halo2_middleware}/src/metadata.rs | 0 {middleware => halo2_middleware}/src/permutation.rs | 0 {middleware => halo2_middleware}/src/plonk.rs | 0 {middleware => halo2_middleware}/src/poly.rs | 0 {middleware => halo2_middleware}/src/shuffle.rs | 0 halo2_proofs/Cargo.toml | 8 ++++---- 99 files changed, 13 insertions(+), 18 deletions(-) rename {backend => halo2_backend}/Cargo.toml (95%) rename {backend => halo2_backend}/src/lib.rs (100%) rename {backend => halo2_backend}/src/plonk.rs (100%) rename {backend => halo2_backend}/src/plonk/evaluation.rs (100%) rename {backend => halo2_backend}/src/plonk/keygen.rs (100%) rename {backend => halo2_backend}/src/plonk/lookup.rs (100%) rename {backend => halo2_backend}/src/plonk/lookup/prover.rs (100%) rename {backend => 
halo2_backend}/src/plonk/lookup/verifier.rs (100%) rename {backend => halo2_backend}/src/plonk/permutation.rs (100%) rename {backend => halo2_backend}/src/plonk/permutation/keygen.rs (100%) rename {backend => halo2_backend}/src/plonk/permutation/prover.rs (100%) rename {backend => halo2_backend}/src/plonk/permutation/verifier.rs (100%) rename {backend => halo2_backend}/src/plonk/prover.rs (100%) rename {backend => halo2_backend}/src/plonk/shuffle.rs (100%) rename {backend => halo2_backend}/src/plonk/shuffle/prover.rs (100%) rename {backend => halo2_backend}/src/plonk/shuffle/verifier.rs (100%) rename {backend => halo2_backend}/src/plonk/vanishing.rs (100%) rename {backend => halo2_backend}/src/plonk/vanishing/prover.rs (100%) rename {backend => halo2_backend}/src/plonk/vanishing/verifier.rs (100%) rename {backend => halo2_backend}/src/plonk/verifier.rs (100%) rename {backend => halo2_backend}/src/plonk/verifier/batch.rs (100%) rename {common => halo2_common}/Cargo.toml (97%) rename {common => halo2_common}/src/arithmetic.rs (100%) rename {common => halo2_common}/src/circuit.rs (100%) rename {common => halo2_common}/src/circuit/floor_planner.rs (100%) rename {common => halo2_common}/src/circuit/floor_planner/single_pass.rs (100%) rename {common => halo2_common}/src/circuit/floor_planner/v1.rs (100%) rename {common => halo2_common}/src/circuit/floor_planner/v1/strategy.rs (100%) rename {common => halo2_common}/src/circuit/layouter.rs (100%) rename {common => halo2_common}/src/circuit/table_layouter.rs (100%) rename {common => halo2_common}/src/circuit/value.rs (100%) rename {common => halo2_common}/src/helpers.rs (100%) rename {common => halo2_common}/src/lib.rs (100%) rename {common => halo2_common}/src/multicore.rs (100%) rename {common => halo2_common}/src/plonk.rs (100%) rename {common => halo2_common}/src/plonk/circuit.rs (100%) rename {common => halo2_common}/src/plonk/circuit/compress_selectors.rs (100%) rename {common => halo2_common}/src/plonk/error.rs 
(100%) rename {common => halo2_common}/src/plonk/keygen.rs (100%) rename {common => halo2_common}/src/plonk/lookup.rs (100%) rename {common => halo2_common}/src/plonk/permutation.rs (100%) rename {common => halo2_common}/src/plonk/shuffle.rs (100%) rename {common => halo2_common}/src/poly.rs (100%) rename {common => halo2_common}/src/poly/commitment.rs (100%) rename {common => halo2_common}/src/poly/domain.rs (100%) rename {common => halo2_common}/src/poly/ipa/commitment.rs (100%) rename {common => halo2_common}/src/poly/ipa/commitment/prover.rs (100%) rename {common => halo2_common}/src/poly/ipa/commitment/verifier.rs (100%) rename {common => halo2_common}/src/poly/ipa/mod.rs (100%) rename {common => halo2_common}/src/poly/ipa/msm.rs (100%) rename {common => halo2_common}/src/poly/ipa/multiopen.rs (100%) rename {common => halo2_common}/src/poly/ipa/multiopen/prover.rs (100%) rename {common => halo2_common}/src/poly/ipa/multiopen/verifier.rs (100%) rename {common => halo2_common}/src/poly/ipa/strategy.rs (100%) rename {common => halo2_common}/src/poly/kzg/commitment.rs (100%) rename {common => halo2_common}/src/poly/kzg/mod.rs (100%) rename {common => halo2_common}/src/poly/kzg/msm.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/gwc.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/gwc/prover.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/gwc/verifier.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/shplonk.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/shplonk/prover.rs (100%) rename {common => halo2_common}/src/poly/kzg/multiopen/shplonk/verifier.rs (100%) rename {common => halo2_common}/src/poly/kzg/strategy.rs (100%) rename {common => halo2_common}/src/poly/multiopen_test.rs (100%) rename {common => halo2_common}/src/poly/query.rs (100%) rename {common => halo2_common}/src/poly/strategy.rs (100%) rename {common => 
halo2_common}/src/transcript.rs (100%) rename {frontend => halo2_frontend}/Cargo.toml (95%) rename {frontend => halo2_frontend}/src/circuit.rs (100%) rename {frontend => halo2_frontend}/src/circuit/floor_planner.rs (100%) rename {frontend => halo2_frontend}/src/circuit/floor_planner/single_pass.rs (100%) rename {frontend => halo2_frontend}/src/circuit/floor_planner/v1.rs (100%) rename {frontend => halo2_frontend}/src/circuit/floor_planner/v1/strategy.rs (100%) rename {frontend => halo2_frontend}/src/circuit/table_layouter.rs (100%) rename {frontend => halo2_frontend}/src/dev.rs (100%) rename {frontend => halo2_frontend}/src/dev/cost.rs (100%) rename {frontend => halo2_frontend}/src/dev/cost_model.rs (100%) rename {frontend => halo2_frontend}/src/dev/failure.rs (100%) rename {frontend => halo2_frontend}/src/dev/failure/emitter.rs (100%) rename {frontend => halo2_frontend}/src/dev/gates.rs (100%) rename {frontend => halo2_frontend}/src/dev/graph.rs (100%) rename {frontend => halo2_frontend}/src/dev/graph/layout.rs (100%) rename {frontend => halo2_frontend}/src/dev/metadata.rs (100%) rename {frontend => halo2_frontend}/src/dev/tfp.rs (100%) rename {frontend => halo2_frontend}/src/dev/util.rs (100%) rename {frontend => halo2_frontend}/src/lib.rs (100%) rename {middleware => halo2_middleware}/Cargo.toml (100%) rename {middleware => halo2_middleware}/src/circuit.rs (98%) rename {middleware => halo2_middleware}/src/lib.rs (100%) rename {middleware => halo2_middleware}/src/lookup.rs (100%) rename {middleware => halo2_middleware}/src/metadata.rs (100%) rename {middleware => halo2_middleware}/src/permutation.rs (100%) rename {middleware => halo2_middleware}/src/plonk.rs (100%) rename {middleware => halo2_middleware}/src/poly.rs (100%) rename {middleware => halo2_middleware}/src/shuffle.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index 0b5d9a1ccf..458f57fbe9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,8 +2,8 @@ members = [ "halo2", "halo2_proofs", - "frontend", - 
"middleware", - "backend", - "common", + "halo2_frontend", + "halo2_middleware", + "halo2_backend", + "halo2_common", ] diff --git a/backend/Cargo.toml b/halo2_backend/Cargo.toml similarity index 95% rename from backend/Cargo.toml rename to halo2_backend/Cargo.toml index 5f7fbfc22f..807436a131 100644 --- a/backend/Cargo.toml +++ b/halo2_backend/Cargo.toml @@ -37,8 +37,8 @@ rand_chacha = "0.3" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" -halo2_middleware = { path = "../middleware" } -halo2_common = { path = "../common" } +halo2_middleware = { path = "../halo2_middleware" } +halo2_common = { path = "../halo2_common" } [dev-dependencies] assert_matches = "1.5" diff --git a/backend/src/lib.rs b/halo2_backend/src/lib.rs similarity index 100% rename from backend/src/lib.rs rename to halo2_backend/src/lib.rs diff --git a/backend/src/plonk.rs b/halo2_backend/src/plonk.rs similarity index 100% rename from backend/src/plonk.rs rename to halo2_backend/src/plonk.rs diff --git a/backend/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs similarity index 100% rename from backend/src/plonk/evaluation.rs rename to halo2_backend/src/plonk/evaluation.rs diff --git a/backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs similarity index 100% rename from backend/src/plonk/keygen.rs rename to halo2_backend/src/plonk/keygen.rs diff --git a/backend/src/plonk/lookup.rs b/halo2_backend/src/plonk/lookup.rs similarity index 100% rename from backend/src/plonk/lookup.rs rename to halo2_backend/src/plonk/lookup.rs diff --git a/backend/src/plonk/lookup/prover.rs b/halo2_backend/src/plonk/lookup/prover.rs similarity index 100% rename from backend/src/plonk/lookup/prover.rs rename to halo2_backend/src/plonk/lookup/prover.rs diff --git a/backend/src/plonk/lookup/verifier.rs b/halo2_backend/src/plonk/lookup/verifier.rs similarity index 100% rename from backend/src/plonk/lookup/verifier.rs 
rename to halo2_backend/src/plonk/lookup/verifier.rs diff --git a/backend/src/plonk/permutation.rs b/halo2_backend/src/plonk/permutation.rs similarity index 100% rename from backend/src/plonk/permutation.rs rename to halo2_backend/src/plonk/permutation.rs diff --git a/backend/src/plonk/permutation/keygen.rs b/halo2_backend/src/plonk/permutation/keygen.rs similarity index 100% rename from backend/src/plonk/permutation/keygen.rs rename to halo2_backend/src/plonk/permutation/keygen.rs diff --git a/backend/src/plonk/permutation/prover.rs b/halo2_backend/src/plonk/permutation/prover.rs similarity index 100% rename from backend/src/plonk/permutation/prover.rs rename to halo2_backend/src/plonk/permutation/prover.rs diff --git a/backend/src/plonk/permutation/verifier.rs b/halo2_backend/src/plonk/permutation/verifier.rs similarity index 100% rename from backend/src/plonk/permutation/verifier.rs rename to halo2_backend/src/plonk/permutation/verifier.rs diff --git a/backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs similarity index 100% rename from backend/src/plonk/prover.rs rename to halo2_backend/src/plonk/prover.rs diff --git a/backend/src/plonk/shuffle.rs b/halo2_backend/src/plonk/shuffle.rs similarity index 100% rename from backend/src/plonk/shuffle.rs rename to halo2_backend/src/plonk/shuffle.rs diff --git a/backend/src/plonk/shuffle/prover.rs b/halo2_backend/src/plonk/shuffle/prover.rs similarity index 100% rename from backend/src/plonk/shuffle/prover.rs rename to halo2_backend/src/plonk/shuffle/prover.rs diff --git a/backend/src/plonk/shuffle/verifier.rs b/halo2_backend/src/plonk/shuffle/verifier.rs similarity index 100% rename from backend/src/plonk/shuffle/verifier.rs rename to halo2_backend/src/plonk/shuffle/verifier.rs diff --git a/backend/src/plonk/vanishing.rs b/halo2_backend/src/plonk/vanishing.rs similarity index 100% rename from backend/src/plonk/vanishing.rs rename to halo2_backend/src/plonk/vanishing.rs diff --git 
a/backend/src/plonk/vanishing/prover.rs b/halo2_backend/src/plonk/vanishing/prover.rs similarity index 100% rename from backend/src/plonk/vanishing/prover.rs rename to halo2_backend/src/plonk/vanishing/prover.rs diff --git a/backend/src/plonk/vanishing/verifier.rs b/halo2_backend/src/plonk/vanishing/verifier.rs similarity index 100% rename from backend/src/plonk/vanishing/verifier.rs rename to halo2_backend/src/plonk/vanishing/verifier.rs diff --git a/backend/src/plonk/verifier.rs b/halo2_backend/src/plonk/verifier.rs similarity index 100% rename from backend/src/plonk/verifier.rs rename to halo2_backend/src/plonk/verifier.rs diff --git a/backend/src/plonk/verifier/batch.rs b/halo2_backend/src/plonk/verifier/batch.rs similarity index 100% rename from backend/src/plonk/verifier/batch.rs rename to halo2_backend/src/plonk/verifier/batch.rs diff --git a/common/Cargo.toml b/halo2_common/Cargo.toml similarity index 97% rename from common/Cargo.toml rename to halo2_common/Cargo.toml index 2e33c56537..4f60dbfa11 100644 --- a/common/Cargo.toml +++ b/halo2_common/Cargo.toml @@ -34,7 +34,7 @@ sha3 = "0.9.1" serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" -halo2_middleware = { path = "../middleware" } +halo2_middleware = { path = "../halo2_middleware" } # Legacy circuit compatibility halo2_legacy_pdqsort = { version = "0.1.0", optional = true } diff --git a/common/src/arithmetic.rs b/halo2_common/src/arithmetic.rs similarity index 100% rename from common/src/arithmetic.rs rename to halo2_common/src/arithmetic.rs diff --git a/common/src/circuit.rs b/halo2_common/src/circuit.rs similarity index 100% rename from common/src/circuit.rs rename to halo2_common/src/circuit.rs diff --git a/common/src/circuit/floor_planner.rs b/halo2_common/src/circuit/floor_planner.rs similarity index 100% rename from common/src/circuit/floor_planner.rs rename to halo2_common/src/circuit/floor_planner.rs diff --git 
a/common/src/circuit/floor_planner/single_pass.rs b/halo2_common/src/circuit/floor_planner/single_pass.rs similarity index 100% rename from common/src/circuit/floor_planner/single_pass.rs rename to halo2_common/src/circuit/floor_planner/single_pass.rs diff --git a/common/src/circuit/floor_planner/v1.rs b/halo2_common/src/circuit/floor_planner/v1.rs similarity index 100% rename from common/src/circuit/floor_planner/v1.rs rename to halo2_common/src/circuit/floor_planner/v1.rs diff --git a/common/src/circuit/floor_planner/v1/strategy.rs b/halo2_common/src/circuit/floor_planner/v1/strategy.rs similarity index 100% rename from common/src/circuit/floor_planner/v1/strategy.rs rename to halo2_common/src/circuit/floor_planner/v1/strategy.rs diff --git a/common/src/circuit/layouter.rs b/halo2_common/src/circuit/layouter.rs similarity index 100% rename from common/src/circuit/layouter.rs rename to halo2_common/src/circuit/layouter.rs diff --git a/common/src/circuit/table_layouter.rs b/halo2_common/src/circuit/table_layouter.rs similarity index 100% rename from common/src/circuit/table_layouter.rs rename to halo2_common/src/circuit/table_layouter.rs diff --git a/common/src/circuit/value.rs b/halo2_common/src/circuit/value.rs similarity index 100% rename from common/src/circuit/value.rs rename to halo2_common/src/circuit/value.rs diff --git a/common/src/helpers.rs b/halo2_common/src/helpers.rs similarity index 100% rename from common/src/helpers.rs rename to halo2_common/src/helpers.rs diff --git a/common/src/lib.rs b/halo2_common/src/lib.rs similarity index 100% rename from common/src/lib.rs rename to halo2_common/src/lib.rs diff --git a/common/src/multicore.rs b/halo2_common/src/multicore.rs similarity index 100% rename from common/src/multicore.rs rename to halo2_common/src/multicore.rs diff --git a/common/src/plonk.rs b/halo2_common/src/plonk.rs similarity index 100% rename from common/src/plonk.rs rename to halo2_common/src/plonk.rs diff --git a/common/src/plonk/circuit.rs 
b/halo2_common/src/plonk/circuit.rs similarity index 100% rename from common/src/plonk/circuit.rs rename to halo2_common/src/plonk/circuit.rs diff --git a/common/src/plonk/circuit/compress_selectors.rs b/halo2_common/src/plonk/circuit/compress_selectors.rs similarity index 100% rename from common/src/plonk/circuit/compress_selectors.rs rename to halo2_common/src/plonk/circuit/compress_selectors.rs diff --git a/common/src/plonk/error.rs b/halo2_common/src/plonk/error.rs similarity index 100% rename from common/src/plonk/error.rs rename to halo2_common/src/plonk/error.rs diff --git a/common/src/plonk/keygen.rs b/halo2_common/src/plonk/keygen.rs similarity index 100% rename from common/src/plonk/keygen.rs rename to halo2_common/src/plonk/keygen.rs diff --git a/common/src/plonk/lookup.rs b/halo2_common/src/plonk/lookup.rs similarity index 100% rename from common/src/plonk/lookup.rs rename to halo2_common/src/plonk/lookup.rs diff --git a/common/src/plonk/permutation.rs b/halo2_common/src/plonk/permutation.rs similarity index 100% rename from common/src/plonk/permutation.rs rename to halo2_common/src/plonk/permutation.rs diff --git a/common/src/plonk/shuffle.rs b/halo2_common/src/plonk/shuffle.rs similarity index 100% rename from common/src/plonk/shuffle.rs rename to halo2_common/src/plonk/shuffle.rs diff --git a/common/src/poly.rs b/halo2_common/src/poly.rs similarity index 100% rename from common/src/poly.rs rename to halo2_common/src/poly.rs diff --git a/common/src/poly/commitment.rs b/halo2_common/src/poly/commitment.rs similarity index 100% rename from common/src/poly/commitment.rs rename to halo2_common/src/poly/commitment.rs diff --git a/common/src/poly/domain.rs b/halo2_common/src/poly/domain.rs similarity index 100% rename from common/src/poly/domain.rs rename to halo2_common/src/poly/domain.rs diff --git a/common/src/poly/ipa/commitment.rs b/halo2_common/src/poly/ipa/commitment.rs similarity index 100% rename from common/src/poly/ipa/commitment.rs rename to 
halo2_common/src/poly/ipa/commitment.rs diff --git a/common/src/poly/ipa/commitment/prover.rs b/halo2_common/src/poly/ipa/commitment/prover.rs similarity index 100% rename from common/src/poly/ipa/commitment/prover.rs rename to halo2_common/src/poly/ipa/commitment/prover.rs diff --git a/common/src/poly/ipa/commitment/verifier.rs b/halo2_common/src/poly/ipa/commitment/verifier.rs similarity index 100% rename from common/src/poly/ipa/commitment/verifier.rs rename to halo2_common/src/poly/ipa/commitment/verifier.rs diff --git a/common/src/poly/ipa/mod.rs b/halo2_common/src/poly/ipa/mod.rs similarity index 100% rename from common/src/poly/ipa/mod.rs rename to halo2_common/src/poly/ipa/mod.rs diff --git a/common/src/poly/ipa/msm.rs b/halo2_common/src/poly/ipa/msm.rs similarity index 100% rename from common/src/poly/ipa/msm.rs rename to halo2_common/src/poly/ipa/msm.rs diff --git a/common/src/poly/ipa/multiopen.rs b/halo2_common/src/poly/ipa/multiopen.rs similarity index 100% rename from common/src/poly/ipa/multiopen.rs rename to halo2_common/src/poly/ipa/multiopen.rs diff --git a/common/src/poly/ipa/multiopen/prover.rs b/halo2_common/src/poly/ipa/multiopen/prover.rs similarity index 100% rename from common/src/poly/ipa/multiopen/prover.rs rename to halo2_common/src/poly/ipa/multiopen/prover.rs diff --git a/common/src/poly/ipa/multiopen/verifier.rs b/halo2_common/src/poly/ipa/multiopen/verifier.rs similarity index 100% rename from common/src/poly/ipa/multiopen/verifier.rs rename to halo2_common/src/poly/ipa/multiopen/verifier.rs diff --git a/common/src/poly/ipa/strategy.rs b/halo2_common/src/poly/ipa/strategy.rs similarity index 100% rename from common/src/poly/ipa/strategy.rs rename to halo2_common/src/poly/ipa/strategy.rs diff --git a/common/src/poly/kzg/commitment.rs b/halo2_common/src/poly/kzg/commitment.rs similarity index 100% rename from common/src/poly/kzg/commitment.rs rename to halo2_common/src/poly/kzg/commitment.rs diff --git a/common/src/poly/kzg/mod.rs 
b/halo2_common/src/poly/kzg/mod.rs similarity index 100% rename from common/src/poly/kzg/mod.rs rename to halo2_common/src/poly/kzg/mod.rs diff --git a/common/src/poly/kzg/msm.rs b/halo2_common/src/poly/kzg/msm.rs similarity index 100% rename from common/src/poly/kzg/msm.rs rename to halo2_common/src/poly/kzg/msm.rs diff --git a/common/src/poly/kzg/multiopen.rs b/halo2_common/src/poly/kzg/multiopen.rs similarity index 100% rename from common/src/poly/kzg/multiopen.rs rename to halo2_common/src/poly/kzg/multiopen.rs diff --git a/common/src/poly/kzg/multiopen/gwc.rs b/halo2_common/src/poly/kzg/multiopen/gwc.rs similarity index 100% rename from common/src/poly/kzg/multiopen/gwc.rs rename to halo2_common/src/poly/kzg/multiopen/gwc.rs diff --git a/common/src/poly/kzg/multiopen/gwc/prover.rs b/halo2_common/src/poly/kzg/multiopen/gwc/prover.rs similarity index 100% rename from common/src/poly/kzg/multiopen/gwc/prover.rs rename to halo2_common/src/poly/kzg/multiopen/gwc/prover.rs diff --git a/common/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_common/src/poly/kzg/multiopen/gwc/verifier.rs similarity index 100% rename from common/src/poly/kzg/multiopen/gwc/verifier.rs rename to halo2_common/src/poly/kzg/multiopen/gwc/verifier.rs diff --git a/common/src/poly/kzg/multiopen/shplonk.rs b/halo2_common/src/poly/kzg/multiopen/shplonk.rs similarity index 100% rename from common/src/poly/kzg/multiopen/shplonk.rs rename to halo2_common/src/poly/kzg/multiopen/shplonk.rs diff --git a/common/src/poly/kzg/multiopen/shplonk/prover.rs b/halo2_common/src/poly/kzg/multiopen/shplonk/prover.rs similarity index 100% rename from common/src/poly/kzg/multiopen/shplonk/prover.rs rename to halo2_common/src/poly/kzg/multiopen/shplonk/prover.rs diff --git a/common/src/poly/kzg/multiopen/shplonk/verifier.rs b/halo2_common/src/poly/kzg/multiopen/shplonk/verifier.rs similarity index 100% rename from common/src/poly/kzg/multiopen/shplonk/verifier.rs rename to 
halo2_common/src/poly/kzg/multiopen/shplonk/verifier.rs diff --git a/common/src/poly/kzg/strategy.rs b/halo2_common/src/poly/kzg/strategy.rs similarity index 100% rename from common/src/poly/kzg/strategy.rs rename to halo2_common/src/poly/kzg/strategy.rs diff --git a/common/src/poly/multiopen_test.rs b/halo2_common/src/poly/multiopen_test.rs similarity index 100% rename from common/src/poly/multiopen_test.rs rename to halo2_common/src/poly/multiopen_test.rs diff --git a/common/src/poly/query.rs b/halo2_common/src/poly/query.rs similarity index 100% rename from common/src/poly/query.rs rename to halo2_common/src/poly/query.rs diff --git a/common/src/poly/strategy.rs b/halo2_common/src/poly/strategy.rs similarity index 100% rename from common/src/poly/strategy.rs rename to halo2_common/src/poly/strategy.rs diff --git a/common/src/transcript.rs b/halo2_common/src/transcript.rs similarity index 100% rename from common/src/transcript.rs rename to halo2_common/src/transcript.rs diff --git a/frontend/Cargo.toml b/halo2_frontend/Cargo.toml similarity index 95% rename from frontend/Cargo.toml rename to halo2_frontend/Cargo.toml index e3c728b04b..6a35d66652 100644 --- a/frontend/Cargo.toml +++ b/halo2_frontend/Cargo.toml @@ -33,8 +33,8 @@ tracing = "0.1" blake2b_simd = "1" # MSRV 1.66.0 serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} -halo2_middleware = { path = "../middleware" } -halo2_common = { path = "../common" } +halo2_middleware = { path = "../halo2_middleware" } +halo2_common = { path = "../halo2_common" } # Developer tooling dependencies plotters = { version = "0.3.0", default-features = false, optional = true } diff --git a/frontend/src/circuit.rs b/halo2_frontend/src/circuit.rs similarity index 100% rename from frontend/src/circuit.rs rename to halo2_frontend/src/circuit.rs diff --git a/frontend/src/circuit/floor_planner.rs b/halo2_frontend/src/circuit/floor_planner.rs similarity index 100% rename 
from frontend/src/circuit/floor_planner.rs rename to halo2_frontend/src/circuit/floor_planner.rs diff --git a/frontend/src/circuit/floor_planner/single_pass.rs b/halo2_frontend/src/circuit/floor_planner/single_pass.rs similarity index 100% rename from frontend/src/circuit/floor_planner/single_pass.rs rename to halo2_frontend/src/circuit/floor_planner/single_pass.rs diff --git a/frontend/src/circuit/floor_planner/v1.rs b/halo2_frontend/src/circuit/floor_planner/v1.rs similarity index 100% rename from frontend/src/circuit/floor_planner/v1.rs rename to halo2_frontend/src/circuit/floor_planner/v1.rs diff --git a/frontend/src/circuit/floor_planner/v1/strategy.rs b/halo2_frontend/src/circuit/floor_planner/v1/strategy.rs similarity index 100% rename from frontend/src/circuit/floor_planner/v1/strategy.rs rename to halo2_frontend/src/circuit/floor_planner/v1/strategy.rs diff --git a/frontend/src/circuit/table_layouter.rs b/halo2_frontend/src/circuit/table_layouter.rs similarity index 100% rename from frontend/src/circuit/table_layouter.rs rename to halo2_frontend/src/circuit/table_layouter.rs diff --git a/frontend/src/dev.rs b/halo2_frontend/src/dev.rs similarity index 100% rename from frontend/src/dev.rs rename to halo2_frontend/src/dev.rs diff --git a/frontend/src/dev/cost.rs b/halo2_frontend/src/dev/cost.rs similarity index 100% rename from frontend/src/dev/cost.rs rename to halo2_frontend/src/dev/cost.rs diff --git a/frontend/src/dev/cost_model.rs b/halo2_frontend/src/dev/cost_model.rs similarity index 100% rename from frontend/src/dev/cost_model.rs rename to halo2_frontend/src/dev/cost_model.rs diff --git a/frontend/src/dev/failure.rs b/halo2_frontend/src/dev/failure.rs similarity index 100% rename from frontend/src/dev/failure.rs rename to halo2_frontend/src/dev/failure.rs diff --git a/frontend/src/dev/failure/emitter.rs b/halo2_frontend/src/dev/failure/emitter.rs similarity index 100% rename from frontend/src/dev/failure/emitter.rs rename to 
halo2_frontend/src/dev/failure/emitter.rs diff --git a/frontend/src/dev/gates.rs b/halo2_frontend/src/dev/gates.rs similarity index 100% rename from frontend/src/dev/gates.rs rename to halo2_frontend/src/dev/gates.rs diff --git a/frontend/src/dev/graph.rs b/halo2_frontend/src/dev/graph.rs similarity index 100% rename from frontend/src/dev/graph.rs rename to halo2_frontend/src/dev/graph.rs diff --git a/frontend/src/dev/graph/layout.rs b/halo2_frontend/src/dev/graph/layout.rs similarity index 100% rename from frontend/src/dev/graph/layout.rs rename to halo2_frontend/src/dev/graph/layout.rs diff --git a/frontend/src/dev/metadata.rs b/halo2_frontend/src/dev/metadata.rs similarity index 100% rename from frontend/src/dev/metadata.rs rename to halo2_frontend/src/dev/metadata.rs diff --git a/frontend/src/dev/tfp.rs b/halo2_frontend/src/dev/tfp.rs similarity index 100% rename from frontend/src/dev/tfp.rs rename to halo2_frontend/src/dev/tfp.rs diff --git a/frontend/src/dev/util.rs b/halo2_frontend/src/dev/util.rs similarity index 100% rename from frontend/src/dev/util.rs rename to halo2_frontend/src/dev/util.rs diff --git a/frontend/src/lib.rs b/halo2_frontend/src/lib.rs similarity index 100% rename from frontend/src/lib.rs rename to halo2_frontend/src/lib.rs diff --git a/middleware/Cargo.toml b/halo2_middleware/Cargo.toml similarity index 100% rename from middleware/Cargo.toml rename to halo2_middleware/Cargo.toml diff --git a/middleware/src/circuit.rs b/halo2_middleware/src/circuit.rs similarity index 98% rename from middleware/src/circuit.rs rename to halo2_middleware/src/circuit.rs index b5676e61f9..1d4d1f936d 100644 --- a/middleware/src/circuit.rs +++ b/halo2_middleware/src/circuit.rs @@ -50,11 +50,6 @@ impl ChallengeMid { pub fn phase(&self) -> u8 { self.phase } - - // /// Return Expression - // pub fn expr(&self) -> ExpressionMid { - // ExpressionMid::Challenge(*self) - // } } /// Low-degree expression representing an identity that must hold over the committed 
columns. diff --git a/middleware/src/lib.rs b/halo2_middleware/src/lib.rs similarity index 100% rename from middleware/src/lib.rs rename to halo2_middleware/src/lib.rs diff --git a/middleware/src/lookup.rs b/halo2_middleware/src/lookup.rs similarity index 100% rename from middleware/src/lookup.rs rename to halo2_middleware/src/lookup.rs diff --git a/middleware/src/metadata.rs b/halo2_middleware/src/metadata.rs similarity index 100% rename from middleware/src/metadata.rs rename to halo2_middleware/src/metadata.rs diff --git a/middleware/src/permutation.rs b/halo2_middleware/src/permutation.rs similarity index 100% rename from middleware/src/permutation.rs rename to halo2_middleware/src/permutation.rs diff --git a/middleware/src/plonk.rs b/halo2_middleware/src/plonk.rs similarity index 100% rename from middleware/src/plonk.rs rename to halo2_middleware/src/plonk.rs diff --git a/middleware/src/poly.rs b/halo2_middleware/src/poly.rs similarity index 100% rename from middleware/src/poly.rs rename to halo2_middleware/src/poly.rs diff --git a/middleware/src/shuffle.rs b/halo2_middleware/src/shuffle.rs similarity index 100% rename from middleware/src/shuffle.rs rename to halo2_middleware/src/shuffle.rs diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index 0fe79b99aa..ce3e26e68e 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -49,10 +49,10 @@ name = "fft" harness = false [dependencies] -halo2_middleware = { path = "../middleware" } -halo2_common = { path = "../common" } -halo2_backend = { path = "../backend" } -halo2_frontend = { path = "../frontend" } +halo2_middleware = { path = "../halo2_middleware" } +halo2_common = { path = "../halo2_common" } +halo2_backend = { path = "../halo2_backend" } +halo2_frontend = { path = "../halo2_frontend" } halo2curves = { version = "0.6.0", default-features = false } rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } From acb13f81ff90b81eae1babff68065f814c79c163 Mon 
Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 12:59:17 +0000 Subject: [PATCH 72/79] Address feedback from @CPerezz, part 1 --- halo2_backend/src/lib.rs | 2 -- halo2_backend/src/plonk.rs | 15 ++------------- halo2_backend/src/plonk/evaluation.rs | 3 ++- halo2_backend/src/plonk/keygen.rs | 14 ++++---------- halo2_backend/src/plonk/lookup/prover.rs | 8 ++++---- halo2_backend/src/plonk/lookup/verifier.rs | 8 ++++---- halo2_backend/src/plonk/permutation/keygen.rs | 8 +++++--- halo2_backend/src/plonk/permutation/prover.rs | 7 +++++-- .../src/plonk/permutation/verifier.rs | 3 +-- halo2_backend/src/plonk/prover.rs | 19 +++++++++---------- halo2_backend/src/plonk/shuffle/prover.rs | 4 ++-- halo2_backend/src/plonk/shuffle/verifier.rs | 4 ++-- halo2_backend/src/plonk/vanishing/prover.rs | 2 +- halo2_backend/src/plonk/vanishing/verifier.rs | 3 +-- halo2_backend/src/plonk/verifier.rs | 8 ++++---- halo2_backend/src/plonk/verifier/batch.rs | 3 ++- halo2_common/src/circuit/value.rs | 5 ----- halo2_common/src/lib.rs | 6 +++--- halo2_common/src/plonk/error.rs | 3 ++- halo2_common/src/poly.rs | 1 + halo2_frontend/src/dev/tfp.rs | 15 +++++++++------ halo2_middleware/Cargo.toml | 6 +++--- halo2_proofs/src/plonk/keygen.rs | 3 +-- halo2_proofs/src/plonk/prover.rs | 7 ++++--- 24 files changed, 71 insertions(+), 86 deletions(-) diff --git a/halo2_backend/src/lib.rs b/halo2_backend/src/lib.rs index 36afe4c86d..5973dcf661 100644 --- a/halo2_backend/src/lib.rs +++ b/halo2_backend/src/lib.rs @@ -1,5 +1,3 @@ -#![allow(unused_imports)] // TODO: Remove - pub mod plonk; // Internal re-exports diff --git a/halo2_backend/src/plonk.rs b/halo2_backend/src/plonk.rs index 63159df4c8..1acb0763b8 100644 --- a/halo2_backend/src/plonk.rs +++ b/halo2_backend/src/plonk.rs @@ -17,16 +17,6 @@ use halo2_common::SerdeFormat; use std::io; -// TODO: Import propertly instead of reimporting from inside -pub use halo2_common::plonk::{ - ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, 
Error, -}; -pub mod circuit { - pub use halo2_common::plonk::circuit::{Assignment, Circuit, ConstraintSystem, Selector}; - pub use halo2_common::plonk::Expression; -} -pub use halo2_common::plonk::Expression; - mod evaluation; pub mod keygen; mod lookup; @@ -49,7 +39,7 @@ pub struct VerifyingKey { /// The representative of this `VerifyingKey` in transcripts. transcript_repr: C::Scalar, selectors: Vec>, - // TODO: Use setter/getter + // TODO: Use setter/getter https://github.com/privacy-scaling-explorations/halo2/issues/259 /// Whether selector compression is turned on or not. pub compress_selectors: bool, } @@ -330,8 +320,7 @@ pub struct PinnedVerificationKey<'a, C: CurveAffine> { /// particular circuit. #[derive(Clone, Debug)] pub struct ProvingKey { - // TODO: Add getter - pub vk: VerifyingKey, + vk: VerifyingKey, l0: Polynomial, l_last: Polynomial, l_active_row: Polynomial, diff --git a/halo2_backend/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs index 109523ecef..74c1fb1933 100644 --- a/halo2_backend/src/plonk/evaluation.rs +++ b/halo2_backend/src/plonk/evaluation.rs @@ -6,10 +6,11 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; +use halo2_common::plonk::{ConstraintSystem, Expression}; use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; -use super::{shuffle, ConstraintSystem, Expression}; +use super::shuffle; /// Return the index in the polynomial of size `isize` after rotation `rot`. 
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs index 4c61e4599d..81ff7c2f86 100644 --- a/halo2_backend/src/plonk/keygen.rs +++ b/halo2_backend/src/plonk/keygen.rs @@ -1,25 +1,19 @@ #![allow(clippy::int_plus_one)] -use std::ops::Range; - use group::Curve; use halo2_middleware::ff::{Field, FromUniformBytes}; -use super::{ - circuit::{Assignment, Circuit, ConstraintSystem, Selector}, - evaluation::Evaluator, - permutation, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, -}; +use super::{evaluation::Evaluator, permutation, Polynomial, ProvingKey, VerifyingKey}; use crate::{ arithmetic::{parallelize, CurveAffine}, - circuit::Value, poly::{ commitment::{Blind, Params}, EvaluationDomain, }, }; -use halo2_middleware::circuit::{Advice, Any, CompiledCircuitV2, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use halo2_common::plonk::circuit::{Circuit, ConstraintSystem}; +use halo2_common::plonk::Error; +use halo2_middleware::circuit::CompiledCircuitV2; pub(crate) fn create_domain( k: u32, diff --git a/halo2_backend/src/plonk/lookup/prover.rs b/halo2_backend/src/plonk/lookup/prover.rs index 3af77a7f2c..f95054608f 100644 --- a/halo2_backend/src/plonk/lookup/prover.rs +++ b/halo2_backend/src/plonk/lookup/prover.rs @@ -1,7 +1,4 @@ -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, -}; +use super::super::ProvingKey; use super::Argument; use crate::plonk::evaluation::evaluate; use crate::{ @@ -16,6 +13,9 @@ use group::{ ff::{BatchInvert, Field}, Curve, }; +use halo2_common::plonk::{ + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, Expression, +}; use halo2_middleware::ff::WithSmallOrderMulGroup; use halo2_middleware::poly::Rotation; use rand_core::RngCore; diff --git a/halo2_backend/src/plonk/lookup/verifier.rs 
b/halo2_backend/src/plonk/lookup/verifier.rs index b394d39898..dda0104200 100644 --- a/halo2_backend/src/plonk/lookup/verifier.rs +++ b/halo2_backend/src/plonk/lookup/verifier.rs @@ -1,15 +1,15 @@ use std::iter; -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, -}; use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::VerifyingKey, poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_common::plonk::{ + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, Expression, +}; use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; diff --git a/halo2_backend/src/plonk/permutation/keygen.rs b/halo2_backend/src/plonk/permutation/keygen.rs index f85f343c42..a4a6ee6cdc 100644 --- a/halo2_backend/src/plonk/permutation/keygen.rs +++ b/halo2_backend/src/plonk/permutation/keygen.rs @@ -4,20 +4,22 @@ use halo2_middleware::ff::{Field, PrimeField}; use super::{Argument, ProvingKey, VerifyingKey}; use crate::{ arithmetic::{parallelize, CurveAffine}, - plonk::Error, poly::{ commitment::{Blind, Params}, EvaluationDomain, }, }; -use halo2_middleware::circuit::{Any, ColumnMid}; +use halo2_common::plonk::Error; +use halo2_middleware::circuit::ColumnMid; use halo2_middleware::permutation::{ArgumentV2, AssemblyMid}; // NOTE: Temporarily disabled thread-safe-region feature. Regions are a frontend concept, so the // thread-safe support for them should be only in the frontend package. 
+// TODO: Bring the thread-safe region feature back +// https://github.com/privacy-scaling-explorations/halo2/issues/258 // #[cfg(feature = "thread-safe-region")] -use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; +// use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; /* #[cfg(feature = "thread-safe-region")] diff --git a/halo2_backend/src/plonk/permutation/prover.rs b/halo2_backend/src/plonk/permutation/prover.rs index 81cdddf4b4..4a1ec7d903 100644 --- a/halo2_backend/src/plonk/permutation/prover.rs +++ b/halo2_backend/src/plonk/permutation/prover.rs @@ -6,20 +6,23 @@ use halo2_middleware::ff::PrimeField; use rand_core::RngCore; use std::iter::{self, ExactSizeIterator}; -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::Argument; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, - plonk::{self, permutation::ProvingKey, Error}, + plonk::{self, permutation::ProvingKey}, poly::{ commitment::{Blind, Params}, Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; +use halo2_common::plonk::{ChallengeBeta, ChallengeGamma, ChallengeX, Error}; use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; +// TODO: Document a bit these types +// https://github.com/privacy-scaling-explorations/halo2/issues/264 + pub(crate) struct CommittedSet { pub(crate) permutation_product_poly: Polynomial, pub(crate) permutation_product_coset: Polynomial, diff --git a/halo2_backend/src/plonk/permutation/verifier.rs b/halo2_backend/src/plonk/permutation/verifier.rs index e0fe5439be..f343b60632 100644 --- a/halo2_backend/src/plonk/permutation/verifier.rs +++ b/halo2_backend/src/plonk/permutation/verifier.rs @@ -1,7 +1,6 @@ use halo2_middleware::ff::{Field, PrimeField}; use std::iter; -use super::super::{ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use 
crate::{ arithmetic::CurveAffine, @@ -9,7 +8,7 @@ use crate::{ poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use halo2_common::plonk::Error; +use halo2_common::plonk::{ChallengeBeta, ChallengeGamma, ChallengeX, Error}; use halo2_middleware::circuit::Any; use halo2_middleware::poly::Rotation; diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs index c4561e6b36..8abd86cce7 100644 --- a/halo2_backend/src/plonk/prover.rs +++ b/halo2_backend/src/plonk/prover.rs @@ -147,12 +147,12 @@ impl< pub fn new( params: &'params Scheme::ParamsProver, pk: &'a ProvingKey, - // TODO: If this was a vector the usage would be simpler + // TODO: If this was a vector the usage would be simpler. + // https://github.com/privacy-scaling-explorations/halo2/issues/265 instances: &[&[&[Scheme::Scalar]]], rng: R, transcript: &'a mut T, ) -> Result - // TODO: Can I move this `where` to the struct definition? where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { @@ -170,8 +170,7 @@ impl< let domain = &pk.vk.domain; - // TODO: Name this better - let mut instance_fn = + let mut commit_instance_fn = |instance: &[&[Scheme::Scalar]]| -> Result, Error> { let instance_values = instance .iter() @@ -183,7 +182,6 @@ impl< } for (poly, value) in poly.iter_mut().zip(values.iter()) { if !P::QUERY_INSTANCE { - // dbg!(1, value); transcript.common_scalar(*value)?; } *poly = *value; @@ -207,7 +205,6 @@ impl< drop(instance_commitments_projective); for commitment in &instance_commitments { - // dbg!(2, commitment); transcript.common_point(*commitment)?; } } @@ -227,7 +224,7 @@ impl< }; let instance: Vec> = instances .iter() - .map(|instance| instance_fn(instance)) + .map(|instance| commit_instance_fn(instance)) .collect::, _>>()?; let advice = vec![ @@ -270,11 +267,14 @@ impl< let current_phase = match self.phases.get(self.next_phase_index) { Some(phase) => phase, None => { - panic!("TODO: Return Error instead. 
All phases already commited"); + return Err(Error::Other("All phases already committed".to_string())); } }; if phase != current_phase.0 { - panic!("TODO: Return Error instead. Committing invalid phase"); + return Err(Error::Other(format!( + "Committing invalid phase. Expected {}, got {}", + current_phase.0, phase + ))); } let params = self.params; @@ -633,7 +633,6 @@ impl< ) }) .collect(); - // dbg!(&advice_evals); // Hash each advice column evaluation for eval in advice_evals.iter() { diff --git a/halo2_backend/src/plonk/shuffle/prover.rs b/halo2_backend/src/plonk/shuffle/prover.rs index 3903962d81..31dc9714bb 100644 --- a/halo2_backend/src/plonk/shuffle/prover.rs +++ b/halo2_backend/src/plonk/shuffle/prover.rs @@ -1,4 +1,4 @@ -use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, ProvingKey}; +use super::super::ProvingKey; use super::Argument; use crate::plonk::evaluation::evaluate; use crate::{ @@ -10,7 +10,7 @@ use crate::{ transcript::{EncodedChallenge, TranscriptWrite}, }; use group::{ff::BatchInvert, Curve}; -use halo2_common::plonk::Error; +use halo2_common::plonk::{ChallengeGamma, ChallengeTheta, ChallengeX, Error, Expression}; use halo2_middleware::ff::WithSmallOrderMulGroup; use halo2_middleware::poly::Rotation; use rand_core::RngCore; diff --git a/halo2_backend/src/plonk/shuffle/verifier.rs b/halo2_backend/src/plonk/shuffle/verifier.rs index e989de2c7b..dd32f3d6f2 100644 --- a/halo2_backend/src/plonk/shuffle/verifier.rs +++ b/halo2_backend/src/plonk/shuffle/verifier.rs @@ -1,13 +1,13 @@ use std::iter; -use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::VerifyingKey, poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_common::plonk::{ChallengeGamma, ChallengeTheta, ChallengeX, Error, Expression}; use halo2_middleware::ff::Field; use 
halo2_middleware::poly::Rotation; diff --git a/halo2_backend/src/plonk/vanishing/prover.rs b/halo2_backend/src/plonk/vanishing/prover.rs index d30d9dc4af..942441aefa 100644 --- a/halo2_backend/src/plonk/vanishing/prover.rs +++ b/halo2_backend/src/plonk/vanishing/prover.rs @@ -1,6 +1,7 @@ use std::{collections::HashMap, iter}; use group::Curve; +use halo2_common::plonk::{ChallengeX, Error}; use halo2_middleware::ff::Field; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; @@ -9,7 +10,6 @@ use super::Argument; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, multicore::current_num_threads, - plonk::{ChallengeX, Error}, poly::{ commitment::{Blind, ParamsProver}, Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, diff --git a/halo2_backend/src/plonk/vanishing/verifier.rs b/halo2_backend/src/plonk/vanishing/verifier.rs index 93de57b093..bdf4a45bc5 100644 --- a/halo2_backend/src/plonk/vanishing/verifier.rs +++ b/halo2_backend/src/plonk/vanishing/verifier.rs @@ -1,6 +1,6 @@ use std::iter; -use halo2_common::plonk::Error; +use halo2_common::plonk::{ChallengeX, ChallengeY, Error}; use halo2_middleware::ff::Field; use crate::{ @@ -13,7 +13,6 @@ use crate::{ transcript::{read_n_points, EncodedChallenge, TranscriptRead}, }; -use super::super::{ChallengeX, ChallengeY}; use super::Argument; pub struct Committed { diff --git a/halo2_backend/src/plonk/verifier.rs b/halo2_backend/src/plonk/verifier.rs index f86f0134db..c8e81f43e2 100644 --- a/halo2_backend/src/plonk/verifier.rs +++ b/halo2_backend/src/plonk/verifier.rs @@ -1,11 +1,11 @@ use group::Curve; +use halo2_common::plonk::{ + ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, +}; use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; use std::iter; -use super::{ - vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, -}; +use super::{vanishing, VerifyingKey}; use 
crate::arithmetic::compute_inner_product; use crate::plonk::lookup::verifier::lookup_read_permuted_commitments; use crate::plonk::permutation::verifier::permutation_read_product_commitments; diff --git a/halo2_backend/src/plonk/verifier/batch.rs b/halo2_backend/src/plonk/verifier/batch.rs index d869d87559..883e3605c8 100644 --- a/halo2_backend/src/plonk/verifier/batch.rs +++ b/halo2_backend/src/plonk/verifier/batch.rs @@ -1,4 +1,5 @@ use group::ff::Field; +use halo2_common::plonk::Error; use halo2_middleware::ff::FromUniformBytes; use halo2curves::CurveAffine; use rand_core::OsRng; @@ -8,7 +9,7 @@ use crate::{ multicore::{ IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, }, - plonk::{Error, VerifyingKey}, + plonk::VerifyingKey, poly::{ commitment::{Params, MSM}, ipa::{ diff --git a/halo2_common/src/circuit/value.rs b/halo2_common/src/circuit/value.rs index 20f81d5307..e7118fc51b 100644 --- a/halo2_common/src/circuit/value.rs +++ b/halo2_common/src/circuit/value.rs @@ -64,11 +64,6 @@ impl Value { } } - /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! - pub fn into_option(self) -> Option { - self.inner - } - /// Enforces an assertion on the contained value, if known. /// /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce diff --git a/halo2_common/src/lib.rs b/halo2_common/src/lib.rs index 5016057fa6..38e30e00f7 100644 --- a/halo2_common/src/lib.rs +++ b/halo2_common/src/lib.rs @@ -4,8 +4,6 @@ // The actual lints we want to disable. 
#![allow(clippy::op_ref, clippy::many_single_char_names)] #![deny(rustdoc::broken_intra_doc_links)] -// #![deny(missing_debug_implementations)] // TODO: Uncomment -// #![deny(missing_docs)] // TODO: Uncomment #![deny(unsafe_code)] pub mod arithmetic; @@ -14,6 +12,7 @@ pub use halo2curves; pub mod multicore; pub mod plonk; // TODO: Try to move this to backend and use a lightweight Polynomial type in the frontend +// https://github.com/privacy-scaling-explorations/halo2/issues/257 pub mod poly; pub mod transcript; @@ -21,4 +20,5 @@ pub mod helpers; pub use helpers::SerdeFormat; // TODO: Everything that is moved from this crate to frontend or backend should recover the -// pub status whenever possible. +// pub(crate) status whenever possible. +// https://github.com/privacy-scaling-explorations/halo2/issues/266 diff --git a/halo2_common/src/plonk/error.rs b/halo2_common/src/plonk/error.rs index 6b43695009..095b8dceb9 100644 --- a/halo2_common/src/plonk/error.rs +++ b/halo2_common/src/plonk/error.rs @@ -6,7 +6,8 @@ use super::TableColumn; use crate::plonk::circuit::Column; use halo2_middleware::circuit::Any; -// TODO: Consider splitting this Error into a frontend and backend version? +// TODO: Split this Error into a frontend and backend version +// https://github.com/privacy-scaling-explorations/halo2/issues/266 /// This is an error that could occur during proving or circuit synthesis. // TODO: these errors need to be cleaned up diff --git a/halo2_common/src/poly.rs b/halo2_common/src/poly.rs index bfabff6011..34d5b50738 100644 --- a/halo2_common/src/poly.rs +++ b/halo2_common/src/poly.rs @@ -35,6 +35,7 @@ pub use strategy::{Guard, VerificationStrategy}; // TODO: move everything from the poly module to the backend. This requires that the frontend // works without Poly (and just Vec). +// https://github.com/privacy-scaling-explorations/halo2/issues/257 /// This is an error that could occur during proving or circuit synthesis. 
// TODO: these errors need to be cleaned up diff --git a/halo2_frontend/src/dev/tfp.rs b/halo2_frontend/src/dev/tfp.rs index ff3dd5b623..6cf8150dac 100644 --- a/halo2_frontend/src/dev/tfp.rs +++ b/halo2_frontend/src/dev/tfp.rs @@ -225,10 +225,15 @@ impl> Layouter for TracingLayouter { } } -fn debug_value_and_return_cell(value: AssignedCell) -> Cell { - if let Some(v) = value.value().into_option() { +fn debug_value(value: &AssignedCell) { + value.value().assert_if_known(|v| { debug!(target: "assigned", value = ?v); - } + true + }); +} + +fn debug_value_and_return_cell(value: AssignedCell) -> Cell { + debug_value(&value); value.cell() } @@ -311,9 +316,7 @@ impl<'r, F: Field> RegionLayouter for TracingRegion<'r, F> { self.0 .assign_advice_from_instance(annotation, instance, row, advice, offset) .map(|value| { - if let Some(v) = value.value().into_option() { - debug!(target: "assigned", value = ?v); - } + debug_value(&value); (value.cell(), value.value().cloned()) }) } diff --git a/halo2_middleware/Cargo.toml b/halo2_middleware/Cargo.toml index a443aa0809..66ec0b5105 100644 --- a/halo2_middleware/Cargo.toml +++ b/halo2_middleware/Cargo.toml @@ -11,11 +11,11 @@ authors = [ edition = "2021" rust-version = "1.66.0" description = """ -TODO +Halo2 middleware. This package contains the types and traits required for the frontend-backend interaction. 
""" license = "MIT OR Apache-2.0" -repository = "TODO" -documentation = "TODO" +repository = "https://github.com/zcash/halo2" +documentation = "https://docs.rs/halo2_proofs" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index b395d041f2..7f40a2cfcf 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -1,10 +1,9 @@ use halo2_backend::plonk::{ - circuit::Circuit, keygen::{keygen_pk_v2, keygen_vk_v2}, ProvingKey, VerifyingKey, }; use halo2_backend::{arithmetic::CurveAffine, poly::commitment::Params}; -use halo2_common::plonk::Error; +use halo2_common::plonk::{circuit::Circuit, Error}; use halo2_frontend::circuit::compile_circuit; use halo2_middleware::ff::FromUniformBytes; diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index a6a5e7195b..6562d998d6 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -1,5 +1,5 @@ -use halo2_backend::plonk::{circuit::Circuit, prover::ProverV2, ProvingKey}; -use halo2_common::plonk::Error; +use halo2_backend::plonk::{prover::ProverV2, ProvingKey}; +use halo2_common::plonk::{circuit::Circuit, Error}; use halo2_common::poly::commitment::{CommitmentScheme, Params, Prover}; use halo2_common::transcript::{EncodedChallenge, TranscriptWrite}; use halo2_frontend::circuit::{compile_circuit, WitnessCalculator}; @@ -33,7 +33,8 @@ where if circuits.len() != instances.len() { return Err(Error::InvalidInstances); } - let (_, config, cs) = compile_circuit(params.k(), &circuits[0], pk.vk.compress_selectors)?; + let (_, config, cs) = + compile_circuit(params.k(), &circuits[0], pk.get_vk().compress_selectors)?; let mut witness_calcs: Vec<_> = circuits .iter() .enumerate() From bbf034a1f1207319ab0318e3ecfabe92392c2116 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 13:06:14 +0000 Subject: [PATCH 73/79] 
Address feedback from @CPerezz, part 2 --- halo2_backend/src/plonk/prover.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs index 8abd86cce7..8f8a60712f 100644 --- a/halo2_backend/src/plonk/prover.rs +++ b/halo2_backend/src/plonk/prover.rs @@ -68,6 +68,7 @@ impl< params: &'params Scheme::ParamsProver, pk: &'a ProvingKey, // TODO: If this was a vector the usage would be simpler + // https://github.com/privacy-scaling-explorations/halo2/issues/265 instance: &[&[Scheme::Scalar]], rng: R, transcript: &'a mut T, From 9927c824927cf82acf7a7c45848a7f86f85a7416 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 15:32:48 +0000 Subject: [PATCH 74/79] Address feedback from @CPerezz, part 3 --- halo2_backend/src/plonk/keygen.rs | 1 + halo2_common/src/plonk/keygen.rs | 2 +- halo2_common/src/plonk/permutation.rs | 9 ++- halo2_frontend/src/circuit.rs | 2 +- halo2_frontend/src/dev.rs | 6 +- halo2_middleware/src/circuit.rs | 8 +++ halo2_middleware/src/lib.rs | 99 --------------------------- halo2_middleware/src/permutation.rs | 9 +-- 8 files changed, 19 insertions(+), 117 deletions(-) diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs index 81ff7c2f86..7d3bf8c1b4 100644 --- a/halo2_backend/src/plonk/keygen.rs +++ b/halo2_backend/src/plonk/keygen.rs @@ -129,6 +129,7 @@ where // Compute l_0(X) // TODO: this can be done more efficiently + // https://github.com/privacy-scaling-explorations/halo2/issues/269 let mut l0 = vk.domain.empty_lagrange(); l0[0] = C::Scalar::ONE; let l0 = vk.domain.lagrange_to_coeff(l0); diff --git a/halo2_common/src/plonk/keygen.rs b/halo2_common/src/plonk/keygen.rs index 77507589cb..7f1b09d835 100644 --- a/halo2_common/src/plonk/keygen.rs +++ b/halo2_common/src/plonk/keygen.rs @@ -15,7 +15,7 @@ use halo2_middleware::plonk::Assigned; pub struct Assembly { pub k: u32, pub fixed: Vec, LagrangeCoeff>>, - pub permutation: permutation::AssemblyFront, + 
pub permutation: permutation::Assembly, pub selectors: Vec>, // A range of available rows for assignment and copies. pub usable_rows: Range, diff --git a/halo2_common/src/plonk/permutation.rs b/halo2_common/src/plonk/permutation.rs index cd7fc611b1..ed5704ff33 100644 --- a/halo2_common/src/plonk/permutation.rs +++ b/halo2_common/src/plonk/permutation.rs @@ -1,8 +1,8 @@ //! Implementation of permutation argument. use crate::plonk::{Column, Error}; -use halo2_middleware::circuit::Any; -use halo2_middleware::permutation::{ArgumentV2, Cell}; +use halo2_middleware::circuit::{Any, Cell}; +use halo2_middleware::permutation::ArgumentV2; /// A permutation argument. #[derive(Default, Debug, Clone)] @@ -70,15 +70,14 @@ impl Argument { } } -// TODO: Move to frontend #[derive(Clone, Debug)] -pub struct AssemblyFront { +pub struct Assembly { pub n: usize, pub columns: Vec>, pub copies: Vec<(Cell, Cell)>, } -impl AssemblyFront { +impl Assembly { pub fn new(n: usize, p: &Argument) -> Self { Self { n, diff --git a/halo2_frontend/src/circuit.rs b/halo2_frontend/src/circuit.rs index 09b66da25e..cf5167a5c4 100644 --- a/halo2_frontend/src/circuit.rs +++ b/halo2_frontend/src/circuit.rs @@ -56,7 +56,7 @@ pub fn compile_circuit>( let mut assembly = halo2_common::plonk::keygen::Assembly { k, fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], - permutation: permutation::AssemblyFront::new(n, &cs.permutation), + permutation: permutation::Assembly::new(n, &cs.permutation), selectors: vec![vec![false; n]; cs.num_selectors], usable_rows: 0..n - (cs.blinding_factors() + 1), _marker: std::marker::PhantomData, diff --git a/halo2_frontend/src/dev.rs b/halo2_frontend/src/dev.rs index ba0e7d673d..26dc8481d8 100644 --- a/halo2_frontend/src/dev.rs +++ b/halo2_frontend/src/dev.rs @@ -314,7 +314,7 @@ pub struct MockProver { challenges: Vec, - permutation: permutation::AssemblyFront, + permutation: permutation::Assembly, // A range of available rows for assignment and copies. 
usable_rows: Range, @@ -673,7 +673,7 @@ impl + Ord> MockProver { }; cs.num_advice_columns ]; - let permutation = permutation::AssemblyFront::new(n, &cs.permutation); + let permutation = permutation::Assembly::new(n, &cs.permutation); let constants = cs.constants.clone(); // Use hash chain to derive deterministic challenges for testing @@ -1243,7 +1243,7 @@ impl + Ord> MockProver { } /// Returns the permutation argument (`Assembly`) used within a MockProver instance. - pub fn permutation(&self) -> &permutation::AssemblyFront { + pub fn permutation(&self) -> &permutation::Assembly { &self.permutation } } diff --git a/halo2_middleware/src/circuit.rs b/halo2_middleware/src/circuit.rs index 1d4d1f936d..2bc17222f1 100644 --- a/halo2_middleware/src/circuit.rs +++ b/halo2_middleware/src/circuit.rs @@ -167,6 +167,7 @@ pub struct CompiledCircuitV2 { // trait is implemented for Any which is used in the backend. It would be great to find a way to // move all the `query_cell` implementations to the frontend and have them return `Expression`, // while keeping `Any` in the middleware. +// https://github.com/privacy-scaling-explorations/halo2/issues/270 /// A column type pub trait ColumnType: 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into @@ -184,6 +185,13 @@ pub struct ColumnMid { pub column_type: Any, } +/// A cell identifies a position in the plonkish matrix identified by a column and a row offset. 
+#[derive(Clone, Debug)] +pub struct Cell { + pub column: ColumnMid, + pub row: usize, +} + /// An advice column #[derive(Default, Clone, Copy, Eq, PartialEq, Hash)] pub struct Advice { diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs index c1362ccae0..da9d15c18e 100644 --- a/halo2_middleware/src/lib.rs +++ b/halo2_middleware/src/lib.rs @@ -7,102 +7,3 @@ pub mod poly; pub mod shuffle; pub use ff; - -// TODO: Remove with permutation::Argument simplification -pub mod multicore { - pub use rayon::{ - current_num_threads, - iter::{IndexedParallelIterator, IntoParallelRefIterator}, - iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}, - join, scope, - slice::ParallelSliceMut, - Scope, - }; - - pub trait TryFoldAndReduce { - /// Implements `iter.try_fold().try_reduce()` for `rayon::iter::ParallelIterator`, - /// falling back on `Iterator::try_fold` when the `multicore` feature flag is - /// disabled. - /// The `try_fold_and_reduce` function can only be called by a iter with - /// `Result` item type because the `fold_op` must meet the trait - /// bounds of both `try_fold` and `try_reduce` from rayon. - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result; - } - - impl TryFoldAndReduce for I - where - T: Send + Sync, - E: Send + Sync, - I: rayon::iter::ParallelIterator>, - { - fn try_fold_and_reduce( - self, - identity: impl Fn() -> T + Send + Sync, - fold_op: impl Fn(T, Result) -> Result + Send + Sync, - ) -> Result { - self.try_fold(&identity, &fold_op) - .try_reduce(&identity, |a, b| fold_op(a, Ok(b))) - } - } -} - -// TODO: Remove with permutation::Argument simplification -pub mod arithmetic { - use super::multicore; - - /// This utility function will parallelize an operation that is to be - /// performed over a mutable slice. 
- pub fn parallelize(v: &mut [T], f: F) { - // Algorithm rationale: - // - // Using the stdlib `chunks_mut` will lead to severe load imbalance. - // From https://github.com/rust-lang/rust/blob/e94bda3/library/core/src/slice/iter.rs#L1607-L1637 - // if the division is not exact, the last chunk will be the remainder. - // - // Dividing 40 items on 12 threads will lead to a chunk size of 40/12 = 3, - // There will be a 13 chunks of size 3 and 1 of size 1 distributed on 12 threads. - // This leads to 1 thread working on 6 iterations, 1 on 4 iterations and 10 on 3 iterations, - // a load imbalance of 2x. - // - // Instead we can divide work into chunks of size - // 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3 = 4*4 + 3*8 = 40 - // - // This would lead to a 6/4 = 1.5x speedup compared to naive chunks_mut - // - // See also OpenMP spec (page 60) - // http://www.openmp.org/mp-documents/openmp-4.5.pdf - // "When no chunk_size is specified, the iteration space is divided into chunks - // that are approximately equal in size, and at most one chunk is distributed to - // each thread. The size of the chunks is unspecified in this case." - // This implies chunks are the same size ±1 - - let f = &f; - let total_iters = v.len(); - let num_threads = multicore::current_num_threads(); - let base_chunk_size = total_iters / num_threads; - let cutoff_chunk_id = total_iters % num_threads; - let split_pos = cutoff_chunk_id * (base_chunk_size + 1); - let (v_hi, v_lo) = v.split_at_mut(split_pos); - - multicore::scope(|scope| { - // Skip special-case: number of iterations is cleanly divided by number of threads. - if cutoff_chunk_id != 0 { - for (chunk_id, chunk) in v_hi.chunks_exact_mut(base_chunk_size + 1).enumerate() { - let offset = chunk_id * (base_chunk_size + 1); - scope.spawn(move |_| f(chunk, offset)); - } - } - // Skip special-case: less iterations than number of threads. 
- if base_chunk_size != 0 { - for (chunk_id, chunk) in v_lo.chunks_exact_mut(base_chunk_size).enumerate() { - let offset = split_pos + (chunk_id * base_chunk_size); - scope.spawn(move |_| f(chunk, offset)); - } - } - }); - } -} diff --git a/halo2_middleware/src/permutation.rs b/halo2_middleware/src/permutation.rs index 349b55bbe9..d23520548c 100644 --- a/halo2_middleware/src/permutation.rs +++ b/halo2_middleware/src/permutation.rs @@ -1,11 +1,4 @@ -use crate::circuit::ColumnMid; - -// TODO: Dedup with other Cell definition, or move this to a higher level -#[derive(Clone, Debug)] -pub struct Cell { - pub column: ColumnMid, - pub row: usize, -} +use crate::circuit::{Cell, ColumnMid}; #[derive(Clone, Debug)] pub struct AssemblyMid { From fb2e556610b8de31425678373f9602ef86a63fd1 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Fri, 2 Feb 2024 15:57:43 +0000 Subject: [PATCH 75/79] Fix wasm32 build --- halo2_proofs/Cargo.toml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index ce3e26e68e..8bf059790b 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -55,6 +55,7 @@ halo2_backend = { path = "../halo2_backend" } halo2_frontend = { path = "../halo2_frontend" } halo2curves = { version = "0.6.0", default-features = false } rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +plotters = { version = "0.3.0", default-features = false, optional = true } [dev-dependencies] ff = "0.13" @@ -68,21 +69,26 @@ gumdrop = "0.8" proptest = "1" dhat = "0.3.2" serde_json = "1" -plotters = { version = "0.3.0", features = ["bitmap_backend", "bitmap_encoder", "ttf"] } [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } [features] default = ["batch", "bits"] -dev-graph = ["halo2_frontend/dev-graph"] -test-dev-graph = ["halo2_frontend/test-dev-graph"] +dev-graph = 
["halo2_frontend/dev-graph", "plotters"] +test-dev-graph = [ + "halo2_frontend/test-dev-graph", + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf" +] bits = ["halo2curves/bits"] gadget-traces = ["halo2_common/gadget-traces"] thread-safe-region = [] sanity-checks = [] batch = ["rand_core/getrandom"] -circuit-params = [] +circuit-params = ["halo2_common/circuit-params", "halo2_frontend/circuit-params", "halo2_backend/circuit-params"] heap-profiling = [] cost-estimator = ["halo2_frontend/cost-estimator"] derive_serde = ["halo2curves/derive_serde"] From 137b46e9f793f4032bb4fa16063f4f1d55ec8a60 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 6 Feb 2024 12:01:46 +0000 Subject: [PATCH 76/79] Use Vec instead of Vec> in prover::commit_phase Simplify the prover API by receiving the witness as Vec instead of Vec>. This moves the batch inverstion from the backend to the frontend, so that the backend directly receives the final witness values. This change requires updating the batch_invert_assigned method to work on Vec instead of Polynomial. 
--- halo2_backend/src/plonk/prover.rs | 125 ++++++++++++++---------------- halo2_common/src/plonk.rs | 1 - halo2_common/src/plonk/keygen.rs | 4 +- halo2_common/src/poly.rs | 33 ++++---- halo2_frontend/src/circuit.rs | 13 ++-- 5 files changed, 80 insertions(+), 96 deletions(-) diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs index 8f8a60712f..8ee3cae307 100644 --- a/halo2_backend/src/plonk/prover.rs +++ b/halo2_backend/src/plonk/prover.rs @@ -13,6 +13,7 @@ use halo2_common::plonk::{ }; use group::prime::PrimeCurveAffine; +use halo2_common::transcript::{EncodedChallenge, TranscriptWrite}; use halo2_common::{ arithmetic::{eval_polynomial, CurveAffine}, poly::{ @@ -20,11 +21,6 @@ use halo2_common::{ Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, }, }; -use halo2_common::{ - poly::batch_invert_assigned, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use halo2_middleware::plonk::Assigned; /// Collection of instance data used during proving for a single circuit proof. #[derive(Debug)] @@ -73,7 +69,6 @@ impl< rng: R, transcript: &'a mut T, ) -> Result - // TODO: Can I move this `where` to the struct definition? 
where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { @@ -90,7 +85,7 @@ impl< pub fn commit_phase( &mut self, phase: u8, - witness: Vec>>>, + witness: Vec>>, ) -> Result, Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, @@ -260,7 +255,7 @@ impl< pub fn commit_phase( &mut self, phase: u8, - witness: Vec>>>>, + witness: Vec>>>, ) -> Result, Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, @@ -339,67 +334,65 @@ impl< } } - let mut commit_phase_fn = |advice: &mut AdviceSingle, - witness: Vec< - Option, LagrangeCoeff>>, - >| - -> Result<(), Error> { - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); - let mut advice_values = - batch_invert_assigned::(witness.into_iter().flatten().collect()); - let unblinded_advice: HashSet = - HashSet::from_iter(meta.unblinded_advice_columns.clone()); - - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] { - assert_eq!(*cell, Scheme::Scalar::ZERO); + let mut commit_phase_fn = + |advice: &mut AdviceSingle, + witness: Vec>>| + -> Result<(), Error> { + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let mut advice_values: Vec<_> = witness.into_iter().flatten().collect(); + let unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); + + // Add blinding factors to advice columns + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] 
{ + *cell = Scheme::Scalar::random(&mut rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] { + assert_eq!(*cell, Scheme::Scalar::ZERO); + } } } - } - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); - - for commitment in &advice_commitments { - self.transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] = advice_values; - advice.advice_blinds[*column_index] = blind; - } - Ok(()) - }; + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) + .collect(); + let mut advice_commitments = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments, + ); + let advice_commitments = advice_commitments; + drop(advice_commitments_projective); + + for commitment in &advice_commitments { + 
self.transcript.write_point(*commitment)?; + } + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + Ok(()) + }; for (witness, advice) in witness.into_iter().zip(advice.iter_mut()) { commit_phase_fn( diff --git a/halo2_common/src/plonk.rs b/halo2_common/src/plonk.rs index 49f894b208..04ade4764f 100644 --- a/halo2_common/src/plonk.rs +++ b/halo2_common/src/plonk.rs @@ -6,7 +6,6 @@ //! [plonk]: https://eprint.iacr.org/2019/953 use crate::plonk::circuit::Column; -use crate::poly::{LagrangeCoeff, Polynomial}; use crate::transcript::ChallengeScalar; use halo2_middleware::circuit::{Advice, Fixed, Instance}; use halo2_middleware::poly::Rotation; diff --git a/halo2_common/src/plonk/keygen.rs b/halo2_common/src/plonk/keygen.rs index 7f1b09d835..a800e8c9a3 100644 --- a/halo2_common/src/plonk/keygen.rs +++ b/halo2_common/src/plonk/keygen.rs @@ -4,7 +4,7 @@ use halo2_middleware::ff::Field; use super::{ circuit::{Assignment, Challenge, Column, Selector}, - permutation, Error, LagrangeCoeff, Polynomial, + permutation, Error, }; use crate::circuit::Value; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; @@ -14,7 +14,7 @@ use halo2_middleware::plonk::Assigned; #[derive(Debug)] pub struct Assembly { pub k: u32, - pub fixed: Vec, LagrangeCoeff>>, + pub fixed: Vec>>, pub permutation: permutation::Assembly, pub selectors: Vec>, // A range of available rows for assignment and copies. 
diff --git a/halo2_common/src/poly.rs b/halo2_common/src/poly.rs index 34d5b50738..1e30ddd0a9 100644 --- a/halo2_common/src/poly.rs +++ b/halo2_common/src/poly.rs @@ -197,9 +197,7 @@ impl Polynomial { } } -pub fn batch_invert_assigned( - assigned: Vec, LagrangeCoeff>>, -) -> Vec> { +pub fn batch_invert_assigned(assigned: Vec>>) -> Vec> { let mut assigned_denominators: Vec<_> = assigned .iter() .map(|f| { @@ -222,26 +220,21 @@ pub fn batch_invert_assigned( assigned .iter() .zip(assigned_denominators) - .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) + .map(|(poly, inv_denoms)| { + poly_invert(poly, inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE))) + }) .collect() } -impl Polynomial, LagrangeCoeff> { - pub fn invert( - &self, - inv_denoms: impl Iterator + ExactSizeIterator, - ) -> Polynomial { - assert_eq!(inv_denoms.len(), self.values.len()); - Polynomial { - values: self - .values - .iter() - .zip(inv_denoms) - .map(|(a, inv_den)| a.numerator() * inv_den) - .collect(), - _marker: self._marker, - } - } +pub fn poly_invert( + poly: &[Assigned], + inv_denoms: impl Iterator + ExactSizeIterator, +) -> Vec { + assert_eq!(inv_denoms.len(), poly.len()); + poly.iter() + .zip(inv_denoms) + .map(|(a, inv_den)| a.numerator() * inv_den) + .collect() } impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { diff --git a/halo2_frontend/src/circuit.rs b/halo2_frontend/src/circuit.rs index cf5167a5c4..e4a2b78862 100644 --- a/halo2_frontend/src/circuit.rs +++ b/halo2_frontend/src/circuit.rs @@ -7,7 +7,7 @@ use halo2_common::plonk::{ Assignment, Circuit, ConstraintSystem, Error, FirstPhase, FloorPlanner, SecondPhase, Selector, ThirdPhase, }; -use halo2_common::poly::{batch_invert_assigned, Polynomial}; +use halo2_common::poly::batch_invert_assigned; use halo2_middleware::circuit::{Advice, Any, CompiledCircuitV2, Fixed, Instance, PreprocessingV2}; use halo2_middleware::ff::Field; use halo2_middleware::plonk::Assigned; @@ -55,7 
+55,7 @@ pub fn compile_circuit>( let mut assembly = halo2_common::plonk::keygen::Assembly { k, - fixed: vec![Polynomial::new_empty(n, F::ZERO.into()); cs.num_fixed_columns], + fixed: vec![vec![F::ZERO.into(); n]; cs.num_fixed_columns], permutation: permutation::Assembly::new(n, &cs.permutation), selectors: vec![vec![false; n]; cs.num_selectors], usable_rows: 0..n - (cs.blinding_factors() + 1), @@ -70,7 +70,7 @@ pub fn compile_circuit>( cs.constants.clone(), )?; - let fixed = batch_invert_assigned(assembly.fixed); + let mut fixed = batch_invert_assigned(assembly.fixed); let (cs, selector_polys) = if compress_selectors { cs.compress_selectors(assembly.selectors.clone()) } else { @@ -78,7 +78,6 @@ pub fn compile_circuit>( let selectors = std::mem::take(&mut assembly.selectors); cs.directly_convert_selectors_to_fixed(selectors) }; - let mut fixed: Vec<_> = fixed.into_iter().map(|p| p.values).collect(); fixed.extend(selector_polys.into_iter()); let preprocessing = PreprocessingV2 { @@ -278,7 +277,7 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret &mut self, phase: u8, challenges: &HashMap, - ) -> Result>>>, Error> { + ) -> Result>>, Error> { if phase != self.next_phase { return Err(Error::Other(format!( "Expected phase {}, got {}", @@ -330,8 +329,8 @@ impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, Concret .collect::>(); self.next_phase += 1; - Ok(witness - .advice + let advice_values = batch_invert_assigned(witness.advice); + Ok(advice_values .into_iter() .enumerate() .map(|(column_index, advice)| { From 7162953033ad0e3c354c765f81a3847d062d39ac Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 6 Feb 2024 12:12:38 +0000 Subject: [PATCH 77/79] Move Assigned from middleware to common --- halo2_common/src/circuit.rs | 2 +- .../src/circuit/floor_planner/single_pass.rs | 2 +- halo2_common/src/circuit/floor_planner/v1.rs | 2 +- halo2_common/src/circuit/layouter.rs | 2 +- halo2_common/src/circuit/table_layouter.rs | 2 +- 
halo2_common/src/circuit/value.rs | 4 +- halo2_common/src/plonk.rs | 664 +++++++++++++++++ halo2_common/src/plonk/circuit.rs | 2 +- halo2_common/src/plonk/keygen.rs | 2 +- halo2_common/src/poly.rs | 2 +- halo2_common/src/poly/domain.rs | 2 +- halo2_frontend/src/circuit.rs | 5 +- halo2_frontend/src/dev.rs | 5 +- halo2_frontend/src/dev/cost.rs | 3 +- halo2_frontend/src/dev/graph.rs | 3 +- halo2_frontend/src/dev/tfp.rs | 3 +- halo2_middleware/src/lib.rs | 1 - halo2_middleware/src/plonk.rs | 665 ------------------ halo2_proofs/src/plonk.rs | 5 +- 19 files changed, 684 insertions(+), 692 deletions(-) delete mode 100644 halo2_middleware/src/plonk.rs diff --git a/halo2_common/src/circuit.rs b/halo2_common/src/circuit.rs index 5b50a3d138..c0e1addcc1 100644 --- a/halo2_common/src/circuit.rs +++ b/halo2_common/src/circuit.rs @@ -9,7 +9,7 @@ use crate::plonk::{ Error, Selector, TableColumn, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; mod value; pub use value::Value; diff --git a/halo2_common/src/circuit/floor_planner/single_pass.rs b/halo2_common/src/circuit/floor_planner/single_pass.rs index 18a04582f5..d9de8cbf55 100644 --- a/halo2_common/src/circuit/floor_planner/single_pass.rs +++ b/halo2_common/src/circuit/floor_planner/single_pass.rs @@ -14,7 +14,7 @@ use crate::{ plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; /// A simple [`FloorPlanner`] that performs minimal optimizations. 
/// diff --git a/halo2_common/src/circuit/floor_planner/v1.rs b/halo2_common/src/circuit/floor_planner/v1.rs index 820af78c7c..fbb39c0a2f 100644 --- a/halo2_common/src/circuit/floor_planner/v1.rs +++ b/halo2_common/src/circuit/floor_planner/v1.rs @@ -11,7 +11,7 @@ use crate::{ plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; pub mod strategy; diff --git a/halo2_common/src/circuit/layouter.rs b/halo2_common/src/circuit/layouter.rs index 37cbdb6744..d151e3ead9 100644 --- a/halo2_common/src/circuit/layouter.rs +++ b/halo2_common/src/circuit/layouter.rs @@ -10,7 +10,7 @@ pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; use crate::plonk::{circuit::Column, Error, Selector}; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. 
#[cfg(feature = "thread-safe-region")] diff --git a/halo2_common/src/circuit/table_layouter.rs b/halo2_common/src/circuit/table_layouter.rs index ae7e7551f3..ecf6e7a0a9 100644 --- a/halo2_common/src/circuit/table_layouter.rs +++ b/halo2_common/src/circuit/table_layouter.rs @@ -8,7 +8,7 @@ use std::{ use halo2_middleware::ff::Field; use crate::plonk::{Assignment, Error, TableColumn, TableError}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; use super::Value; diff --git a/halo2_common/src/circuit/value.rs b/halo2_common/src/circuit/value.rs index e7118fc51b..406bd00aa9 100644 --- a/halo2_common/src/circuit/value.rs +++ b/halo2_common/src/circuit/value.rs @@ -4,7 +4,7 @@ use std::ops::{Add, Mul, Neg, Sub}; use group::ff::Field; use crate::plonk::Error; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; /// A value that might exist within a circuit. /// @@ -642,7 +642,7 @@ impl Value { /// ``` /// # use halo2curves::pasta::pallas::Base as F; /// use halo2_common::circuit::Value; - /// use halo2_middleware::plonk::Assigned; + /// use crate::plonk::Assigned; /// /// let v = Value::known(F::from(2)); /// let v: Value> = v.into(); diff --git a/halo2_common/src/plonk.rs b/halo2_common/src/plonk.rs index 04ade4764f..e58952346a 100644 --- a/halo2_common/src/plonk.rs +++ b/halo2_common/src/plonk.rs @@ -8,7 +8,9 @@ use crate::plonk::circuit::Column; use crate::transcript::ChallengeScalar; use halo2_middleware::circuit::{Advice, Fixed, Instance}; +use halo2_middleware::ff::Field; use halo2_middleware::poly::Rotation; +use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; pub mod circuit; pub mod error; @@ -21,6 +23,369 @@ pub use circuit::*; pub use error::*; pub use keygen::*; +/// A value assigned to a cell within a circuit. +/// +/// Stored as a fraction, so the backend can use batch inversion. +/// +/// A denominator of zero maps to an assigned value of zero. 
+#[derive(Clone, Copy, Debug)] +pub enum Assigned { + /// The field element zero. + Zero, + /// A value that does not require inversion to evaluate. + Trivial(F), + /// A value stored as a fraction to enable batch inversion. + Rational(F, F), +} + +impl From<&Assigned> for Assigned { + fn from(val: &Assigned) -> Self { + *val + } +} + +impl From<&F> for Assigned { + fn from(numerator: &F) -> Self { + Assigned::Trivial(*numerator) + } +} + +impl From for Assigned { + fn from(numerator: F) -> Self { + Assigned::Trivial(numerator) + } +} + +impl From<(F, F)> for Assigned { + fn from((numerator, denominator): (F, F)) -> Self { + Assigned::Rational(numerator, denominator) + } +} + +impl PartialEq for Assigned { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + // At least one side is directly zero. + (Self::Zero, Self::Zero) => true, + (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + x.is_zero_vartime() + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, + (Self::Trivial(x), Self::Rational(numerator, denominator)) + | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { + &(*x * denominator) == numerator + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, + } + } +} + +impl Eq for Assigned {} + +impl Neg for Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + match self { + Self::Zero => Self::Zero, + Self::Trivial(numerator) => Self::Trivial(-numerator), + Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), + } + } +} + +impl Neg for &Assigned { + type Output = Assigned; + fn neg(self) -> Self::Output { + -*self + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + // One side is directly zero. + (Self::Zero, _) => rhs, + (_, Self::Zero) => self, + + // One side is x/0 which maps to zero. + (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) + if denominator.is_zero_vartime() => + { + other + } + + // Okay, we need to do some actual math... 
+ (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator + denominator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Add for Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + self + Self::Trivial(rhs) + } +} + +impl Add for &Assigned { + type Output = Assigned; + fn add(self, rhs: F) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for Assigned { + type Output = Assigned; + fn add(self, rhs: &Self) -> Assigned { + self + *rhs + } +} + +impl Add> for &Assigned { + type Output = Assigned; + fn add(self, rhs: Assigned) -> Assigned { + *self + rhs + } +} + +impl Add<&Assigned> for &Assigned { + type Output = Assigned; + fn add(self, rhs: &Assigned) -> Assigned { + *self + *rhs + } +} + +impl AddAssign for Assigned { + fn add_assign(&mut self, rhs: Self) { + *self = *self + rhs; + } +} + +impl AddAssign<&Assigned> for Assigned { + fn add_assign(&mut self, rhs: &Self) { + *self = *self + rhs; + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + self + (-rhs) + } +} + +impl Sub for Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + self + (-rhs) + } +} + +impl Sub for &Assigned { + type Output = Assigned; + fn sub(self, rhs: F) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for Assigned { + type Output = Assigned; + fn sub(self, rhs: &Self) -> Assigned { + self - *rhs + } +} + +impl Sub> for &Assigned { + type Output = Assigned; + fn sub(self, rhs: Assigned) -> Assigned { + *self - rhs + } +} + +impl Sub<&Assigned> for &Assigned { + type 
Output = Assigned; + fn sub(self, rhs: &Assigned) -> Assigned { + *self - *rhs + } +} + +impl SubAssign for Assigned { + fn sub_assign(&mut self, rhs: Self) { + *self = *self - rhs; + } +} + +impl SubAssign<&Assigned> for Assigned { + fn sub_assign(&mut self, rhs: &Self) { + *self = *self - rhs; + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: Assigned) -> Assigned { + match (self, rhs) { + (Self::Zero, _) | (_, Self::Zero) => Self::Zero, + (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), + (Self::Rational(numerator, denominator), Self::Trivial(other)) + | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { + Self::Rational(numerator * other, denominator) + } + ( + Self::Rational(lhs_numerator, lhs_denominator), + Self::Rational(rhs_numerator, rhs_denominator), + ) => Self::Rational( + lhs_numerator * rhs_numerator, + lhs_denominator * rhs_denominator, + ), + } + } +} + +impl Mul for Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + self * Self::Trivial(rhs) + } +} + +impl Mul for &Assigned { + type Output = Assigned; + fn mul(self, rhs: F) -> Assigned { + *self * rhs + } +} + +impl Mul<&Assigned> for Assigned { + type Output = Assigned; + fn mul(self, rhs: &Assigned) -> Assigned { + self * *rhs + } +} + +impl MulAssign for Assigned { + fn mul_assign(&mut self, rhs: Self) { + *self = *self * rhs; + } +} + +impl MulAssign<&Assigned> for Assigned { + fn mul_assign(&mut self, rhs: &Self) { + *self = *self * rhs; + } +} + +impl Assigned { + /// Returns the numerator. + pub fn numerator(&self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => *x, + Self::Rational(numerator, _) => *numerator, + } + } + + /// Returns the denominator, if non-trivial. + pub fn denominator(&self) -> Option { + match self { + Self::Zero => None, + Self::Trivial(_) => None, + Self::Rational(_, denominator) => Some(*denominator), + } + } + + /// Returns true iff this element is zero. 
+ pub fn is_zero_vartime(&self) -> bool { + match self { + Self::Zero => true, + Self::Trivial(x) => x.is_zero_vartime(), + // Assigned maps x/0 -> 0. + Self::Rational(numerator, denominator) => { + numerator.is_zero_vartime() || denominator.is_zero_vartime() + } + } + } + + /// Doubles this element. + #[must_use] + pub fn double(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.double()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.double(), *denominator) + } + } + } + + /// Squares this element. + #[must_use] + pub fn square(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Trivial(x.square()), + Self::Rational(numerator, denominator) => { + Self::Rational(numerator.square(), denominator.square()) + } + } + } + + /// Cubes this element. + #[must_use] + pub fn cube(&self) -> Self { + self.square() * self + } + + /// Inverts this assigned value (taking the inverse of zero to be zero). + pub fn invert(&self) -> Self { + match self { + Self::Zero => Self::Zero, + Self::Trivial(x) => Self::Rational(F::ONE, *x), + Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), + } + } + + /// Evaluates this assigned value directly, performing an unbatched inversion if + /// necessary. + /// + /// If the denominator is zero, this returns zero. 
+ pub fn evaluate(self) -> F { + match self { + Self::Zero => F::ZERO, + Self::Trivial(x) => x, + Self::Rational(numerator, denominator) => { + if denominator == F::ONE { + numerator + } else { + numerator * denominator.invert().unwrap_or(F::ZERO) + } + } + } + } +} + /// List of queries (columns and rotations) used by a circuit #[derive(Debug, Clone)] pub struct Queries { @@ -98,3 +463,302 @@ pub type ChallengeY = ChallengeScalar; #[derive(Clone, Copy, Debug)] pub struct X; pub type ChallengeX = ChallengeScalar; + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::Assigned; + // We use (numerator, denominator) in the comments below to denote a rational. + #[test] + fn add_trivial_to_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // 2 + (1,0) = 2 + 0 = 2 + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn add_rational_to_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) + (1,0) = (1,2) + 0 = (1,2) + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn sub_trivial_from_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - 2 = 0 - 2 = -2 + // This fails if subtraction is implemented using normal rules for rationals. 
+ assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // 2 - (1,0) = 2 - 0 = 2 + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn sub_rational_from_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - (1,2) = 0 - (1,2) = -(1,2) + // This fails if subtraction is implemented using normal rules for rationals. + assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // (1,2) - (1,0) = (1,2) - 0 = (1,2) + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn mul_rational_by_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) * (1,0) = (1,2) * 0 = 0 + assert_eq!((a * b).evaluate(), Fp::zero()); + + // (1,0) * (1,2) = 0 * (1,2) = 0 + assert_eq!((b * a).evaluate(), Fp::zero()); + } +} + +#[cfg(test)] +mod proptests { + use std::{ + cmp, + ops::{Add, Mul, Neg, Sub}, + }; + + use group::ff::Field; + use halo2curves::pasta::Fp; + use proptest::{collection::vec, prelude::*, sample::select}; + + use super::Assigned; + + trait UnaryOperand: Neg { + fn double(&self) -> Self; + fn square(&self) -> Self; + fn cube(&self) -> Self; + fn inv0(&self) -> Self; + } + + impl UnaryOperand for F { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert().unwrap_or(F::ZERO) + } + } + + impl UnaryOperand for Assigned { + fn double(&self) -> Self { + self.double() + } + + fn square(&self) -> Self { + self.square() + } + + fn cube(&self) -> Self { + self.cube() + } + + fn inv0(&self) -> Self { + self.invert() + } + } + + #[derive(Clone, Debug)] + enum UnaryOperator { + Neg, + Double, + Square, + Cube, + Inv0, + } + + const UNARY_OPERATORS: &[UnaryOperator] = &[ + UnaryOperator::Neg, + 
UnaryOperator::Double, + UnaryOperator::Square, + UnaryOperator::Cube, + UnaryOperator::Inv0, + ]; + + impl UnaryOperator { + fn apply(&self, a: F) -> F { + match self { + Self::Neg => -a, + Self::Double => a.double(), + Self::Square => a.square(), + Self::Cube => a.cube(), + Self::Inv0 => a.inv0(), + } + } + } + + trait BinaryOperand: Sized + Add + Sub + Mul {} + impl BinaryOperand for F {} + impl BinaryOperand for Assigned {} + + #[derive(Clone, Debug)] + enum BinaryOperator { + Add, + Sub, + Mul, + } + + const BINARY_OPERATORS: &[BinaryOperator] = &[ + BinaryOperator::Add, + BinaryOperator::Sub, + BinaryOperator::Mul, + ]; + + impl BinaryOperator { + fn apply(&self, a: F, b: F) -> F { + match self { + Self::Add => a + b, + Self::Sub => a - b, + Self::Mul => a * b, + } + } + } + + #[derive(Clone, Debug)] + enum Operator { + Unary(UnaryOperator), + Binary(BinaryOperator), + } + + prop_compose! { + /// Use narrow that can be easily reduced. + fn arb_element()(val in any::()) -> Fp { + Fp::from(val) + } + } + + prop_compose! { + fn arb_trivial()(element in arb_element()) -> Assigned { + Assigned::Trivial(element) + } + } + + prop_compose! { + /// Generates half of the denominators as zero to represent a deferred inversion. + fn arb_rational()( + numerator in arb_element(), + denominator in prop_oneof![ + 1 => Just(Fp::zero()), + 2 => arb_element(), + ], + ) -> Assigned { + Assigned::Rational(numerator, denominator) + } + } + + prop_compose! { + fn arb_operators(num_unary: usize, num_binary: usize)( + unary in vec(select(UNARY_OPERATORS), num_unary), + binary in vec(select(BINARY_OPERATORS), num_binary), + ) -> Vec { + unary.into_iter() + .map(Operator::Unary) + .chain(binary.into_iter().map(Operator::Binary)) + .collect() + } + } + + prop_compose! 
{ + fn arb_testcase()( + num_unary in 0usize..5, + num_binary in 0usize..5, + )( + values in vec( + prop_oneof![ + 1 => Just(Assigned::Zero), + 2 => arb_trivial(), + 2 => arb_rational(), + ], + // Ensure that: + // - we have at least one value to apply unary operators to. + // - we can apply every binary operator pairwise sequentially. + cmp::max(usize::from(num_unary > 0), num_binary + 1)), + operations in arb_operators(num_unary, num_binary).prop_shuffle(), + ) -> (Vec>, Vec) { + (values, operations) + } + } + + proptest! { + #[test] + fn operation_commutativity((values, operations) in arb_testcase()) { + // Evaluate the values at the start. + let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); + + // Apply the operations to both the deferred and evaluated values. + fn evaluate( + items: Vec, + operators: &[Operator], + ) -> F { + let mut ops = operators.iter(); + + // Process all binary operators. We are guaranteed to have exactly as many + // binary operators as we need calls to the reduction closure. + let mut res = items.into_iter().reduce(|mut a, b| loop { + match ops.next() { + Some(Operator::Unary(op)) => a = op.apply(a), + Some(Operator::Binary(op)) => break op.apply(a, b), + None => unreachable!(), + } + }).unwrap(); + + // Process any unary operators that weren't handled in the reduce() call + // above (either if we only had one item, or there were unary operators + // after the last binary operator). We are guaranteed to have no binary + // operators remaining at this point. + loop { + match ops.next() { + Some(Operator::Unary(op)) => res = op.apply(res), + Some(Operator::Binary(_)) => unreachable!(), + None => break res, + } + } + } + let deferred_result = evaluate(values, &operations); + let evaluated_result = evaluate(elements, &operations); + + // The two should be equal, i.e. deferred inversion should commute with the + // list of operations. 
+ assert_eq!(deferred_result.evaluate(), evaluated_result); + } + } +} diff --git a/halo2_common/src/plonk/circuit.rs b/halo2_common/src/plonk/circuit.rs index 3e1363b919..b8cd95f425 100644 --- a/halo2_common/src/plonk/circuit.rs +++ b/halo2_common/src/plonk/circuit.rs @@ -9,7 +9,7 @@ use halo2_middleware::circuit::{ }; use halo2_middleware::ff::Field; use halo2_middleware::metadata; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::HashMap; diff --git a/halo2_common/src/plonk/keygen.rs b/halo2_common/src/plonk/keygen.rs index a800e8c9a3..d8c4a43063 100644 --- a/halo2_common/src/plonk/keygen.rs +++ b/halo2_common/src/plonk/keygen.rs @@ -8,7 +8,7 @@ use super::{ }; use crate::circuit::Value; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; /// Assembly to be used in circuit synthesis. #[derive(Debug)] diff --git a/halo2_common/src/poly.rs b/halo2_common/src/poly.rs index 1e30ddd0a9..a94847eee0 100644 --- a/halo2_common/src/poly.rs +++ b/halo2_common/src/poly.rs @@ -7,7 +7,7 @@ use crate::helpers::SerdePrimeField; use crate::SerdeFormat; use group::ff::{BatchInvert, Field}; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; diff --git a/halo2_common/src/poly/domain.rs b/halo2_common/src/poly/domain.rs index e2f6b91f8d..19fcd3ef13 100644 --- a/halo2_common/src/poly/domain.rs +++ b/halo2_common/src/poly/domain.rs @@ -6,7 +6,7 @@ use crate::arithmetic::{best_fft, parallelize}; use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; use group::ff::{BatchInvert, Field}; use halo2_middleware::ff::WithSmallOrderMulGroup; -use halo2_middleware::plonk::Assigned; +use crate::plonk::Assigned; use halo2_middleware::poly::Rotation; use std::marker::PhantomData; diff --git 
a/halo2_frontend/src/circuit.rs b/halo2_frontend/src/circuit.rs index e4a2b78862..2c3db93a6e 100644 --- a/halo2_frontend/src/circuit.rs +++ b/halo2_frontend/src/circuit.rs @@ -4,13 +4,12 @@ use halo2_common::plonk::{ circuit::{Challenge, Column}, permutation, sealed::{self, SealedPhase}, - Assignment, Circuit, ConstraintSystem, Error, FirstPhase, FloorPlanner, SecondPhase, Selector, - ThirdPhase, + Assigned, Assignment, Circuit, ConstraintSystem, Error, FirstPhase, FloorPlanner, SecondPhase, + Selector, ThirdPhase, }; use halo2_common::poly::batch_invert_assigned; use halo2_middleware::circuit::{Advice, Any, CompiledCircuitV2, Fixed, Instance, PreprocessingV2}; use halo2_middleware::ff::Field; -use halo2_middleware::plonk::Assigned; use std::collections::BTreeSet; use std::collections::HashMap; use std::fmt::Debug; diff --git a/halo2_frontend/src/dev.rs b/halo2_frontend/src/dev.rs index b790a780cb..03c9fd885f 100644 --- a/halo2_frontend/src/dev.rs +++ b/halo2_frontend/src/dev.rs @@ -15,12 +15,11 @@ use halo2_common::{ circuit::{Challenge, Column}, permutation, sealed::{self, SealedPhase}, - Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, FloorPlanner, Phase, - Selector, + Assigned, Assignment, Circuit, ConstraintSystem, Error, Expression, FirstPhase, + FloorPlanner, Phase, Selector, }, }; use halo2_middleware::circuit::{Advice, Any, ColumnMid, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; use halo2_common::multicore::{ IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut, diff --git a/halo2_frontend/src/dev/cost.rs b/halo2_frontend/src/dev/cost.rs index 9452975c17..12bf6f02f2 100644 --- a/halo2_frontend/src/dev/cost.rs +++ b/halo2_frontend/src/dev/cost.rs @@ -16,11 +16,10 @@ use halo2_common::{ circuit::{layouter::RegionColumn, Value}, plonk::{ circuit::{Challenge, Column}, - Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, + Assigned, Assignment, Circuit, ConstraintSystem, Error, 
FloorPlanner, Selector, }, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; /// Measures a circuit to determine its costs, and explain what contributes to them. #[allow(dead_code)] diff --git a/halo2_frontend/src/dev/graph.rs b/halo2_frontend/src/dev/graph.rs index ed3523c553..538c76dc04 100644 --- a/halo2_frontend/src/dev/graph.rs +++ b/halo2_frontend/src/dev/graph.rs @@ -1,10 +1,9 @@ use halo2_common::plonk::{ circuit::{Circuit, Column}, - Assignment, Challenge, ConstraintSystem, Error, FloorPlanner, Selector, + Assigned, Assignment, Challenge, ConstraintSystem, Error, FloorPlanner, Selector, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use halo2_middleware::ff::Field; -use halo2_middleware::plonk::Assigned; use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; use crate::circuit::Value; diff --git a/halo2_frontend/src/dev/tfp.rs b/halo2_frontend/src/dev/tfp.rs index 6cf8150dac..6ad66d8fe3 100644 --- a/halo2_frontend/src/dev/tfp.rs +++ b/halo2_frontend/src/dev/tfp.rs @@ -9,10 +9,9 @@ use halo2_common::circuit::{ }; use halo2_common::plonk::{ circuit::{Challenge, Column}, - Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, + Assigned, Assignment, Circuit, ConstraintSystem, Error, FloorPlanner, Selector, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use halo2_middleware::plonk::Assigned; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. 
/// diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs index da9d15c18e..db9734d819 100644 --- a/halo2_middleware/src/lib.rs +++ b/halo2_middleware/src/lib.rs @@ -2,7 +2,6 @@ pub mod circuit; pub mod lookup; pub mod metadata; pub mod permutation; -pub mod plonk; pub mod poly; pub mod shuffle; diff --git a/halo2_middleware/src/plonk.rs b/halo2_middleware/src/plonk.rs deleted file mode 100644 index ea0b1c0e78..0000000000 --- a/halo2_middleware/src/plonk.rs +++ /dev/null @@ -1,665 +0,0 @@ -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; - -use ff::Field; - -/// A value assigned to a cell within a circuit. -/// -/// Stored as a fraction, so the backend can use batch inversion. -/// -/// A denominator of zero maps to an assigned value of zero. -#[derive(Clone, Copy, Debug)] -pub enum Assigned { - /// The field element zero. - Zero, - /// A value that does not require inversion to evaluate. - Trivial(F), - /// A value stored as a fraction to enable batch inversion. - Rational(F, F), -} - -impl From<&Assigned> for Assigned { - fn from(val: &Assigned) -> Self { - *val - } -} - -impl From<&F> for Assigned { - fn from(numerator: &F) -> Self { - Assigned::Trivial(*numerator) - } -} - -impl From for Assigned { - fn from(numerator: F) -> Self { - Assigned::Trivial(numerator) - } -} - -impl From<(F, F)> for Assigned { - fn from((numerator, denominator): (F, F)) -> Self { - Assigned::Rational(numerator, denominator) - } -} - -impl PartialEq for Assigned { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - // At least one side is directly zero. - (Self::Zero, Self::Zero) => true, - (Self::Zero, x) | (x, Self::Zero) => x.is_zero_vartime(), - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), x) | (x, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - x.is_zero_vartime() - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => lhs == rhs, - (Self::Trivial(x), Self::Rational(numerator, denominator)) - | (Self::Rational(numerator, denominator), Self::Trivial(x)) => { - &(*x * denominator) == numerator - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => *lhs_numerator * rhs_denominator == *lhs_denominator * rhs_numerator, - } - } -} - -impl Eq for Assigned {} - -impl Neg for Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - match self { - Self::Zero => Self::Zero, - Self::Trivial(numerator) => Self::Trivial(-numerator), - Self::Rational(numerator, denominator) => Self::Rational(-numerator, denominator), - } - } -} - -impl Neg for &Assigned { - type Output = Assigned; - fn neg(self) -> Self::Output { - -*self - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - // One side is directly zero. - (Self::Zero, _) => rhs, - (_, Self::Zero) => self, - - // One side is x/0 which maps to zero. - (Self::Rational(_, denominator), other) | (other, Self::Rational(_, denominator)) - if denominator.is_zero_vartime() => - { - other - } - - // Okay, we need to do some actual math... 
- (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs + rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator + denominator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_denominator + lhs_denominator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Add for Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - self + Self::Trivial(rhs) - } -} - -impl Add for &Assigned { - type Output = Assigned; - fn add(self, rhs: F) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for Assigned { - type Output = Assigned; - fn add(self, rhs: &Self) -> Assigned { - self + *rhs - } -} - -impl Add> for &Assigned { - type Output = Assigned; - fn add(self, rhs: Assigned) -> Assigned { - *self + rhs - } -} - -impl Add<&Assigned> for &Assigned { - type Output = Assigned; - fn add(self, rhs: &Assigned) -> Assigned { - *self + *rhs - } -} - -impl AddAssign for Assigned { - fn add_assign(&mut self, rhs: Self) { - *self = *self + rhs; - } -} - -impl AddAssign<&Assigned> for Assigned { - fn add_assign(&mut self, rhs: &Self) { - *self = *self + rhs; - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - self + (-rhs) - } -} - -impl Sub for Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - self + (-rhs) - } -} - -impl Sub for &Assigned { - type Output = Assigned; - fn sub(self, rhs: F) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for Assigned { - type Output = Assigned; - fn sub(self, rhs: &Self) -> Assigned { - self - *rhs - } -} - -impl Sub> for &Assigned { - type Output = Assigned; - fn sub(self, rhs: Assigned) -> Assigned { - *self - rhs - } -} - -impl Sub<&Assigned> for &Assigned { - type 
Output = Assigned; - fn sub(self, rhs: &Assigned) -> Assigned { - *self - *rhs - } -} - -impl SubAssign for Assigned { - fn sub_assign(&mut self, rhs: Self) { - *self = *self - rhs; - } -} - -impl SubAssign<&Assigned> for Assigned { - fn sub_assign(&mut self, rhs: &Self) { - *self = *self - rhs; - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: Assigned) -> Assigned { - match (self, rhs) { - (Self::Zero, _) | (_, Self::Zero) => Self::Zero, - (Self::Trivial(lhs), Self::Trivial(rhs)) => Self::Trivial(lhs * rhs), - (Self::Rational(numerator, denominator), Self::Trivial(other)) - | (Self::Trivial(other), Self::Rational(numerator, denominator)) => { - Self::Rational(numerator * other, denominator) - } - ( - Self::Rational(lhs_numerator, lhs_denominator), - Self::Rational(rhs_numerator, rhs_denominator), - ) => Self::Rational( - lhs_numerator * rhs_numerator, - lhs_denominator * rhs_denominator, - ), - } - } -} - -impl Mul for Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - self * Self::Trivial(rhs) - } -} - -impl Mul for &Assigned { - type Output = Assigned; - fn mul(self, rhs: F) -> Assigned { - *self * rhs - } -} - -impl Mul<&Assigned> for Assigned { - type Output = Assigned; - fn mul(self, rhs: &Assigned) -> Assigned { - self * *rhs - } -} - -impl MulAssign for Assigned { - fn mul_assign(&mut self, rhs: Self) { - *self = *self * rhs; - } -} - -impl MulAssign<&Assigned> for Assigned { - fn mul_assign(&mut self, rhs: &Self) { - *self = *self * rhs; - } -} - -impl Assigned { - /// Returns the numerator. - pub fn numerator(&self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => *x, - Self::Rational(numerator, _) => *numerator, - } - } - - /// Returns the denominator, if non-trivial. - pub fn denominator(&self) -> Option { - match self { - Self::Zero => None, - Self::Trivial(_) => None, - Self::Rational(_, denominator) => Some(*denominator), - } - } - - /// Returns true iff this element is zero. 
- pub fn is_zero_vartime(&self) -> bool { - match self { - Self::Zero => true, - Self::Trivial(x) => x.is_zero_vartime(), - // Assigned maps x/0 -> 0. - Self::Rational(numerator, denominator) => { - numerator.is_zero_vartime() || denominator.is_zero_vartime() - } - } - } - - /// Doubles this element. - #[must_use] - pub fn double(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.double()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.double(), *denominator) - } - } - } - - /// Squares this element. - #[must_use] - pub fn square(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Trivial(x.square()), - Self::Rational(numerator, denominator) => { - Self::Rational(numerator.square(), denominator.square()) - } - } - } - - /// Cubes this element. - #[must_use] - pub fn cube(&self) -> Self { - self.square() * self - } - - /// Inverts this assigned value (taking the inverse of zero to be zero). - pub fn invert(&self) -> Self { - match self { - Self::Zero => Self::Zero, - Self::Trivial(x) => Self::Rational(F::ONE, *x), - Self::Rational(numerator, denominator) => Self::Rational(*denominator, *numerator), - } - } - - /// Evaluates this assigned value directly, performing an unbatched inversion if - /// necessary. - /// - /// If the denominator is zero, this returns zero. - pub fn evaluate(self) -> F { - match self { - Self::Zero => F::ZERO, - Self::Trivial(x) => x, - Self::Rational(numerator, denominator) => { - if denominator == F::ONE { - numerator - } else { - numerator * denominator.invert().unwrap_or(F::ZERO) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::Assigned; - // We use (numerator, denominator) in the comments below to denote a rational. 
- #[test] - fn add_trivial_to_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // 2 + (1,0) = 2 + 0 = 2 - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn add_rational_to_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) + (1,0) = (1,2) + 0 = (1,2) - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn sub_trivial_from_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - 2 = 0 - 2 = -2 - // This fails if subtraction is implemented using normal rules for rationals. - assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // 2 - (1,0) = 2 - 0 = 2 - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn sub_rational_from_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - (1,2) = 0 - (1,2) = -(1,2) - // This fails if subtraction is implemented using normal rules for rationals. 
- assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // (1,2) - (1,0) = (1,2) - 0 = (1,2) - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn mul_rational_by_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) * (1,0) = (1,2) * 0 = 0 - assert_eq!((a * b).evaluate(), Fp::zero()); - - // (1,0) * (1,2) = 0 * (1,2) = 0 - assert_eq!((b * a).evaluate(), Fp::zero()); - } -} - -#[cfg(test)] -mod proptests { - use std::{ - cmp, - ops::{Add, Mul, Neg, Sub}, - }; - - use group::ff::Field; - use halo2curves::pasta::Fp; - use proptest::{collection::vec, prelude::*, sample::select}; - - use super::Assigned; - - trait UnaryOperand: Neg { - fn double(&self) -> Self; - fn square(&self) -> Self; - fn cube(&self) -> Self; - fn inv0(&self) -> Self; - } - - impl UnaryOperand for F { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert().unwrap_or(F::ZERO) - } - } - - impl UnaryOperand for Assigned { - fn double(&self) -> Self { - self.double() - } - - fn square(&self) -> Self { - self.square() - } - - fn cube(&self) -> Self { - self.cube() - } - - fn inv0(&self) -> Self { - self.invert() - } - } - - #[derive(Clone, Debug)] - enum UnaryOperator { - Neg, - Double, - Square, - Cube, - Inv0, - } - - const UNARY_OPERATORS: &[UnaryOperator] = &[ - UnaryOperator::Neg, - UnaryOperator::Double, - UnaryOperator::Square, - UnaryOperator::Cube, - UnaryOperator::Inv0, - ]; - - impl UnaryOperator { - fn apply(&self, a: F) -> F { - match self { - Self::Neg => -a, - Self::Double => a.double(), - Self::Square => a.square(), - Self::Cube => a.cube(), - Self::Inv0 => a.inv0(), - } - } - } - - trait BinaryOperand: Sized + Add + Sub + Mul {} - impl BinaryOperand for F {} - impl BinaryOperand for Assigned {} - - #[derive(Clone, Debug)] - 
enum BinaryOperator { - Add, - Sub, - Mul, - } - - const BINARY_OPERATORS: &[BinaryOperator] = &[ - BinaryOperator::Add, - BinaryOperator::Sub, - BinaryOperator::Mul, - ]; - - impl BinaryOperator { - fn apply(&self, a: F, b: F) -> F { - match self { - Self::Add => a + b, - Self::Sub => a - b, - Self::Mul => a * b, - } - } - } - - #[derive(Clone, Debug)] - enum Operator { - Unary(UnaryOperator), - Binary(BinaryOperator), - } - - prop_compose! { - /// Use narrow that can be easily reduced. - fn arb_element()(val in any::()) -> Fp { - Fp::from(val) - } - } - - prop_compose! { - fn arb_trivial()(element in arb_element()) -> Assigned { - Assigned::Trivial(element) - } - } - - prop_compose! { - /// Generates half of the denominators as zero to represent a deferred inversion. - fn arb_rational()( - numerator in arb_element(), - denominator in prop_oneof![ - 1 => Just(Fp::zero()), - 2 => arb_element(), - ], - ) -> Assigned { - Assigned::Rational(numerator, denominator) - } - } - - prop_compose! { - fn arb_operators(num_unary: usize, num_binary: usize)( - unary in vec(select(UNARY_OPERATORS), num_unary), - binary in vec(select(BINARY_OPERATORS), num_binary), - ) -> Vec { - unary.into_iter() - .map(Operator::Unary) - .chain(binary.into_iter().map(Operator::Binary)) - .collect() - } - } - - prop_compose! { - fn arb_testcase()( - num_unary in 0usize..5, - num_binary in 0usize..5, - )( - values in vec( - prop_oneof![ - 1 => Just(Assigned::Zero), - 2 => arb_trivial(), - 2 => arb_rational(), - ], - // Ensure that: - // - we have at least one value to apply unary operators to. - // - we can apply every binary operator pairwise sequentially. - cmp::max(usize::from(num_unary > 0), num_binary + 1)), - operations in arb_operators(num_unary, num_binary).prop_shuffle(), - ) -> (Vec>, Vec) { - (values, operations) - } - } - - proptest! { - #[test] - fn operation_commutativity((values, operations) in arb_testcase()) { - // Evaluate the values at the start. 
- let elements: Vec<_> = values.iter().cloned().map(|v| v.evaluate()).collect(); - - // Apply the operations to both the deferred and evaluated values. - fn evaluate( - items: Vec, - operators: &[Operator], - ) -> F { - let mut ops = operators.iter(); - - // Process all binary operators. We are guaranteed to have exactly as many - // binary operators as we need calls to the reduction closure. - let mut res = items.into_iter().reduce(|mut a, b| loop { - match ops.next() { - Some(Operator::Unary(op)) => a = op.apply(a), - Some(Operator::Binary(op)) => break op.apply(a, b), - None => unreachable!(), - } - }).unwrap(); - - // Process any unary operators that weren't handled in the reduce() call - // above (either if we only had one item, or there were unary operators - // after the last binary operator). We are guaranteed to have no binary - // operators remaining at this point. - loop { - match ops.next() { - Some(Operator::Unary(op)) => res = op.apply(res), - Some(Operator::Binary(_)) => unreachable!(), - None => break res, - } - } - } - let deferred_result = evaluate(values, &operations); - let evaluated_result = evaluate(elements, &operations); - - // The two should be equal, i.e. deferred inversion should commute with the - // list of operations. 
- assert_eq!(deferred_result.evaluate(), evaluated_result); - } - } -} diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 201a342ce7..1cfb9ec4e4 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -19,8 +19,7 @@ pub use verifier::verify_proof; pub use halo2_backend::plonk::{ProvingKey, VerifyingKey}; pub use halo2_common::plonk::{ circuit::{Challenge, Column}, - Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector, TableColumn, - ThirdPhase, + Assigned, Circuit, ConstraintSystem, Error, Expression, FirstPhase, SecondPhase, Selector, + TableColumn, ThirdPhase, }; pub use halo2_middleware::circuit::{Advice, Fixed, Instance}; -pub use halo2_middleware::plonk::Assigned; From 490cd8742407f225385bdcbd53e4a469594fa728 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 6 Feb 2024 12:16:54 +0000 Subject: [PATCH 78/79] Run cargo fmt --- halo2_common/src/circuit.rs | 2 +- halo2_common/src/circuit/floor_planner/single_pass.rs | 2 +- halo2_common/src/circuit/floor_planner/v1.rs | 2 +- halo2_common/src/circuit/layouter.rs | 2 +- halo2_common/src/circuit/table_layouter.rs | 2 +- halo2_common/src/circuit/value.rs | 2 +- halo2_common/src/plonk/circuit.rs | 2 +- halo2_common/src/plonk/keygen.rs | 2 +- halo2_common/src/poly.rs | 2 +- halo2_common/src/poly/domain.rs | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/halo2_common/src/circuit.rs b/halo2_common/src/circuit.rs index c0e1addcc1..0646f01416 100644 --- a/halo2_common/src/circuit.rs +++ b/halo2_common/src/circuit.rs @@ -4,12 +4,12 @@ use std::{fmt, marker::PhantomData}; use halo2_middleware::ff::Field; +use crate::plonk::Assigned; use crate::plonk::{ circuit::{Challenge, Column}, Error, Selector, TableColumn, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use crate::plonk::Assigned; mod value; pub use value::Value; diff --git a/halo2_common/src/circuit/floor_planner/single_pass.rs 
b/halo2_common/src/circuit/floor_planner/single_pass.rs index d9de8cbf55..66de896307 100644 --- a/halo2_common/src/circuit/floor_planner/single_pass.rs +++ b/halo2_common/src/circuit/floor_planner/single_pass.rs @@ -5,6 +5,7 @@ use std::marker::PhantomData; use halo2_middleware::ff::Field; +use crate::plonk::Assigned; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, @@ -14,7 +15,6 @@ use crate::{ plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use crate::plonk::Assigned; /// A simple [`FloorPlanner`] that performs minimal optimizations. /// diff --git a/halo2_common/src/circuit/floor_planner/v1.rs b/halo2_common/src/circuit/floor_planner/v1.rs index fbb39c0a2f..e7709687a1 100644 --- a/halo2_common/src/circuit/floor_planner/v1.rs +++ b/halo2_common/src/circuit/floor_planner/v1.rs @@ -2,6 +2,7 @@ use std::fmt; use halo2_middleware::ff::Field; +use crate::plonk::Assigned; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, @@ -11,7 +12,6 @@ use crate::{ plonk::{circuit::Challenge, Assignment, Circuit, Error, FloorPlanner, Selector, TableColumn}, }; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use crate::plonk::Assigned; pub mod strategy; diff --git a/halo2_common/src/circuit/layouter.rs b/halo2_common/src/circuit/layouter.rs index d151e3ead9..85d94bf74e 100644 --- a/halo2_common/src/circuit/layouter.rs +++ b/halo2_common/src/circuit/layouter.rs @@ -8,9 +8,9 @@ use halo2_middleware::ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; +use crate::plonk::Assigned; use crate::plonk::{circuit::Column, Error, Selector}; use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; -use crate::plonk::Assigned; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are 
enabled. #[cfg(feature = "thread-safe-region")] diff --git a/halo2_common/src/circuit/table_layouter.rs b/halo2_common/src/circuit/table_layouter.rs index ecf6e7a0a9..b3087c9d60 100644 --- a/halo2_common/src/circuit/table_layouter.rs +++ b/halo2_common/src/circuit/table_layouter.rs @@ -7,8 +7,8 @@ use std::{ use halo2_middleware::ff::Field; -use crate::plonk::{Assignment, Error, TableColumn, TableError}; use crate::plonk::Assigned; +use crate::plonk::{Assignment, Error, TableColumn, TableError}; use super::Value; diff --git a/halo2_common/src/circuit/value.rs b/halo2_common/src/circuit/value.rs index 406bd00aa9..d48d3220d1 100644 --- a/halo2_common/src/circuit/value.rs +++ b/halo2_common/src/circuit/value.rs @@ -3,8 +3,8 @@ use std::ops::{Add, Mul, Neg, Sub}; use group::ff::Field; -use crate::plonk::Error; use crate::plonk::Assigned; +use crate::plonk::Error; /// A value that might exist within a circuit. /// diff --git a/halo2_common/src/plonk/circuit.rs b/halo2_common/src/plonk/circuit.rs index b8cd95f425..69c09ca9b5 100644 --- a/halo2_common/src/plonk/circuit.rs +++ b/halo2_common/src/plonk/circuit.rs @@ -1,6 +1,7 @@ use super::{lookup, permutation, shuffle, Error, Queries}; use crate::circuit::layouter::SyncDeps; use crate::circuit::{Layouter, Region, Value}; +use crate::plonk::Assigned; use core::cmp::max; use core::ops::{Add, Mul}; use halo2_middleware::circuit::{ @@ -9,7 +10,6 @@ use halo2_middleware::circuit::{ }; use halo2_middleware::ff::Field; use halo2_middleware::metadata; -use crate::plonk::Assigned; use halo2_middleware::poly::Rotation; use sealed::SealedPhase; use std::collections::HashMap; diff --git a/halo2_common/src/plonk/keygen.rs b/halo2_common/src/plonk/keygen.rs index d8c4a43063..4e53c01110 100644 --- a/halo2_common/src/plonk/keygen.rs +++ b/halo2_common/src/plonk/keygen.rs @@ -7,8 +7,8 @@ use super::{ permutation, Error, }; use crate::circuit::Value; -use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; use crate::plonk::Assigned; 
+use halo2_middleware::circuit::{Advice, Any, Fixed, Instance}; /// Assembly to be used in circuit synthesis. #[derive(Debug)] diff --git a/halo2_common/src/poly.rs b/halo2_common/src/poly.rs index a94847eee0..e02423be89 100644 --- a/halo2_common/src/poly.rs +++ b/halo2_common/src/poly.rs @@ -6,8 +6,8 @@ use crate::arithmetic::parallelize; use crate::helpers::SerdePrimeField; use crate::SerdeFormat; -use group::ff::{BatchInvert, Field}; use crate::plonk::Assigned; +use group::ff::{BatchInvert, Field}; use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; diff --git a/halo2_common/src/poly/domain.rs b/halo2_common/src/poly/domain.rs index 19fcd3ef13..2219ed1a8a 100644 --- a/halo2_common/src/poly/domain.rs +++ b/halo2_common/src/poly/domain.rs @@ -4,9 +4,9 @@ use crate::arithmetic::{best_fft, parallelize}; use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; +use crate::plonk::Assigned; use group::ff::{BatchInvert, Field}; use halo2_middleware::ff::WithSmallOrderMulGroup; -use crate::plonk::Assigned; use halo2_middleware::poly::Rotation; use std::marker::PhantomData; From e2efca51d92a805d161f1a9209b84ab317db1882 Mon Sep 17 00:00:00 2001 From: Eduard S Date: Tue, 6 Feb 2024 13:09:03 +0000 Subject: [PATCH 79/79] Fix doc example import --- halo2_common/src/circuit/value.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/halo2_common/src/circuit/value.rs b/halo2_common/src/circuit/value.rs index d48d3220d1..c35e9dc4ad 100644 --- a/halo2_common/src/circuit/value.rs +++ b/halo2_common/src/circuit/value.rs @@ -642,7 +642,7 @@ impl Value { /// ``` /// # use halo2curves::pasta::pallas::Base as F; /// use halo2_common::circuit::Value; - /// use crate::plonk::Assigned; + /// use halo2_common::plonk::Assigned; /// /// let v = Value::known(F::from(2)); /// let v: Value> = v.into();