diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5d92f02ffb..8d99826362 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,6 +8,36 @@ on: - main jobs: + + fmt: + name: Rustfmt + timeout-minutes: 30 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions-rs/toolchain@v1 + with: + override: false + - run: rustup component add rustfmt + - uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + + typos-check: + needs: [skip_check] + if: | + github.event.pull_request.draft == false && + (github.event.action == 'ready_for_review' || needs.skip_check.outputs.should_skip != 'true') + name: TyposCheck + timeout-minutes: 5 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: crate-ci/typos@v1.22.7 + with: + config: ./typos.toml + isolated: true test: name: Test on ${{ matrix.os }} with ${{ matrix.feature_set }} features runs-on: ${{ matrix.os }} @@ -101,18 +131,4 @@ jobs: command: doc args: --all --document-private-items - fmt: - name: Rustfmt - timeout-minutes: 30 - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions-rs/toolchain@v1 - with: - override: false - - run: rustup component add rustfmt - - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all -- --check diff --git a/README.md b/README.md index 432b986072..da609a1395 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ This repository contains the [halo2](https://github.com/zcash/halo2) fork from PSE and includes contributions from the community. We use the `main` branch for development, which means it may contain -unstable/unfinished features. For end-users we recomend using the tag releases +unstable/unfinished features. For end-users we recommend using the tag releases which can be seen as curated checkpoints with some level of guarantee of stability. 
diff --git a/book/src/user/experimental-features.md b/book/src/user/experimental-features.md index 5677fd38b9..b64b9cbc2c 100644 --- a/book/src/user/experimental-features.md +++ b/book/src/user/experimental-features.md @@ -24,11 +24,11 @@ When using `create_proof` and `verify_proof`, we need to specify the commitment create_proof, ProverIPA<_>, _, _, _, _> verify_proof, ProverIPA<_>, _, _, _> -// Using KZG with GWC19 mutli-open strategy +// Using KZG with GWC19 multi-open strategy create_proof, ProverGWC<_>, _, _, _, _> verify_proof, ProverGWC<_>, _, _, _> -// Using KZG with BDFG20 mutli-open strategy +// Using KZG with BDFG20 multi-open strategy create_proof, ProverSHPLONK<_>, _, _, _, _> verify_proof, ProverSHPLONK<_>, _, _, _> ``` diff --git a/halo2/Cargo.toml b/halo2/Cargo.toml index 74e909bef0..16a610cb0e 100644 --- a/halo2/Cargo.toml +++ b/halo2/Cargo.toml @@ -19,7 +19,7 @@ all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] -halo2_proofs = { version = "0.3", path = "../halo2_proofs", default-features = false } +halo2_proofs = { version = "0.4", path = "../halo2_proofs", default-features = false } [lib] bench = false diff --git a/halo2_backend/Cargo.toml b/halo2_backend/Cargo.toml index 7b24994c36..e46a42e970 100644 --- a/halo2_backend/Cargo.toml +++ b/halo2_backend/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "halo2_backend" -version = "0.3.0" +version = "0.4.0" authors = [ "Sean Bowe ", "Ying Tong Lai ", @@ -28,7 +28,7 @@ rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] backtrace = { version = "0.3", optional = true } ff = "0.13" group = "0.13" -halo2curves = { version = "0.6.1", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } rand_core = { version = "0.6", default-features = false } tracing = "0.1" blake2b_simd = "1" # MSRV 1.66.0 diff --git a/halo2_backend/src/plonk.rs b/halo2_backend/src/plonk.rs index ee34d50e3d..b4039421d7 100644 --- a/halo2_backend/src/plonk.rs +++ b/halo2_backend/src/plonk.rs @@ -117,7 +117,7 @@ where return Err(io::Error::new( io::ErrorKind::InvalidData, format!( - "circuit size value (k): {} exceeds maxium: {}", + "circuit size value (k): {} exceeds maximum: {}", k, C::Scalar::S ), diff --git a/halo2_backend/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs index 09d8b452d3..f22f6542b7 100644 --- a/halo2_backend/src/plonk/evaluation.rs +++ b/halo2_backend/src/plonk/evaluation.rs @@ -174,11 +174,11 @@ impl Calculation { /// Evaluator #[derive(Clone, Default, Debug)] pub(crate) struct Evaluator { - /// Custom gates evalution + /// Custom gates evaluation custom_gates: GraphEvaluator, - /// Lookups evalution + /// Lookups evaluation lookups: Vec>, - /// Shuffle evalution + /// Shuffle evaluation shuffles: Vec>, } diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs index 406099a723..2771ee09de 100644 --- a/halo2_backend/src/plonk/keygen.rs +++ b/halo2_backend/src/plonk/keygen.rs @@ -107,7 +107,6 @@ where } // Compute fixeds - let fixed_polys: Vec<_> = circuit .preprocessing .fixed @@ -131,13 +130,7 @@ where .map(Polynomial::new_lagrange_from_vec) .collect(); - // Compute l_0(X) - // TODO: this can be done more efficiently - // https://github.com/privacy-scaling-explorations/halo2/issues/269 - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); + let l0 = vk.domain.lagrange_extended(0usize); 
// Compute l_blind(X) which evaluates to 1 for each blinding factor row // and 0 otherwise over the domain. @@ -150,10 +143,8 @@ where // Compute l_last(X) which evaluates to 1 on the first inactive row (just // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); + let idx = params.n() as usize - vk.cs.blinding_factors() - 1; + let l_last = vk.domain.lagrange_extended(idx); // Compute l_active_row(X) let one = C::Scalar::ONE; diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs index af72d863d8..81bf3cd010 100644 --- a/halo2_backend/src/plonk/prover.rs +++ b/halo2_backend/src/plonk/prover.rs @@ -489,10 +489,10 @@ impl< /// Finalizes the proof creation. /// The following steps are performed: - /// - 1. Generate commited lookup polys - /// - 2. Generate commited permutation polys - /// - 3. Generate commited lookup polys - /// - 4. Generate commited shuffle polys + /// - 1. Generate committed lookup polys + /// - 2. Generate committed permutation polys + /// - 3. Generate committed lookup polys + /// - 4. Generate committed shuffle polys /// - 5. Commit to the vanishing argument's random polynomial /// - 6. Generate the advice polys /// - 7. Evaluate the h(X) polynomial @@ -523,7 +523,7 @@ impl< .map(|index| challenges.remove(&index).unwrap()) .collect::>(); - // 1. Generate commited ( added to transcript ) lookup polys --------------------------------------- + // 1. Generate committed ( added to transcript ) lookup polys --------------------------------------- // Sample theta challenge for keeping lookup columns linearly independent // [TRANSCRIPT-5] @@ -574,9 +574,9 @@ impl< // [TRANSCRIPT-8] let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); - // 2. Generate commited permutation polys ----------------------------------------- + // 2. Generate committed permutation polys ----------------------------------------- // [TRANSCRIPT-9] - let permutations_commited: Vec> = instances + let permutations_committed: Vec> = instances .iter() .zip(advices.iter()) .map(|(instance, advice)| { @@ -597,34 +597,35 @@ impl< }) .collect::, _>>()?; - // 3. Generate commited lookup polys ---------------------------------------------------------- + // 3. Generate committed lookup polys ---------------------------------------------------------- // [TRANSCRIPT-10] - let lookups_commited: Vec>> = permuted_lookups - .into_iter() - .map(|lookups| -> Result, _> { - // Construct and commit to products for each lookup - lookups - .into_iter() - .map(|lookup| { - lookup.commit_product( - &self.engine, - pk, - params, - beta, - gamma, - &mut rng, - self.transcript, - ) - }) - .collect::, _>>() - }) - .collect::, _>>()?; + let lookups_committed: Vec>> = + permuted_lookups + .into_iter() + .map(|lookups| -> Result, _> { + // Construct and commit to products for each lookup + lookups + .into_iter() + .map(|lookup| { + lookup.commit_product( + &self.engine, + pk, + params, + beta, + gamma, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; - // 4. Generate commited shuffle polys ------------------------------------------------------- + // 4. 
Generate committed shuffle polys ------------------------------------------------------- // [TRANSCRIPT-11] - let shuffles_commited: Vec>> = instances + let shuffles_committed: Vec>> = instances .iter() .zip(advices.iter()) .map(|(instance, advice)| -> Result, _> { @@ -703,9 +704,9 @@ impl< *beta, *gamma, *theta, - &lookups_commited, - &shuffles_commited, - &permutations_commited, + &lookups_committed, + &shuffles_committed, + &permutations_committed, ); // 8. Construct the vanishing argument's h(X) commitments -------------------------------------- @@ -796,7 +797,7 @@ impl< // Evaluate the permutations, if any, at omega^i x. // [TRANSCRIPT-21] let permutations_evaluated: Vec> = - permutations_commited + permutations_committed .into_iter() .map(|permutation| -> Result<_, _> { permutation.evaluate(pk, x, self.transcript) }) .collect::, _>>()?; @@ -804,7 +805,7 @@ impl< // Evaluate the lookups, if any, at omega^i x. // [TRANSCRIPT-22] let lookups_evaluated: Vec>> = - lookups_commited + lookups_committed .into_iter() .map(|lookups| -> Result, _> { lookups @@ -817,7 +818,7 @@ impl< // Evaluate the shuffles, if any, at omega^i x. // [TRANSCRIPT-23] let shuffles_evaluated: Vec>> = - shuffles_commited + shuffles_committed .into_iter() .map(|shuffles| -> Result, _> { shuffles diff --git a/halo2_backend/src/poly/domain.rs b/halo2_backend/src/poly/domain.rs index dabc797da4..5ca352e6c7 100644 --- a/halo2_backend/src/poly/domain.rs +++ b/halo2_backend/src/poly/domain.rs @@ -243,6 +243,67 @@ impl> EvaluationDomain { } } + // Compute L_i(X) in the extended co-domain, where + // L_i(X) is the ith Lagrange polynomial in the original domain, + // H = {1, g, g^2, ..., g^(n-1)}. + // We compute its representation in the extended co-domain + // zH = {z, z*w, z*w^2, ... , z*w^(n*k - 1)}, where k is the extension factor + // of the domain, and z is the extended root such that w^k = g. + // We assume z = F::ZETA, a cubic root of the field. This simplifies the computation. + // + // The computation uses the formula: + // L_i(X) = g^i/n * (X^n -1)/(X-g^i) + pub fn lagrange_extended(&self, idx: usize) -> Polynomial { + let one = F::ONE; + let zeta = >::ZETA; + + let n: u64 = 1 << self.k(); + let g_i = self.omega.pow_vartime([idx as u64]); + let mut lag_poly = vec![F::ZERO; self.extended_len()]; + + let w = self.get_extended_omega(); + let wn = w.pow_vartime([n]); + let zeta_n = match n % 3 { + 1 => zeta, + 2 => zeta * zeta, + _ => one, + }; + + // Compute denominators. ( n * (w^j - g_i)) + let n = F::from(n); + let n_g_i = n * g_i; + parallelize(&mut lag_poly, |e, mut index| { + let mut acc = n * zeta * w.pow_vartime([index as u64]); + for e in e { + *e = acc - n_g_i; + acc *= w; + index += 1; + } + }); + lag_poly.batch_invert(); + + // Compute numerators. + // g_i * (zeta * w^i)^n = (g_i * zeta^n) * w^(i*n) + // We use w^k = g and g^n = 1 to save multiplications. + let k = 1 << (self.extended_k() - self.k()); + let mut wn_powers = vec![zeta_n * g_i; k]; + for i in 1..k { + wn_powers[i] = wn_powers[i - 1] * wn + } + + parallelize(&mut lag_poly, |e, mut index| { + for e in e { + *e *= wn_powers[index % k] - g_i; + index += 1; + } + }); + + Polynomial { + values: lag_poly, + _marker: std::marker::PhantomData, + } + } + /// Rotate the extended domain polynomial over the original domain.
pub fn rotate_extended( &self, diff --git a/halo2_debug/Cargo.toml b/halo2_debug/Cargo.toml index 3af6c761d5..52b4417f28 100644 --- a/halo2_debug/Cargo.toml +++ b/halo2_debug/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "halo2_debug" -version = "0.3.0" +version = "0.4.0" authors = [ "Privacy Scaling Explorations team", ] @@ -22,7 +22,7 @@ rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] ff = "0.13" -halo2curves = { version = "0.6.1", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } num-bigint = "0.4.5" halo2_middleware = { path = "../halo2_middleware" } tiny-keccak = { version = "2.0.2", features=["keccak"] } @@ -32,4 +32,4 @@ rand_chacha = "0.3" rayon = "1.8" [features] -vector-tests = [] \ No newline at end of file +vector-tests = [] diff --git a/halo2_debug/src/check_witness.rs b/halo2_debug/src/check_witness.rs new file mode 100644 index 0000000000..e4299e6bf4 --- /dev/null +++ b/halo2_debug/src/check_witness.rs @@ -0,0 +1,178 @@ +use crate::display::FDisp; +use halo2_middleware::circuit::{Any, CompiledCircuit, ExpressionMid, VarMid}; +use halo2_middleware::ff::PrimeField; +use rand_chacha::ChaCha20Rng; +use rand_core::SeedableRng; +use std::collections::HashSet; + +fn rotate(n: usize, offset: usize, rotation: i32) -> usize { + let offset = offset as i32 + rotation; + if offset < 0 { + (offset + n as i32) as usize + } else if offset >= n as i32 { + (offset - n as i32) as usize + } else { + offset as usize + } +} + +struct Assignments<'a, F: PrimeField> { + public: &'a [Vec], + witness: &'a [Vec], + fixed: &'a [Vec], + blinders: &'a [Vec], + blinded: &'a [bool], + usable_rows: usize, + n: usize, +} + +impl<'a, F: PrimeField> Assignments<'a, F> { + // Query a particular Column at an offset + fn query(&self, column_type: Any, column_index: usize, offset: usize) -> F { + match column_type { + Any::Instance => self.public[column_index][offset], + Any::Advice => { + if offset >= self.usable_rows && self.blinded[column_index] { + self.blinders[column_index][offset - self.usable_rows] + } else { + self.witness[column_index][offset] + } + } + Any::Fixed => self.fixed[column_index][offset], + } + } + + // Evaluate an expression using the assignment data + fn eval(&self, expr: &ExpressionMid, offset: usize) -> F { + expr.evaluate( + &|s| s, + &|v| match v { + VarMid::Query(q) => { + let offset = rotate(self.n, offset, q.rotation.0); + self.query(q.column_type, q.column_index, offset) + } + VarMid::Challenge(_c) => unimplemented!(), + }, + &|ne| -ne, + &|a, b| a + b, + &|a, b| a * b, + ) + } + + // Evaluate multiple expressions and return the result as concatenated bytes from the field + // element representation. + fn eval_to_buf(&self, f_len: usize, exprs: &[ExpressionMid], offset: usize) -> Vec { + let mut eval_buf = Vec::with_capacity(exprs.len() * f_len); + for eval in exprs.iter().map(|e| self.eval(e, offset)) { + eval_buf.extend_from_slice(eval.to_repr().as_ref()) + } + eval_buf + } +} + +/// Check that the witness passes all the constraints defined by the circuit. Panics if any +/// constraint is not satisfied.
+pub fn check_witness( + circuit: &CompiledCircuit, + k: u32, + blinding_rows: usize, + witness: &[Vec], + public: &[Vec], +) { + let n = 2usize.pow(k); + let usable_rows = n - blinding_rows; + let cs = &circuit.cs; + + // Calculate blinding values + let mut rng = ChaCha20Rng::seed_from_u64(0xdeadbeef); + let mut blinders = vec![vec![F::ZERO; blinding_rows]; cs.num_advice_columns]; + for column_blinders in blinders.iter_mut() { + for v in column_blinders.iter_mut() { + *v = F::random(&mut rng); + } + } + + let mut blinded = vec![true; cs.num_advice_columns]; + for advice_column_index in &cs.unblinded_advice_columns { + blinded[*advice_column_index] = false; + } + + let assignments = Assignments { + public, + witness, + fixed: &circuit.preprocessing.fixed, + blinders: &blinders, + blinded: &blinded, + usable_rows, + n, + }; + + // Verify all gates + for (i, gate) in cs.gates.iter().enumerate() { + for offset in 0..n { + let res = assignments.eval(&gate.poly, offset); + if !res.is_zero_vartime() { + panic!( + "Unsatisfied gate {} \"{}\" at offset {}", + i, gate.name, offset + ); + } + } + } + + // Verify all copy constraints + for (lhs, rhs) in &circuit.preprocessing.permutation.copies { + let value_lhs = assignments.query(lhs.column.column_type, lhs.column.index, lhs.row); + let value_rhs = assignments.query(rhs.column.column_type, rhs.column.index, rhs.row); + if value_lhs != value_rhs { + panic!( + "Unsatisfied copy constraint ({:?},{:?}): {} != {}", + lhs, + rhs, + FDisp(&value_lhs), + FDisp(&value_rhs) + ) + } + } + + // Verify all lookups + let f_len = F::Repr::default().as_ref().len(); + for (i, lookup) in cs.lookups.iter().enumerate() { + let mut virtual_table = HashSet::new(); + for offset in 0..usable_rows { + let table_eval_buf = assignments.eval_to_buf(f_len, &lookup.table_expressions, offset); + virtual_table.insert(table_eval_buf); + } + for offset in 0..usable_rows { + let input_eval_buf = assignments.eval_to_buf(f_len, &lookup.input_expressions, offset); + if !virtual_table.contains(&input_eval_buf) { + panic!( + "Unsatisfied lookup {} \"{}\" at offset {}", + i, lookup.name, offset + ); + } + } + } + + // Verify all shuffles + for (i, shuffle) in cs.shuffles.iter().enumerate() { + let mut virtual_shuffle = Vec::with_capacity(usable_rows); + for offset in 0..usable_rows { + let shuffle_eval_buf = + assignments.eval_to_buf(f_len, &shuffle.shuffle_expressions, offset); + virtual_shuffle.push(shuffle_eval_buf); + } + let mut virtual_input = Vec::with_capacity(usable_rows); + for offset in 0..usable_rows { + let input_eval_buf = assignments.eval_to_buf(f_len, &shuffle.input_expressions, offset); + virtual_input.push(input_eval_buf); + } + + virtual_shuffle.sort_unstable(); + virtual_input.sort_unstable(); + + if virtual_input != virtual_shuffle { + panic!("Unsatisfied shuffle {} \"{}\"", i, shuffle.name); + } + } +} diff --git a/halo2_debug/src/lib.rs b/halo2_debug/src/lib.rs index 911e90e6df..d9343e50d4 100644 --- a/halo2_debug/src/lib.rs +++ b/halo2_debug/src/lib.rs @@ -1,3 +1,8 @@ +mod check_witness; +pub mod display; + +pub use check_witness::check_witness; + use rand_chacha::ChaCha20Rng; use rand_core::SeedableRng; use tiny_keccak::Hasher; @@ -34,5 +39,3 @@ pub fn test_result Vec + Send>(test: F, _expected: &str) -> V result } - -pub mod display; diff --git a/halo2_frontend/Cargo.toml b/halo2_frontend/Cargo.toml index 926c3a1597..cecb8896d0 100644 --- a/halo2_frontend/Cargo.toml +++ b/halo2_frontend/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "halo2_frontend" -version = 
"0.3.0" +version = "0.4.0" authors = [ "Sean Bowe ", "Ying Tong Lai ", @@ -28,7 +28,7 @@ rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] backtrace = { version = "0.3", optional = true } ff = "0.13" group = "0.13" -halo2curves = { version = "0.6.1", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } tracing = "0.1" blake2b_simd = "1" # MSRV 1.66.0 serde = { version = "1", optional = true, features = ["derive"] } diff --git a/halo2_frontend/src/circuit.rs b/halo2_frontend/src/circuit.rs index 4f67378545..48cbc4c095 100644 --- a/halo2_frontend/src/circuit.rs +++ b/halo2_frontend/src/circuit.rs @@ -1,6 +1,6 @@ //! Traits and structs for implementing circuit components. -use crate::plonk; +use crate::plonk::{self, AssignmentError}; use crate::plonk::{ permutation, sealed::{self, SealedPhase}, @@ -154,7 +154,12 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { fn query_instance(&self, column: Column, row: usize) -> Result, Error> { if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); + return Err(Error::AssignmentError(AssignmentError::QueryInstance { + col: column.into(), + row, + usable_rows: (0, self.usable_rows.end), + k: self.k, + })); } self.instances @@ -166,7 +171,7 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { fn assign_advice( &mut self, - _: A, + desc: A, column: Column, row: usize, to: V, @@ -184,7 +189,13 @@ impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { } if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); + return Err(Error::AssignmentError(AssignmentError::AssignAdvice { + desc: desc().into(), + col: column.into(), + row, + usable_rows: (0, self.usable_rows.end), + k: self.k, + })); } *self diff --git a/halo2_frontend/src/dev.rs b/halo2_frontend/src/dev.rs index 2958a32021..98657d2919 100644 --- a/halo2_frontend/src/dev.rs +++ b/halo2_frontend/src/dev.rs @@ -7,6 +7,7 @@ use std::ops::{Add, Mul, Neg, Range}; use blake2b_simd::blake2b; +use crate::plonk::AssignmentError; use crate::{ circuit, plonk::{ @@ -399,7 +400,12 @@ impl Assignment for MockProver { } } - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + fn enable_selector( + &mut self, + desc: A, + selector: &Selector, + row: usize, + ) -> Result<(), Error> where A: FnOnce() -> AR, AR: Into, @@ -408,13 +414,15 @@ impl Assignment for MockProver { return Ok(()); } - assert!( - self.usable_rows.contains(&row), - "row={} not in usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&row) { + return Err(Error::AssignmentError(AssignmentError::EnableSelector { + desc: desc().into(), + selector: *selector, + row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } // Track that this selector was enabled. We require that all selectors are enabled // inside some region (i.e. no floating selectors). 
@@ -436,13 +444,14 @@ impl Assignment for MockProver { column: Column, row: usize, ) -> Result, Error> { - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&row) { + return Err(Error::AssignmentError(AssignmentError::QueryInstance { + col: column.into(), + row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } Ok(self .instance @@ -454,7 +463,7 @@ impl Assignment for MockProver { fn assign_advice( &mut self, - _: A, + desc: A, column: Column, row: usize, to: V, @@ -466,13 +475,15 @@ impl Assignment for MockProver { AR: Into, { if self.in_phase(FirstPhase) { - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&row) { + return Err(Error::AssignmentError(AssignmentError::AssignAdvice { + desc: desc().into(), + col: column.into(), + row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } if let Some(region) = self.current_region.as_mut() { region.update_extent(column.into(), row); @@ -507,7 +518,7 @@ impl Assignment for MockProver { fn assign_fixed( &mut self, - _: A, + desc: A, column: Column, row: usize, to: V, @@ -522,13 +533,15 @@ impl Assignment for MockProver { return Ok(()); } - assert!( - self.usable_rows.contains(&row), - "row={}, usable_rows={:?}, k={}", - row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&row) { + return Err(Error::AssignmentError(AssignmentError::AssignFixed { + desc: desc().into(), + col: column.into(), + row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } if let Some(region) = self.current_region.as_mut() { region.update_extent(column.into(), row); @@ -559,14 +572,16 @@ impl Assignment for MockProver { return Ok(()); } - assert!( - self.usable_rows.contains(&left_row) && self.usable_rows.contains(&right_row), - "left_row={}, right_row={}, usable_rows={:?}, k={}", - left_row, - right_row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { + return Err(Error::AssignmentError(AssignmentError::Copy { + left_col: left_column, + left_row, + right_col: right_column, + right_row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } self.permutation .copy(left_column, left_row, right_column, right_row) @@ -582,13 +597,14 @@ impl Assignment for MockProver { return Ok(()); } - assert!( - self.usable_rows.contains(&from_row), - "row={}, usable_rows={:?}, k={}", - from_row, - self.usable_rows, - self.k, - ); + if !self.usable_rows.contains(&from_row) { + return Err(Error::AssignmentError(AssignmentError::FillFromRow { + col: col.into(), + from_row, + usable_rows: (self.usable_rows.start, self.usable_rows.end), + k: self.k, + })); + } for row in self.usable_rows.clone().skip(from_row) { self.assign_fixed(|| "", col, row, || to)?; @@ -1498,7 +1514,7 @@ mod tests { #[cfg(feature = "lookup-any-sanity-checks")] #[test] #[should_panic( - expected = "pair of tagging expressions(query of the tag columns or mutiple query combinations) should be included" + expected = "pair of tagging expressions(query of the tag columns or multiple query combinations) should be included" )] fn bad_lookup_any_not_add_tagging_pairs() { const K: u32 = 4; @@ -1991,7 +2007,7 @@ mod tests { } #[test] - fn contraint_unsatisfied() { + fn constraint_unsatisfied() { 
const K: u32 = 4; #[derive(Clone)] @@ -2234,7 +2250,7 @@ instance[0] = InstanceValue::Assigned(Fp::from(11)); assert_eq!(prover.verify(), Err(vec![err2.clone()])); - // then we modify the witness -> the contraint `squared` will fail + // then we modify the witness -> the constraint `squared` will fail let advice0 = prover.advice_mut(0); advice0[2] = CellValue::Assigned(Fp::from(10)); assert_eq!(prover.verify(), Err(vec![err1, err2])); diff --git a/halo2_frontend/src/dev/cost_model.rs b/halo2_frontend/src/dev/cost_model.rs index 86ce03800a..49bec78197 100644 --- a/halo2_frontend/src/dev/cost_model.rs +++ b/halo2_frontend/src/dev/cost_model.rs @@ -16,9 +16,9 @@ use super::MockProver; pub enum CommitmentScheme { /// Inner Product Argument commitment scheme IPA, - /// KZG with GWC19 mutli-open strategy + /// KZG with GWC19 multi-open strategy KZGGWC, - /// KZG with BDFG20 mutli-open strategy + /// KZG with BDFG20 multi-open strategy KZGSHPLONK, } diff --git a/halo2_frontend/src/plonk/circuit/constraint_system.rs b/halo2_frontend/src/plonk/circuit/constraint_system.rs index b5b6a42912..ad09077a7b 100644 --- a/halo2_frontend/src/plonk/circuit/constraint_system.rs +++ b/halo2_frontend/src/plonk/circuit/constraint_system.rs @@ -445,7 +445,7 @@ impl ConstraintSystem { panic!("all table expressions need selector/fixed query for tagging"); } if !is_tagging_exprs_pair_exists { - panic!("pair of tagging expressions(query of the tag columns or mutiple query combinations) should be included"); + panic!("pair of tagging expressions(query of the tag columns or multiple query combinations) should be included"); } } @@ -953,7 +953,7 @@ impl ConstraintSystem { tmp } - /// Helper funciotn to assert phase exists, to make sure phase-aware resources + /// Helper function to assert phase exists, to make sure phase-aware resources /// are allocated in order, and to avoid any phase to be skipped accidentally /// to cause unexpected issue in the future. fn assert_phase_exists(&self, phase: sealed::Phase, resource: &str) { diff --git a/halo2_frontend/src/plonk/error.rs b/halo2_frontend/src/plonk/error.rs index 18133f2683..89f2e97097 100644 --- a/halo2_frontend/src/plonk/error.rs +++ b/halo2_frontend/src/plonk/error.rs @@ -1,10 +1,15 @@ use std::fmt; use super::TableColumn; -use crate::plonk::Column; +use crate::plonk::{Column, Selector}; use halo2_middleware::circuit::Any; -/// This is an error that could occur during circuit synthesis. +/// This is an error that could occur during circuit synthesis. +/// +/// **NOTE**: [`AssignmentError`] is introduced to provide more debugging info +/// to developers when assigning witnesses to circuit cells. +/// Hence, it is used by [`MockProver`] and [`WitnessCollection`]. +/// The [`keygen`] process uses [`NotEnoughRowsAvailable`], since that is sufficient there. #[derive(Debug)] pub enum Error { /// This is an error that can occur during synthesis of the circuit, for @@ -27,6 +32,8 @@ pub enum Error { ColumnNotInPermutation(Column), /// An error relating to a lookup table. TableError(TableError), + /// An error relating to an `Assignment`. + AssignmentError(AssignmentError), /// Generic error not covered by previous cases Other(String), } @@ -58,6 +65,7 @@ impl fmt::Display for Error { "Column {column:?} must be included in the permutation.
Help: try applying `meta.enable_equalty` on the column", ), Error::TableError(error) => write!(f, "{error}"), + Error::AssignmentError(error) => write!(f, "{error}"), Error::Other(error) => write!(f, "Other: {error}"), } } @@ -101,3 +109,116 @@ impl fmt::Display for TableError { } } } + +/// This is an error that could occur during `assign_advice`, `assign_fixed`, `copy`, etc. +#[derive(Debug)] +pub enum AssignmentError { + AssignAdvice { + desc: String, + col: Column, + row: usize, + usable_rows: (usize, usize), + k: u32, + }, + AssignFixed { + desc: String, + col: Column, + row: usize, + usable_rows: (usize, usize), + k: u32, + }, + EnableSelector { + desc: String, + selector: Selector, + row: usize, + usable_rows: (usize, usize), + k: u32, + }, + QueryInstance { + col: Column, + row: usize, + usable_rows: (usize, usize), + k: u32, + }, + Copy { + left_col: Column, + left_row: usize, + right_col: Column, + right_row: usize, + usable_rows: (usize, usize), + k: u32, + }, + FillFromRow { + col: Column, + from_row: usize, + usable_rows: (usize, usize), + k: u32, + }, +} + +impl fmt::Display for AssignmentError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + AssignmentError::AssignAdvice { desc, col, row, usable_rows:(start, end), k } => write!( + f, + "assign_advice `{}` error: column={:?}({}), row={}, usable_rows={}..{}, k={}", + desc, + col.column_type(), + col.index(), + row, + start, end, + k, + ), + AssignmentError::AssignFixed {desc, col, row, usable_rows: (start, end), k } => write!( + f, + "assign_fixed `{}` error: column={:?}({}), row={}, usable_rows={}..{}, k={}", + desc, + col.column_type(), + col.index(), + row, + start, end, + k, + ), + AssignmentError::EnableSelector { desc, selector, row, usable_rows: (start, end), k } => write!( + f, + "enable_selector `{}` error: column=Selector({:?}), row={}, usable_rows={}..{}, k={}", + desc, + selector.index(), + row, + start, end, + k, + ), + AssignmentError::QueryInstance { col, row, usable_rows:(start, end), k } => write!( + f, + "query_instance error: column={:?}({}), row={}, usable_rows={}..{}, k={}", + col.column_type, + col.index(), + row, + start, + end, + k, + ), + AssignmentError::Copy { left_col, left_row, right_col, right_row, usable_rows:(start, end), k } => write!( + f, + "copy error: left_column={:?}({}), left_row={}, right_column={:?}({}), right_row={}, usable_rows={}..{}, k={}", + left_col.column_type(), + left_col.index(), + left_row, + right_col.column_type(), + right_col.index(), + right_row, + start, end, + k, + ), + AssignmentError::FillFromRow { col, from_row, usable_rows:(start, end), k } => write!( + f, + "fill_from_row error: column={:?}({}), from_row={}, usable_rows={}..{}, k={}", + col.column_type(), + col.index(), + from_row, + start, end, + k, + ), + } + } +} diff --git a/halo2_middleware/Cargo.toml b/halo2_middleware/Cargo.toml index 1c24c77c69..7fa7d2ac8c 100644 --- a/halo2_middleware/Cargo.toml +++ b/halo2_middleware/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "halo2_middleware" -version = "0.3.0" +version = "0.4.0" authors = [ "Sean Bowe ", "Ying Tong Lai ", @@ -26,7 +26,7 @@ rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] ff = "0.13" -halo2curves = { version = "0.6.1", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } serde_derive = { version = "1", optional = true} rayon = "1.8" diff --git a/halo2_middleware/src/zal.rs 
b/halo2_middleware/src/zal.rs index 5d376e3e5b..8ad352d79d 100644 --- a/halo2_middleware/src/zal.rs +++ b/halo2_middleware/src/zal.rs @@ -71,7 +71,7 @@ pub mod traits { // Descriptors are opaque pointers that hold the input in a format suitable for the accelerator engine. // They may be: // - Input moved on accelerator device (only once for repeated calls) - // - Endianess conversion + // - Endianness conversion // - Converting from Montgomery to Canonical form // - Input changed from Projective to Jacobian coordinates or even to a Twisted Edwards curve. // - other form of expensive preprocessing @@ -110,7 +110,7 @@ pub mod impls { use std::marker::PhantomData; use crate::zal::traits::MsmAccel; - use halo2curves::msm::best_multiexp; + use halo2curves::msm::msm_best; use halo2curves::CurveAffine; // Halo2curve Backend @@ -134,7 +134,7 @@ pub mod impls { impl MsmAccel for H2cEngine { fn msm(&self, coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - best_multiexp(coeffs, bases) + msm_best(coeffs, bases) } // Caching API @@ -156,7 +156,7 @@ pub mod impls { coeffs: &Self::CoeffsDescriptor<'_>, base: &[C], ) -> C::Curve { - best_multiexp(coeffs.raw, base) + msm_best(coeffs.raw, base) } fn msm_with_cached_base( @@ -164,7 +164,7 @@ pub mod impls { coeffs: &[C::Scalar], base: &Self::BaseDescriptor<'_>, ) -> C::Curve { - best_multiexp(coeffs, base.raw) + msm_best(coeffs, base.raw) } fn msm_with_cached_inputs( @@ -172,7 +172,7 @@ pub mod impls { coeffs: &Self::CoeffsDescriptor<'_>, base: &Self::BaseDescriptor<'_>, ) -> C::Curve { - best_multiexp(coeffs.raw, base.raw) + msm_best(coeffs.raw, base.raw) } } @@ -251,7 +251,7 @@ mod test { use crate::zal::impls::{H2cEngine, PlonkEngineConfig}; use crate::zal::traits::MsmAccel; use halo2curves::bn256::G1Affine; - use halo2curves::msm::best_multiexp; + use halo2curves::msm::msm_best; use halo2curves::CurveAffine; use ark_std::{end_timer, start_timer}; @@ -282,7 +282,7 @@ mod test { let scalars = &scalars[..1 << k]; let t0 = start_timer!(|| format!("freestanding msm k={}", k)); - let e0 = best_multiexp(scalars, points); + let e0 = msm_best(scalars, points); end_timer!(t0); let engine = PlonkEngineConfig::build_default::(); @@ -323,7 +323,7 @@ mod test { let scalars = &scalars[..1 << k]; let t0 = start_timer!(|| format!("freestanding msm k={}", k)); - let e0 = best_multiexp(scalars, points); + let e0 = msm_best(scalars, points); end_timer!(t0); let engine = PlonkEngineConfig::new() diff --git a/halo2_proofs/CHANGELOG.md b/halo2_proofs/CHANGELOG.md index a70020222b..ac6dc33c98 100644 --- a/halo2_proofs/CHANGELOG.md +++ b/halo2_proofs/CHANGELOG.md @@ -132,7 +132,7 @@ All APIs that represented witnessed values as `Option` now represent them as directly, and returns `VerificationStrategy::Output` instead of `Guard`. - `ConstraintSystem::enable_equality` and `ConstraintSystem::query_any` now take `Into>` instead of `Column` as a parameter to avoid - excesive `.into()` usage. + excessive `.into()` usage. - `Error` has been overhauled: - `Error` now implements `std::fmt::Display` and `std::error::Error`. - `Error` no longer implements `PartialEq`. 
Tests can check for specific diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index e5607f6b94..27bde55eca 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "halo2_proofs" -version = "0.3.0" +version = "0.4.0" authors = [ "Sean Bowe ", "Ying Tong Lai ", @@ -44,7 +44,7 @@ harness = false halo2_middleware = { path = "../halo2_middleware" } halo2_backend = { path = "../halo2_backend", default-features = false } halo2_frontend = { path = "../halo2_frontend", default-features = false } -halo2curves = { version = "0.6.1", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } plotters = { version = "0.3.0", default-features = false, optional = true } group = "0.13" diff --git a/halo2_proofs/benches/plonk.rs b/halo2_proofs/benches/plonk.rs index 9827fc5aa4..ce03d5171e 100644 --- a/halo2_proofs/benches/plonk.rs +++ b/halo2_proofs/benches/plonk.rs @@ -291,7 +291,7 @@ fn criterion_benchmark(c: &mut Criterion) { params, pk, &[circuit], - &[vec![vec![]]], + &[vec![]], rng, &mut transcript, ) @@ -302,7 +302,7 @@ fn criterion_benchmark(c: &mut Criterion) { fn verifier(params: &ParamsIPA, vk: &VerifyingKey, proof: &[u8]) { let strategy = SingleStrategy::new(params); let mut transcript = Blake2bRead::<_, _, Challenge255<_>>::init(proof); - assert!(verify_proof(params, vk, strategy, &[vec![vec![]]], &mut transcript).is_ok()); + assert!(verify_proof(params, vk, strategy, &[vec![]], &mut transcript).is_ok()); } let k_range = 8..=16; diff --git a/halo2_proofs/tests/compress_selectors.rs b/halo2_proofs/tests/compress_selectors.rs index 99cacab9ac..b335634f78 100644 --- a/halo2_proofs/tests/compress_selectors.rs +++ b/halo2_proofs/tests/compress_selectors.rs @@ -484,17 +484,17 @@ fn test_compress_gates() { } #[test] -fn test_success() -> Result<(), halo2_proofs::plonk::Error> { - // vk & pk keygen both WITH compress +fn test_key_compression() -> Result<(), halo2_proofs::plonk::Error> { + // vk & pk keygen both WITH compression test_result( || test_mycircuit(true, true).expect("should pass"), - "8326140d1873a91630d439a8812d1f104667144e03e0cd5c59eb358ae5d1a4eb", + "acae50508de5ead584170dd83b139daf40e1026b6debbb78eb05d515173fc2dd", ); - // vk & pk keygen both WITHOUT compress + // vk & pk keygen both WITHOUT compression test_result( || test_mycircuit(false, false).expect("should pass"), - "73dd4c3c9c51d55dc8cf68ca2b5d8acdb40ed44bc8a88d718325bc0023688f64", + "f9c99bd341705ac6a13724a526dd28df0bac1c745e0cde40ab39cab3e1b95309", ); Ok(()) @@ -502,14 +502,14 @@ fn test_success() -> Result<(), halo2_proofs::plonk::Error> { #[should_panic] #[test] -fn test_failure_1() { +fn test_key_compression_failure_1() { // vk keygen WITH compress // pk keygen WITHOUT compress assert!(test_mycircuit(false, true).is_err()); } #[test] -fn test_failure_2() { +fn test_key_compression_failure_2() { // vk keygen WITHOUT compress // pk keygen WITH compress assert!(test_mycircuit(true, false).is_err()); diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs index 6238a13e33..4988139910 100644 --- a/halo2_proofs/tests/frontend_backend_split.rs +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -43,7 +43,7 @@ struct MyCircuitConfig { s_lookup: Column, s_ltable: Column, - // A shuffle: s_shufle * [1, a[0]] shuffle_of s_stable * [1, b[0]] + // A shuffle: s_shuffle * [1, a[0]] shuffle_of s_stable * [1, b[0]] 
s_shuffle: Column, s_stable: Column, @@ -539,7 +539,7 @@ fn test_mycircuit_full_legacy() { proof }, - "427e55eafeaafd9f4dfc7ec6f782ec7464251c749bb08e23efb663790c0419ed", + "78aadfd46b5cc58b90d832ee47e4df57af3dfc28d1457c4ceeb5d0323a72f130", ); } @@ -619,6 +619,6 @@ fn test_mycircuit_full_split() { proof }, - "427e55eafeaafd9f4dfc7ec6f782ec7464251c749bb08e23efb663790c0419ed", + "78aadfd46b5cc58b90d832ee47e4df57af3dfc28d1457c4ceeb5d0323a72f130", ); } diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index d454d0292b..aa7fc08094 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -601,7 +601,7 @@ fn plonk_api() { proof }, - "b749dfa90ac3bc3d45f994cc8bf527928a274c2225e4e87668eece79938e6d12", + "f87ba1010dede5a2148ed94403ca12a566d3154ebb12ccb6c20a330e9b280af8", ); } @@ -638,7 +638,7 @@ fn plonk_api() { proof }, - "284001f93f86a5d18ad9ebff6da81031e5ad9f799ea1dc2606271a6ff240fbd3", + "0fc67d890faef0ef8ea7ef680cc566b2ab7dabef12fcceb74d3655a0fb08c708", ); } diff --git a/halo2_proofs/tests/serialization.rs b/halo2_proofs/tests/serialization.rs index 2bf9c23b89..328d637cb3 100644 --- a/halo2_proofs/tests/serialization.rs +++ b/halo2_proofs/tests/serialization.rs @@ -224,6 +224,6 @@ fn test_serialization() { proof }, - "0d3baeea90249588c3939dc2f64b071b24b7e4744c4ca8442fe4b2553aae9167", + "b51ea51140e9fbd1f0c665c788bab9e4b3e648ac674b6d07a24ca0844f0962ad", ); } diff --git a/p3_frontend/Cargo.toml b/p3_frontend/Cargo.toml index 7a7e1972ca..bc91918ae7 100644 --- a/p3_frontend/Cargo.toml +++ b/p3_frontend/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "p3_frontend" -version = "0.3.0" +version = "0.4.0" authors = [ "Privacy Scaling Explorations team", ] @@ -28,7 +28,7 @@ serde = { version = "1.0", default-features = false, features = ["derive", "allo num-bigint = { version = "0.4.3", default-features = false } [dev-dependencies] -halo2curves = { version = "0.6.0", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } halo2_backend = { path = "../halo2_backend" } serde_test = { version = "1.0" } @@ -36,3 +36,4 @@ p3-keccak-air = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } p3-keccak = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } p3-util = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } rand = "0.8.5" +halo2_debug = { path = "../halo2_debug" } diff --git a/p3_frontend/src/fwrap.rs b/p3_frontend/src/fwrap.rs index 4106027da3..fd71f39c69 100644 --- a/p3_frontend/src/fwrap.rs +++ b/p3_frontend/src/fwrap.rs @@ -239,7 +239,7 @@ impl p3PrimeField for FWrap { } // HACK: In general an `FWrap` will need more than 64 bits. This trait is only implemented in -// order to use `FWrap` with witness generation from plonky3 that requries this trait but doesn't +// order to use `FWrap` with witness generation from plonky3 that requires this trait but doesn't // use the order. Do not use an `ff::PrimeField` on a circuit that requires a 64 bit prime field // (i.e. relies on the `ORDER_U64` value), only use it on circuits that always assign less than 64 // bit values on the field elements. 
diff --git a/p3_frontend/src/lib.rs b/p3_frontend/src/lib.rs index 5ba49e25c7..01789918c9 100644 --- a/p3_frontend/src/lib.rs +++ b/p3_frontend/src/lib.rs @@ -5,8 +5,8 @@ extern crate alloc; use halo2_middleware::circuit::{ - Any, Cell, ColumnMid, CompiledCircuit, ConstraintSystemMid, ExpressionMid, GateMid, - Preprocessing, QueryMid, VarMid, + Any, Cell, ColumnMid, ConstraintSystemMid, ExpressionMid, GateMid, Preprocessing, QueryMid, + VarMid, }; use halo2_middleware::ff::{Field, PrimeField}; use halo2_middleware::permutation; @@ -144,7 +144,7 @@ where } // Check if the constraint is an equality against a public input and extract the copy constraint as -// `(advice_column_index, Location)` and `public_index`. If there's no copy constriant, return +// `(advice_column_index, Location)` and `public_index`. If there's no copy constraint, return // None. fn extract_copy_public( e: &SymbolicExpression>, @@ -184,7 +184,7 @@ fn extract_copy_public( pub fn get_public_inputs( preprocessing_info: &PreprocessingInfo, size: usize, - witness: &[Option>], + witness: &[Vec], ) -> Vec> { if preprocessing_info.num_public_values == 0 { return Vec::new(); @@ -196,7 +196,7 @@ pub fn get_public_inputs( Location::LastRow => size - 1, Location::Transition => unreachable!(), }; - public_inputs[*public_index] = witness[cell.0].as_ref().unwrap()[offset] + public_inputs[*public_index] = witness[cell.0][offset] } vec![public_inputs] } @@ -293,7 +293,7 @@ where (cs, preprocessing_info) } -pub fn trace_to_wit(k: u32, trace: RowMajorMatrix>) -> Vec>> { +pub fn trace_to_wit(k: u32, trace: RowMajorMatrix>) -> Vec> { let n = 2usize.pow(k); let num_columns = trace.width; let mut witness = vec![vec![F::ZERO; n]; num_columns]; @@ -302,56 +302,5 @@ pub fn trace_to_wit(k: u32, trace: RowMajorMatrix>) -> Vec( - circuit: &CompiledCircuit, - k: u32, - witness: &[Option>], - public: &[Vec], -) { - let n = 2usize.pow(k); - let cs = &circuit.cs; - let preprocessing = &circuit.preprocessing; - // TODO: Simulate blinding rows - // Verify all gates - for (i, gate) in cs.gates.iter().enumerate() { - for offset in 0..n { - let res = gate.poly.evaluate( - &|s| s, - &|v| match v { - VarMid::Query(q) => { - let offset = offset as i32 + q.rotation.0; - // TODO: Try to do mod n with a rust function - let offset = if offset < 0 { - (offset + n as i32) as usize - } else if offset >= n as i32 { - (offset - n as i32) as usize - } else { - offset as usize - }; - match q.column_type { - Any::Instance => public[q.column_index][offset], - Any::Advice => witness[q.column_index].as_ref().unwrap()[offset], - Any::Fixed => preprocessing.fixed[q.column_index][offset], - } - } - VarMid::Challenge(_c) => unimplemented!(), - }, - &|ne| -ne, - &|a, b| a + b, - &|a, b| a * b, - ); - if !res.is_zero_vartime() { - println!( - "Unsatisfied gate {} \"{}\" at offset {}", - i, gate.name, offset - ); - panic!("KO"); - } - } - } - println!("Check witness: OK"); + witness } diff --git a/p3_frontend/tests/common/mod.rs b/p3_frontend/tests/common/mod.rs index d762b5f2d5..5ea7e8682e 100644 --- a/p3_frontend/tests/common/mod.rs +++ b/p3_frontend/tests/common/mod.rs @@ -11,14 +11,15 @@ use halo2_backend::{ Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, }, }; +use halo2_debug::check_witness; use halo2_debug::test_rng; use halo2_middleware::circuit::CompiledCircuit; use halo2_middleware::zal::impls::H2cEngine; use halo2curves::bn256::{Bn256, Fr, G1Affine}; use p3_air::Air; use p3_frontend::{ - check_witness, compile_circuit_cs, 
compile_preprocessing, get_public_inputs, trace_to_wit, - CompileParams, FWrap, SymbolicAirBuilder, + compile_circuit_cs, compile_preprocessing, get_public_inputs, trace_to_wit, CompileParams, + FWrap, SymbolicAirBuilder, }; use p3_matrix::dense::RowMajorMatrix; use std::time::Instant; @@ -50,8 +51,12 @@ where let witness = trace_to_wit(k, trace); let pis = get_public_inputs(&preprocessing_info, size, &witness); - check_witness(&compiled_circuit, k, &witness, &pis); - (compiled_circuit, witness, pis) + check_witness(&compiled_circuit, k, 5, &witness, &pis); + ( + compiled_circuit, + witness.into_iter().map(Some).collect(), + pis, + ) } pub(crate) fn setup_prove_verify( diff --git a/typos.toml b/typos.toml new file mode 100644 index 0000000000..1561732dd3 --- /dev/null +++ b/typos.toml @@ -0,0 +1,7 @@ +[default] +extend-ignore-re=[ + "master-thm", + "[aA]dvices", + "projectives" +] +check-filename = true
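Usage sketch (illustrative, not part of the diff above): the relocated `halo2_debug::check_witness` now takes an explicit number of blinding rows, mirroring the call `check_witness(&compiled_circuit, k, 5, &witness, &pis)` in `p3_frontend/tests/common/mod.rs`. The wrapper name `debug_witness` and the figure of 5 blinding rows below are assumptions for the example, not values required by the API.

```rust
use halo2_debug::check_witness;
use halo2_middleware::circuit::CompiledCircuit;
use halo2_middleware::ff::PrimeField;

/// Panics if any gate, copy constraint, lookup or shuffle of `circuit` is
/// unsatisfied by `witness`/`public`, simulating 5 rows of random blinding
/// at the bottom of every blinded advice column.
fn debug_witness<F: PrimeField>(
    circuit: &CompiledCircuit<F>,
    k: u32,
    witness: &[Vec<F>],
    public: &[Vec<F>],
) {
    // 5 blinding rows is an arbitrary choice taken from the p3_frontend tests.
    check_witness(circuit, k, 5, witness, public);
}
```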