Upgrade PMP from upstream #113

Open · wants to merge 13 commits into base: main
114 changes: 68 additions & 46 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions Cargo.toml
@@ -49,8 +49,10 @@ blake2b_simd = { version = "1.0.2", default-features = false }
sha2 = { version = "0.10.7", default-features = false }
sha3 = { version = "0.10.0", default-features = false }

poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1" }
dusk-plonk = { git = "https://github.com/availproject/plonk.git", default-features = false, features = ["alloc"], tag = "v0.12.0-polygon-2" }
#poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", rev="0e46c89762b5531b1767e4e8e923e0ed982f0e49", default-features = false, features = ['ark-bls12-381']}
poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", rev="494468b86ac8e16dca21d465d921990c578eaad3", default-features = false, features = ['ark-bls12-381']}
#poly-multiproof = { path = "../../poly-multiproof", default-features = false, features = ['ark-bls12-381']}
dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" }

hash-db = { version = "0.16.0", default-features = false }

1 change: 1 addition & 0 deletions kate/Cargo.toml
@@ -59,6 +59,7 @@ std = [
"nalgebra/std",
"once_cell",
"parallel",
"poly-multiproof/std",
"poly-multiproof/blst",
"rand/std",
"rand_chacha/std",
11 changes: 8 additions & 3 deletions kate/examples/multiproof_verification.rs
@@ -1,3 +1,4 @@
use ark_bls12_381::Bls12_381;
use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows};
use core::num::NonZeroU16;
use hex_literal::hex;
@@ -8,6 +9,8 @@ use kate::{
Seed,
};
use kate_recovery::matrix::Dimensions;
use poly_multiproof::method1::M1NoPrecomp;
use poly_multiproof::msm::blst::BlstMSMEngine;
use poly_multiproof::traits::AsBytes;
use rand::thread_rng;
use thiserror_no_std::Error;
@@ -26,9 +29,11 @@ fn main() -> Result<(), AppError> {
}

fn multiproof_verification() -> Result<bool, AppError> {
type E = Bls12_381;
type M = BlstMSMEngine;
let target_dims = Dimensions::new_from(16, 64).unwrap();
let pp = multiproof_params(256, 256);
let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng());
let pp = multiproof_params::<Bls12_381, BlstMSMEngine>(256, 256);
let pmp = M1NoPrecomp::<E, M>::new(256, 256, &mut thread_rng());
let points = kate::gridgen::domain_points(256)?;
let exts_data = vec![
hex!("CAFEBABE00000000000000000000000000000000000000").to_vec(),
@@ -92,7 +97,7 @@ fn multiproof_verification() -> Result<bool, AppError> {
.chunks_exact(mp_block.end_x - mp_block.start_x)
.collect::<Vec<_>>();

let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof)?;
let proof = kate::pmp::method1::Proof::from_bytes(&proof)?;

let verified = pmp.verify(
&mut Transcript::new(b"avail-mp"),
63 changes: 44 additions & 19 deletions kate/src/gridgen/mod.rs
@@ -1,8 +1,10 @@
use crate::pmp::{
ark_bls12_381::{Bls12_381, Fr},
ark_poly::{EvaluationDomain, GeneralEvaluationDomain},
m1_blst::{Bls12_381, M1NoPrecomp},
merlin::Transcript,
traits::Committer,
method1::M1NoPrecomp,
traits::{Committer, MSMEngine},
Pairing,
};
use avail_core::{
app_extrinsic::AppExtrinsic, constants::kate::DATA_CHUNK_SIZE, ensure, AppId, DataLookup,
@@ -16,7 +18,7 @@ use core::{
use kate_recovery::matrix::Dimensions;
use nalgebra::base::DMatrix;
use poly_multiproof::{
m1_blst::Proof,
method1::Proof,
traits::{KZGProof, PolyMultiProofNoPrecomp},
};
use rand::Rng;
@@ -44,7 +46,7 @@ macro_rules! cfg_iter {
}

pub const SCALAR_SIZE: usize = 32;
pub type ArkScalar = crate::pmp::m1_blst::Fr;
pub type ArkScalar = Fr;
pub type Commitment = crate::pmp::Commitment<Bls12_381>;
pub use poly_multiproof::traits::AsBytes;

@@ -302,40 +304,63 @@ impl PolynomialGrid {
.and_then(|poly| srs.commit(poly).map_err(Error::MultiproofError))
}

pub fn proof(&self, srs: &M1NoPrecomp, cell: &Cell) -> Result<Proof, Error> {
pub fn proof<E: Pairing, M: MSMEngine<E = E>>(
&self,
srs: &M1NoPrecomp<E, M>,
cell: &Cell,
) -> Result<Proof<E>, Error>
where
E::ScalarField: From<ArkScalar>,
{
let x = cell.col.0 as usize;
let y = cell.row.0 as usize;
let poly = self.inner.get(y).ok_or(Error::CellLengthExceeded)?;
let witness = KZGProof::compute_witness_polynomial(srs, poly.clone(), self.points[x])?;
let poly: Vec<E::ScalarField> = self
.inner
.get(y)
.ok_or(Error::CellLengthExceeded)?
.iter()
.map(|&scalar| E::ScalarField::from(scalar))
.collect();
let witness = KZGProof::compute_witness_polynomial(srs, poly, self.points[x].into())?;
Ok(KZGProof::open(srs, witness)?)
}

pub fn multiproof(
pub fn multiproof<E: Pairing, M: MSMEngine<E = E>>(
&self,
srs: &M1NoPrecomp,
srs: &M1NoPrecomp<E, M>,
cell: &Cell,
eval_grid: &EvaluationGrid,
target_dims: Dimensions,
) -> Result<Multiproof, Error> {
) -> Result<Multiproof<E>, Error>
where
E::ScalarField: From<ArkScalar>,
{
let block = multiproof_block(
cell.col.0 as usize,
cell.row.0 as usize,
self.dims,
target_dims,
)
.ok_or(Error::CellLengthExceeded)?;
let polys = &self.inner[block.start_y..block.end_y];
let evals: Vec<Vec<ArkScalar>> = (block.start_y..block.end_y)
let polys: Vec<Vec<E::ScalarField>> = self.inner[block.start_y..block.end_y]
.iter()
.map(|row| row.iter().map(|&s| E::ScalarField::from(s)).collect())
.collect();
let evals: Vec<Vec<E::ScalarField>> = (block.start_y..block.end_y)
.map(|y| {
eval_grid.row(y).expect("Already bounds checked .qed")[block.start_x..block.end_x]
.to_vec()
.iter()
.map(|&scalar| E::ScalarField::from(scalar))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let evals_view = evals.iter().map(|row| row.as_slice()).collect::<Vec<_>>();
let points: Vec<E::ScalarField> = self.points[block.start_x..block.end_x]
.iter()
.map(|&p| E::ScalarField::from(p))
.collect();

let points = &self.points[block.start_x..block.end_x];
let mut ts = Transcript::new(b"avail-mp");
let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals_view, polys, points)
let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals, &polys, &points)
.map_err(Error::MultiproofError)?;

Ok(Multiproof {
@@ -347,9 +372,9 @@ impl PolynomialGrid {
}

#[derive(Debug, Clone)]
pub struct Multiproof {
pub proof: poly_multiproof::m1_blst::Proof,
pub evals: Vec<Vec<poly_multiproof::m1_blst::Fr>>,
pub struct Multiproof<E: Pairing> {
pub proof: Proof<E>,
pub evals: Vec<Vec<<E as Pairing>::ScalarField>>,
pub block: CellBlock,
}

4 changes: 2 additions & 2 deletions kate/src/gridgen/tests/commitments.rs
@@ -17,7 +17,7 @@ fn test_build_commitments_simple_commitment_check() {
76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, 41,
218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3,
];
let pmp_pp = crate::testnet::multiproof_params(256, 256);
let pmp_pp = crate::testnet::multiproof_params::<Bls12_381, BlstMSMEngine>(256, 256);

let evals = EvaluationGrid::from_extrinsics(
vec![AppExtrinsic::from(original_data)],
@@ -149,7 +149,7 @@ fn test_zero_deg_poly_commit(row_values: Vec<u8>) {
println!("Row: {:?}", ev.evals);

let pg = ev.make_polynomial_grid().unwrap();
let pmp = couscous::multiproof_params();
let pmp = couscous::multiproof_params::<Bls12_381, BlstMSMEngine>();
println!("Poly: {:?}", pg.inner[0]);
let commitment = pg.commitment(&pmp, 0).unwrap().to_bytes().unwrap();

7 changes: 5 additions & 2 deletions kate/src/gridgen/tests/mod.rs
@@ -1,7 +1,9 @@
use ark_bls12_381::Bls12_381;

Check failure in GitHub Actions / build_and_test (line 1 in kate/src/gridgen/tests/mod.rs): unresolved import `ark_bls12_381`
use avail_core::{AppExtrinsic, AppId};
use kate_recovery::{data::DataCell, matrix::Position};
use once_cell::sync::Lazy;
use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes};
use poly_multiproof::msm::blst::BlstMSMEngine;
use poly_multiproof::{method1::M1NoPrecomp, traits::AsBytes};
use proptest::{collection, prelude::*, sample::size_range};
use rand::{distributions::Uniform, prelude::Distribution, SeedableRng};
use rand_chacha::ChaChaRng;
@@ -14,7 +16,8 @@
mod formatting;
mod reconstruction;

pub static PMP: Lazy<M1NoPrecomp> = Lazy::new(|| testnet::multiproof_params(256, 256));
pub static PMP: Lazy<M1NoPrecomp<Bls12_381, BlstMSMEngine>> =
Lazy::new(|| testnet::multiproof_params::<Bls12_381, BlstMSMEngine>(256, 256));

fn app_extrinsic_strategy() -> impl Strategy<Value = AppExtrinsic> {
(
72 changes: 45 additions & 27 deletions kate/src/lib.rs
@@ -23,6 +23,10 @@
#[cfg(feature = "std")]
pub use poly_multiproof as pmp;

#[cfg(feature = "std")]
pub type M1NoPrecomp =
pmp::method1::M1NoPrecomp<pmp::ark_bls12_381::Bls12_381, pmp::msm::blst::BlstMSMEngine>;

pub mod config {
use super::{BlockLengthColumns, BlockLengthRows};
use core::num::NonZeroU16;
@@ -54,10 +58,12 @@
use super::*;
use hex_literal::hex;
use once_cell::sync::Lazy;
use poly_multiproof::ark_ff::{BigInt, Fp};
use pmp::ark_bls12_381::Fr;
use poly_multiproof::ark_ff::{BigInt, Fp, PrimeField};
use poly_multiproof::ark_serialize::CanonicalDeserialize;
use poly_multiproof::m1_blst;
use poly_multiproof::m1_blst::{Fr, G1, G2};
use poly_multiproof::method1::M1NoPrecomp;
use poly_multiproof::traits::MSMEngine;
use poly_multiproof::Pairing;
use rand_chacha::{rand_core::SeedableRng, ChaChaRng};
use std::{collections::HashMap, sync::Mutex};

@@ -87,13 +93,21 @@
const G1_BYTES: [u8; 48] = hex!("a45f754a9e94cccbb2cbe9d7c441b8b527026ef05e2a3aff4aa4bb1c57df3767fb669cc4c7639bd37e683653bdc50b5a");
const G2_BYTES: [u8; 96] = hex!("b845ac5e7b4ec8541d012660276772e001c1e0475e60971884481d43fcbd44de2a02e9862dbf9f536c211814f6cc5448100bcda5dc707854af8e3829750d1fb18b127286aaa4fc959e732e2128a8a315f2f8f419bf5774fe043af46fbbeb4b27");

pub fn multiproof_params(max_degree: usize, max_pts: usize) -> m1_blst::M1NoPrecomp {
let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData);

let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap();
let g2 = G2::deserialize_compressed(&G2_BYTES[..]).unwrap();

m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, max_degree.saturating_add(1), max_pts)
pub fn multiproof_params<E: Pairing, M: MSMEngine<E = E>>(
max_degree: usize,
max_pts: usize,
) -> M1NoPrecomp<E, M>
where
E::ScalarField: PrimeField + From<Fr>,
E::G1: CanonicalDeserialize,
E::G2: CanonicalDeserialize,
{
let x: <E as Pairing>::ScalarField =
Fp(BigInt(SEC_LIMBS), core::marker::PhantomData).into();
let g1: E::G1 = E::G1::deserialize_compressed(&G1_BYTES[..]).unwrap();
let g2: E::G2 = E::G2::deserialize_compressed(&G2_BYTES[..]).unwrap();

M1NoPrecomp::<E, M>::new_from_scalar(x, g1, g2, max_degree.saturating_add(1), max_pts)
}

#[cfg(test)]
@@ -106,11 +120,12 @@
fft::{EvaluationDomain as PlonkED, Evaluations as PlonkEV},
prelude::BlsScalar,
};
use pmp::ark_bls12_381::Bls12_381;
use poly_multiproof::{
ark_ff::{BigInt, Fp},
ark_poly::{EvaluationDomain, GeneralEvaluationDomain},
ark_serialize::{CanonicalDeserialize, CanonicalSerialize},
m1_blst::Fr,
ark_serialize::CanonicalSerialize,
msm::blst::BlstMSMEngine,
traits::Committer,
};
use rand::thread_rng;
@@ -125,10 +140,12 @@
hex!("7848b5d711bc9883996317a3f9c90269d56771005d540a19184939c9e8d0db2a");
assert_eq!(SEC_BYTES, out);

let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap();
let g2 = G2::deserialize_compressed(&G2_BYTES[..]).unwrap();
let g1 = <Bls12_381 as Pairing>::G1::deserialize_compressed(&G1_BYTES[..]).unwrap();
let g2 = <Bls12_381 as Pairing>::G2::deserialize_compressed(&G2_BYTES[..]).unwrap();

let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, 1024, 256);
let pmp = poly_multiproof::method1::M1NoPrecomp::<_, BlstMSMEngine>::new_from_scalar(
x, g1, g2, 1024, 256,
);

let dp_evals = (0..30)
.map(|_| BlsScalar::random(&mut thread_rng()))
@@ -164,10 +181,11 @@
#[cfg(feature = "std")]
pub mod couscous {
use super::*;
use poly_multiproof::ark_serialize::CanonicalDeserialize;
use poly_multiproof::m1_blst;
use poly_multiproof::m1_blst::{G1, G2};

use pmp::ark_bls12_381::{G1Projective as G1, G2Projective as G2};
use pmp::ark_serialize::CanonicalDeserialize;
use pmp::method1::M1NoPrecomp;
use pmp::traits::MSMEngine;
use pmp::Pairing;
/// Constructs public parameters from pre-generated points for degree up to 1024
pub fn public_params() -> PublicParameters {
// We can also use the raw data to make deserialization faster, at the cost of a larger data size
@@ -208,14 +226,17 @@
}

/// Construct public parameters from pre-generated points for degree up to 1024
pub fn multiproof_params() -> m1_blst::M1NoPrecomp {
pub fn multiproof_params<E: Pairing<G1 = G1, G2 = G2>, M: MSMEngine<E = E>>(
) -> M1NoPrecomp<E, M> {
let (g1, g2) = load_trusted_g1_g2();
m1_blst::M1NoPrecomp::new_from_powers(g1, g2)
<M1NoPrecomp<_, _>>::new_from_powers(&g1, &g2)
}

#[cfg(test)]
mod tests {
use super::*;
use crate::pmp::msm::blst::BlstMSMEngine;
use ark_bls12_381::{Bls12_381, Fr};

Check failure in GitHub Actions / build_and_test (line 239 in kate/src/lib.rs): unresolved import `ark_bls12_381`
use dusk_plonk::{
commitment_scheme::kzg10::proof::Proof,
fft::{EvaluationDomain as DPEvaluationDomain, Evaluations},
@@ -228,15 +249,12 @@
},
traits::KZGProof,
};
use poly_multiproof::{
m1_blst::Fr,
traits::{AsBytes, Committer},
};
use poly_multiproof::traits::{AsBytes, Committer};
use rand::thread_rng;

#[test]
fn test_consistent_testnet_params() {
let pmp = couscous::multiproof_params();
let pmp = couscous::multiproof_params::<Bls12_381, BlstMSMEngine>();
let pmp2 = couscous::public_params();

let points = DensePolynomial::<Fr>::rand(1023, &mut thread_rng()).coeffs;
@@ -276,7 +294,7 @@
assert_eq!(proof.to_bytes().unwrap(), proof2.to_bytes());

let verify1 = pmp
.verify(&pmp_commit, pmp_domain_pts[1], points[1], &proof)
.verify::<BlstMSMEngine>(&pmp_commit, pmp_domain_pts[1], points[1], &proof)
.unwrap();

let dp_proof_obj = Proof {
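
A minimal usage sketch (not part of the diff): after this upgrade the pairing and MSM engine are explicit type parameters instead of the removed `m1_blst` aliases. The `kate::testnet::multiproof_params` path is an assumption based on the test usage above; the `M1NoPrecomp::new` call mirrors kate/examples/multiproof_verification.rs.

// Sketch only, under the assumptions stated above.
use ark_bls12_381::Bls12_381;
use poly_multiproof::method1::M1NoPrecomp;
use poly_multiproof::msm::blst::BlstMSMEngine;
use rand::thread_rng;

fn main() {
    // Curve and MSM engine are now chosen by the caller.
    type E = Bls12_381;
    type M = BlstMSMEngine;

    // Deterministic testnet parameters (max degree 256, max opening points 256); path assumed.
    let _pp = kate::testnet::multiproof_params::<E, M>(256, 256);

    // Fresh parameters from a local RNG, matching the example in this PR.
    let _pmp = M1NoPrecomp::<E, M>::new(256, 256, &mut thread_rng());
}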