Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix bug and compress pi hash #4

Merged
merged 12 commits into from
Mar 15, 2024
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

24 changes: 4 additions & 20 deletions aggregator/src/aggregation/circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ impl AggregationCircuit {
let chunk_hash_bytes = chunk.public_input_hash();
let snark_hash_bytes = &snark.instances[0];

assert_eq!(snark_hash_bytes.len(), ACC_LEN + DIGEST_LEN+BLOB_POINT_LEN);
assert_eq!(snark_hash_bytes.len(), ACC_LEN + DIGEST_LEN);

for i in 0..DIGEST_LEN {
// for each snark,
Expand All @@ -90,9 +90,6 @@ impl AggregationCircuit {
// extract batch's public input hash
let public_input_hash = &batch_hash.instances_exclude_acc()[0];

// extract blob instance
// let (challenge_point_instance, result_isntance) = &batch_hash.instance_for_blob();

// the public instance for this circuit consists of
// - an accumulator (12 elements)
// - the batch's public_input_hash (32 elements)
Expand Down Expand Up @@ -227,7 +224,7 @@ impl Circuit<Fr> for AggregationCircuit {
},
)?;

assert_eq!(snark_inputs.len(), MAX_AGG_SNARKS * (DIGEST_LEN+BLOB_POINT_LEN));
assert_eq!(snark_inputs.len(), MAX_AGG_SNARKS * DIGEST_LEN);
(accumulator_instances, snark_inputs)
};
end_timer!(timer);
Expand All @@ -244,10 +241,6 @@ impl Circuit<Fr> for AggregationCircuit {
Ok(challenge_point)
})?;
let challenge_cells = challenge_point.iter().map(|x| x.cell()).collect::<Vec<_>>();
// let cp_index_start = ACC_LEN + DIGEST_LEN ;
// for (i, v) in challenge_point.iter().enumerate() {
// layouter.constrain_instance(v.cell(), config.instance, i+cp_index_start)?;
// }

// let result = layouter.assign_region(||"Result Summation", |mut region|-> Result<(Vec<AssignedValue<Fr>>), Error>{
// let fp_chip = config.fp_chip();
Expand All @@ -265,15 +258,6 @@ impl Circuit<Fr> for AggregationCircuit {
// )?;



// True index of result in instance should be determined in the future.
// let result_index_start = cp_index_start + CHALLENGE_POINT_LEN;
// for (i, v) in result.iter().enumerate() {
// layouter.constrain_instance(v.cell(), config.instance, i+result_index_start)?;
// }



// ==============================================
// step 2: public input aggregation circuit
// ==============================================
Expand Down Expand Up @@ -367,14 +351,14 @@ impl Circuit<Fr> for AggregationCircuit {
"{}-th snark: {:?} {:?}",
i,
chunk_pi_hash_digests[i][j * 8 + k].value(),
snark_inputs[i * (DIGEST_LEN + 6) + (3 - j) * 8 + k].value()
snark_inputs[i * DIGEST_LEN + (3 - j) * 8 + k].value()
);

region.constrain_equal(
// in the keccak table, the input and output data have different
// endianess
chunk_pi_hash_digests[i][j * 8 + k].cell(),
snark_inputs[i * (DIGEST_LEN + 6) + (3 - j) * 8 + k].cell(),
snark_inputs[i * DIGEST_LEN + (3 - j) * 8 + k].cell(),
)?;
}
}
Expand Down
6 changes: 3 additions & 3 deletions aggregator/src/aggregation/rlc/gates.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ impl RlcConfig {
region.assign_fixed(|| "const 13", self.fixed, 5, || Value::known(Fr::from(13)))?;
region.assign_fixed(|| "const 32", self.fixed, 6, || Value::known(Fr::from(32)))?;
region.assign_fixed(
|| "const 136",
|| "const 328",
self.fixed,
7,
|| Value::known(Fr::from(136)),
|| Value::known(Fr::from(328)),
)?;
region.assign_fixed(
|| "const 2^32",
Expand Down Expand Up @@ -98,7 +98,7 @@ impl RlcConfig {
}
}
#[inline]
pub(crate) fn one_hundred_and_thirty_six_cell(&self, region_index: RegionIndex) -> Cell {
pub(crate) fn three_hundred_and_twenty_eight_cell(&self, region_index: RegionIndex) -> Cell {
Cell {
region_index,
row_offset: 7,
Expand Down
37 changes: 23 additions & 14 deletions aggregator/src/batch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,17 +131,9 @@ impl BatchHash {
result = result+Fp::from_bytes(&chunks_with_padding[i].partial_result.to_le_bytes()).unwrap();
}

let cp_fe = Fp::from_bytes(&challenge_point.to_le_bytes()).unwrap();

let cp = decompose_biguint::<Fr>(&fe_to_biguint(&cp_fe), 3, 88);

let cp_preimage = cp.iter().map(|x| x.to_bytes()).collect::<Vec<_>>();

let re = decompose_biguint::<Fr>(&fe_to_biguint(&result), 3, 88);

let re_preimage = re.iter().map(|x| x.to_bytes()).collect::<Vec<_>>();
let (cp_preimage, re_preimage) = Self::decompose_cp_result(challenge_point, U256::from_little_endian(&result.to_bytes()));

let mut preimage = [
let preimage = [
chunks_with_padding[0].chain_id.to_be_bytes().as_ref(),
chunks_with_padding[0].prev_state_root.as_bytes(),
chunks_with_padding[MAX_AGG_SNARKS - 1]
Expand Down Expand Up @@ -193,10 +185,8 @@ impl BatchHash {
// challenge_point ||
// result)

let (challenge_point_instance, result_instance) = self.instance_for_blob::<Fr>();
let (challenge_point_preimage, result_preimage) = Self::decompose_cp_result(self.challenge_point, self.result);

let challenge_point_preimage = challenge_point_instance.iter().map(|x| x.to_bytes()).collect::<Vec<_>>();
let result_preimage = result_instance.iter().map(|x| x.to_bytes()).collect::<Vec<_>>();

let batch_public_input_hash_preimage = [
self.chain_id.to_be_bytes().as_ref(),
Expand All @@ -223,14 +213,21 @@ impl BatchHash {
// keccak(
// chain id ||
// chunk[i].prevStateRoot || chunk[i].postStateRoot || chunk[i].withdrawRoot ||
// chunk[i].datahash)
// chunk[i].datahash || x || y)
for chunk in self.chunks_with_padding.iter() {
let (challenge_point_preimage, partial_result_preimage) = Self::decompose_cp_result(chunk.challenge_point, chunk.partial_result);
let chunk_public_input_hash_preimage = [
self.chain_id.to_be_bytes().as_ref(),
chunk.prev_state_root.as_bytes(),
chunk.post_state_root.as_bytes(),
chunk.withdraw_root.as_bytes(),
chunk.data_hash.as_bytes(),
challenge_point_preimage[0].as_slice(),
challenge_point_preimage[1].as_slice(),
challenge_point_preimage[2].as_slice(),
partial_result_preimage[0].as_slice(),
partial_result_preimage[1].as_slice(),
partial_result_preimage[2].as_slice(),
]
.concat();
res.push(chunk_public_input_hash_preimage)
Expand Down Expand Up @@ -267,4 +264,16 @@ impl BatchHash {
let result = decompose_biguint::<F>(&fe_to_biguint(&pr_fe), 3, 88);
(challenge_point, result)
}

/// Decomposes the blob `challenge_point` (x) and `result` (y) into the
/// byte preimages hashed into the public-input hash.
///
/// Each `U256` is interpreted as a little-endian scalar field element
/// (`Fp`), split into 3 limbs of 88 bits each via `decompose_biguint`,
/// and each limb is serialized as a 32-byte big-endian array (the limb's
/// little-endian `to_bytes()` output, reversed).
///
/// Returns `(challenge_point_limbs, result_limbs)`, each a `Vec` of 3
/// 32-byte arrays.
///
/// # Panics
/// Panics if either input is not a canonical `Fp` encoding
/// (`Fp::from_bytes` returns `None`).
pub(crate) fn decompose_cp_result(challenge_point: U256, result: U256) -> (Vec<[u8; 32]>,Vec<[u8; 32]>) {
    // The challenge point and the result go through the identical
    // decomposition; share the logic instead of duplicating it.
    let decompose = |value: U256| -> Vec<[u8; 32]> {
        let fe = Fp::from_bytes(&value.to_le_bytes()).unwrap();
        decompose_biguint::<Fr>(&fe_to_biguint(&fe), 3, 88)
            .iter()
            .map(|limb| {
                // Field elements serialize little-endian; the hash
                // preimage expects big-endian bytes, so reverse.
                let mut be_bytes = limb.to_bytes();
                be_bytes.reverse();
                be_bytes
            })
            .collect()
    };

    (decompose(challenge_point), decompose(result))
}

}
36 changes: 16 additions & 20 deletions aggregator/src/chunk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,11 +96,6 @@ impl ChunkHash {
.map(|(_, b_ctx)| b_ctx.eth_block.state_root)
.unwrap_or(H256(block.prev_state_root.to_be_bytes()));

//TODO:compute partial_result from witness block;
// let omega = Fp::from(123).pow(&[(FP_S - 12) as u64, 0, 0, 0]);

// let partial_result = polyeval()

Self {
chain_id: block.chain_id,
prev_state_root: H256(block.prev_state_root.to_be_bytes()),
Expand Down Expand Up @@ -130,7 +125,6 @@ impl ChunkHash {
r.fill_bytes(&mut buf);
// let challenge_point = Fp::from_bytes_wide(&buf).to_bytes();
let challenge_point = Fp::from(123).to_bytes();
println!("cp le bytes{:?}", challenge_point);

let mut buf1 = [0u8; 64];
r.fill_bytes(&mut buf1);
Expand Down Expand Up @@ -169,38 +163,40 @@ impl ChunkHash {
}

/// Public input hash for a given chunk is defined as
/// keccak( chain id || prev state root || post state root || withdraw root || data hash )
/// keccak( chain id || prev state root || post state root || withdraw root || data hash || x || y)
pub fn public_input_hash(&self) -> H256 {
// Build the byte preimage (chain id || state roots || withdraw root ||
// data hash || blob limbs) and hash it with keccak256; the 32-byte
// digest converts directly into an `H256`.
let preimage = self.extract_hash_preimage();
keccak256::<&[u8]>(preimage.as_ref()).into()
}

/// Extract the preimage for the hash
/// chain id || prev state root || post state root || withdraw root || data hash
/// chain id || prev state root || post state root || withdraw root || data hash || x || y
pub fn extract_hash_preimage(&self) -> Vec<u8> {
// Limbs of the blob challenge point (x) and partial result (y):
// entries [0..3] are the challenge-point limbs, [3..6] the
// partial-result limbs, each a 32-byte array.
// NOTE(review): assumes `decompose_cp_result` always returns exactly
// 6 entries — indexing below panics otherwise.
let blob_preimage = self.decompose_cp_result();
// Concatenate the fields in the fixed order the circuit expects:
// chain id || prev state root || post state root || withdraw root ||
// data hash || x (3 limbs) || y (3 limbs).
[
self.chain_id.to_be_bytes().as_ref(),
self.prev_state_root.as_bytes(),
self.post_state_root.as_bytes(),
self.withdraw_root.as_bytes(),
self.data_hash.as_bytes(),
blob_preimage[0].as_slice(),
blob_preimage[1].as_slice(),
blob_preimage[2].as_slice(),
blob_preimage[3].as_slice(),
blob_preimage[4].as_slice(),
blob_preimage[5].as_slice(),
]
.concat()
}

/// decompose challenge_point
pub fn challenge_point(&self) -> Vec<Fr>{
fn decompose_cp_result(&self) -> Vec<[u8; 32]>{
let cp_fe = Fp::from_bytes(&self.challenge_point.to_le_bytes()).unwrap();
// println!("cp le bytes{:?}", self.challenge_point);
// println!("cpfe{}", cp_fe);
decompose_biguint::<Fr>(&fe_to_biguint(&cp_fe), 3, 88)

}

/// decompose partial_result
pub fn partial_result(&self) -> Vec<Fr>{
let cp = decompose_biguint::<Fr>(&fe_to_biguint(&cp_fe), 3, 88);
let mut preimage = cp.iter().map(|x| {let mut be_bytes = x.to_bytes(); be_bytes.reverse(); be_bytes}).collect::<Vec<_>>();
let pr_fe = Fp::from_bytes(&self.partial_result.to_le_bytes()).unwrap();
decompose_biguint::<Fr>(&fe_to_biguint(&pr_fe), 3, 88)
let re = decompose_biguint::<Fr>(&fe_to_biguint(&pr_fe), 3, 88);
let mut re_preimage = re.iter().map(|x| {let mut be_bytes = x.to_bytes(); be_bytes.reverse(); be_bytes}).collect::<Vec<_>>();
preimage.append(&mut re_preimage);
preimage
}

}
Loading
Loading