
Commit 5776400
Add blobVersionedHash to the preimage of challengeDigest (#1211)
* add blobVersionedHash to the preimage of challenge digest
* (doc): aggregator/src/blob.rs
Co-authored-by: z2trillion <[email protected]>
* (doc): aggregator/src/aggregation/rlc/gates.rs
* (doc): aggregator/src/blob.rs
---------
Co-authored-by: z2trillion <[email protected]>
1 parent 6c6c662 commit 5776400
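In effect, the random challenge z used for the EIP-4844 blob polynomial evaluation is now derived from a digest that also commits to the blob's versioned hash, binding the aggregated proof to one specific blob. A minimal sketch of the new preimage layout, pieced together from the constraint comments in blob_data.rs below (the free function and plain-byte types are illustrative, not the repo's API):

use ethers_core::utils::keccak256;

// Sketch only: the preimage grows from 32 * (MAX_AGG_SNARKS + 1) bytes to
// 32 * (MAX_AGG_SNARKS + 2) bytes by appending the blob's versioned hash.
fn challenge_digest(
    metadata_digest: [u8; 32],
    chunk_data_digests: &[[u8; 32]], // one digest per chunk, MAX_AGG_SNARKS total
    versioned_hash: [u8; 32],        // newly appended by this commit
) -> [u8; 32] {
    let mut preimage = Vec::with_capacity(32 * (chunk_data_digests.len() + 2));
    preimage.extend_from_slice(&metadata_digest);
    for digest in chunk_data_digests {
        preimage.extend_from_slice(digest);
    }
    preimage.extend_from_slice(&versioned_hash);
    keccak256(preimage)
}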

8 files changed: +147 additions, -67 deletions

aggregator/src/aggregation/barycentric.rs
Lines changed: 4 additions & 17 deletions

@@ -349,21 +349,9 @@ pub fn interpolate(z: Scalar, coefficients: &[Scalar; BLOB_WIDTH]) -> Scalar {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::blob::BlobData;
-    use c_kzg::{Blob as RethBlob, KzgProof, KzgSettings};
-    use once_cell::sync::Lazy;
-    use std::{collections::BTreeSet, sync::Arc};
-
-    /// KZG trusted setup
-    pub static MAINNET_KZG_TRUSTED_SETUP: Lazy<Arc<KzgSettings>> = Lazy::new(|| {
-        Arc::new(
-            c_kzg::KzgSettings::load_trusted_setup(
-                &revm_primitives::kzg::G1_POINTS.0,
-                &revm_primitives::kzg::G2_POINTS.0,
-            )
-            .expect("failed to load trusted setup"),
-        )
-    });
+    use crate::blob::{BlobData, KZG_TRUSTED_SETUP};
+    use c_kzg::{Blob as RethBlob, KzgProof};
+    use std::collections::BTreeSet;

     #[test]
     fn log_blob_width() {

@@ -425,8 +413,7 @@ mod tests {
         )
         .unwrap();
         let (_proof, y) =
-            KzgProof::compute_kzg_proof(&blob, &to_be_bytes(z).into(), &MAINNET_KZG_TRUSTED_SETUP)
-                .unwrap();
+            KzgProof::compute_kzg_proof(&blob, &to_be_bytes(z).into(), &KZG_TRUSTED_SETUP).unwrap();
         from_canonical_be_bytes(*y)
     }
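The trusted setup removed from this test module now presumably lives in aggregator/src/blob.rs behind the KZG_TRUSTED_SETUP name imported above. A sketch of the shared definition, reconstructed from the deleted lines (its exact location and doc comment in blob.rs are assumptions):

use c_kzg::KzgSettings;
use once_cell::sync::Lazy;
use std::sync::Arc;

/// KZG trusted setup, now shared instead of being private to the tests.
pub static KZG_TRUSTED_SETUP: Lazy<Arc<KzgSettings>> = Lazy::new(|| {
    Arc::new(
        c_kzg::KzgSettings::load_trusted_setup(
            &revm_primitives::kzg::G1_POINTS.0,
            &revm_primitives::kzg::G2_POINTS.0,
        )
        .expect("failed to load trusted setup"),
    )
});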

aggregator/src/aggregation/blob_data.rs
Lines changed: 18 additions & 7 deletions

@@ -59,7 +59,7 @@ pub struct BlobDataConfig {

 pub struct AssignedBlobDataExport {
     pub num_valid_chunks: AssignedCell<Fr, Fr>,
-    pub challenge_digest: Vec<AssignedCell<Fr, Fr>>,
+    pub versioned_hash: Vec<AssignedCell<Fr, Fr>>,
     pub chunk_data_digests: Vec<Vec<AssignedCell<Fr, Fr>>>,
 }

@@ -304,7 +304,8 @@ impl BlobDataConfig {

         // - metadata_digest: 32 bytes
         // - chunk[i].chunk_data_digest: 32 bytes each
-        let preimage_len = 32.expr() * (MAX_AGG_SNARKS + 1).expr();
+        // - versioned_hash: 32 bytes
+        let preimage_len = 32.expr() * (MAX_AGG_SNARKS + 1 + 1).expr();

         [
             1.expr(), // q_enable

@@ -733,6 +734,7 @@ impl BlobDataConfig {

         let challenge_digest_preimage_rlc_specified = &rows.last().unwrap().preimage_rlc;
         let challenge_digest_rlc_specified = &rows.last().unwrap().digest_rlc;
+        let versioned_hash_rlc = &rows.get(N_ROWS_DIGEST_RLC - 2).unwrap().digest_rlc;

         // ensure that on the last row of this section the is_boundary is turned on
         // which would enable the keccak table lookup for challenge_digest

@@ -810,6 +812,7 @@ impl BlobDataConfig {
             .collect::<Vec<_>>();
         for (i, digest_rlc_specified) in std::iter::once(metadata_digest_rlc_specified)
             .chain(chunk_digest_evm_rlcs)
+            .chain(std::iter::once(versioned_hash_rlc))
             .chain(std::iter::once(challenge_digest_rlc_specified))
             .enumerate()
         {

@@ -833,7 +836,7 @@ impl BlobDataConfig {

             // compute the keccak input RLC:
             // we do this only for the metadata and chunks, not for the blob row itself.
-            if i < MAX_AGG_SNARKS + 1 {
+            if i < MAX_AGG_SNARKS + 1 + 1 {
                 let digest_keccak_rlc = rlc_config.rlc(
                     &mut region,
                     &digest_bytes,

@@ -880,13 +883,21 @@ impl BlobDataConfig {
         for chunk in chunk_data_digests_bytes.chunks_exact(N_BYTES_U256) {
             chunk_data_digests.push(chunk.to_vec());
         }
+        let challenge_digest = assigned_rows
+            .iter()
+            .rev()
+            .take(N_BYTES_U256)
+            .map(|row| row.byte.clone())
+            .collect::<Vec<AssignedCell<Fr, Fr>>>();
         let export = AssignedBlobDataExport {
             num_valid_chunks,
-            challenge_digest: assigned_rows
+            versioned_hash: assigned_rows
                 .iter()
                 .rev()
+                .skip(N_BYTES_U256)
                 .take(N_BYTES_U256)
                 .map(|row| row.byte.clone())
+                .rev()
                 .collect(),
             chunk_data_digests,
         };

@@ -906,19 +917,19 @@ impl BlobDataConfig {

         let challenge_digest_limb1 = rlc_config.inner_product(
             &mut region,
-            &export.challenge_digest[0..11],
+            &challenge_digest[0..11],
             &pows_of_256,
             &mut rlc_config_offset,
         )?;
         let challenge_digest_limb2 = rlc_config.inner_product(
             &mut region,
-            &export.challenge_digest[11..22],
+            &challenge_digest[11..22],
             &pows_of_256,
             &mut rlc_config_offset,
         )?;
         let challenge_digest_limb3 = rlc_config.inner_product(
             &mut region,
-            &export.challenge_digest[22..32],
+            &challenge_digest[22..32],
             &pows_of_256[0..10],
             &mut rlc_config_offset,
         )?;
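Two details are easy to miss here. First, assigned_rows stores the digest bytes back-to-front: the last N_BYTES_U256 (32) rows hold the challenge digest, which now stays local to this function, and the 32 rows before them hold the versioned hash, which the trailing .rev() flips back into its original order before export. Second, a 32-byte digest does not fit in a single BN254 scalar (roughly 254 bits), which is why the challenge digest is recomposed as three limbs of 11, 11, and 10 bytes via inner products with powers of 256. A plain-integer sketch of that split (big-endian byte order within each limb is an assumption):

// Sketch only: each limb is at most 11 bytes = 88 bits, so it fits
// comfortably in a field element (and in a u128 here).
fn digest_limbs(digest: &[u8; 32]) -> [u128; 3] {
    let limb = |bytes: &[u8]| bytes.iter().fold(0u128, |acc, &b| (acc << 8) + b as u128);
    [
        limb(&digest[0..11]),  // challenge_digest_limb1
        limb(&digest[11..22]), // challenge_digest_limb2
        limb(&digest[22..32]), // challenge_digest_limb3
    ]
}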

aggregator/src/aggregation/circuit.rs
Lines changed: 8 additions & 0 deletions

@@ -479,6 +479,14 @@ impl Circuit<Fr> for AggregationCircuit {
                 region.constrain_equal(c.cell(), ec.cell())?;
             }

+            for (c, ec) in blob_data_exports
+                .versioned_hash
+                .iter()
+                .zip_eq(assigned_batch_hash.blob.versioned_hash.iter())
+            {
+                region.constrain_equal(c.cell(), ec.cell())?;
+            }
+
             Ok(())
         },
     )?;
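These copy constraints tie the two sub-circuits together: each versioned-hash byte cell exported by the blob-data config must equal the corresponding cell assigned while computing the batch hash, and zip_eq (from itertools) additionally panics at witness-generation time if the two sides ever differ in length. A prover therefore cannot use one blob in the data constraints and a different one in the public-input hash.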

aggregator/src/aggregation/rlc/gates.rs
Lines changed: 7 additions & 7 deletions

@@ -13,8 +13,8 @@ use super::RlcConfig;

 const FIXED_OFFSET_32: usize = MAX_AGG_SNARKS + 1;
 const FIXED_OFFSET_168: usize = FIXED_OFFSET_32 + 1;
-const FIXED_OFFSET_200: usize = FIXED_OFFSET_168 + 1;
-const FIXED_OFFSET_2_POW_32: usize = FIXED_OFFSET_200 + 1;
+const FIXED_OFFSET_232: usize = FIXED_OFFSET_168 + 1;
+const FIXED_OFFSET_2_POW_32: usize = FIXED_OFFSET_232 + 1;
 const FIXED_OFFSET_256: usize = FIXED_OFFSET_2_POW_32 + 1;
 const FIXED_OFFSET_EMPTY_KECCAK: usize = FIXED_OFFSET_256 + POWS_OF_256;

@@ -33,7 +33,7 @@ impl RlcConfig {
     /// | MAX_AGG_SNARKS     | MAX_AGG_SNARKS |
     /// | MAX_AGG_SNARKS + 1 | 32             |
     /// | MAX_AGG_SNARKS + 2 | 168            |
-    /// | MAX_AGG_SNARKS + 3 | 200            |
+    /// | MAX_AGG_SNARKS + 3 | 232            |
     /// | MAX_AGG_SNARKS + 4 | 2 ^ 32         |
     /// | MAX_AGG_SNARKS + 5 | 256            |
     /// | MAX_AGG_SNARKS + 6 | 256 ^ 2        |

@@ -60,8 +60,8 @@ impl RlcConfig {
         }
         assert_eq!(offset, FIXED_OFFSET_32);

-        // [32, 168, 200, 1 << 32]
-        for const_val in [32, 168, 200, 1 << 32] {
+        // [32, 168, 232, 1 << 32]
+        for const_val in [32, 168, 232, 1 << 32] {
             region.assign_fixed(
                 || format!("const at offset={offset}"),
                 self.fixed,

@@ -189,10 +189,10 @@ impl RlcConfig {
     }

     #[inline]
-    pub(crate) fn two_hundred_cell(&self, region_index: RegionIndex) -> Cell {
+    pub(crate) fn two_hundred_and_thirty_two_cell(&self, region_index: RegionIndex) -> Cell {
         Cell {
             region_index,
-            row_offset: FIXED_OFFSET_200,
+            row_offset: FIXED_OFFSET_232,
             column: self.fixed.into(),
         }
     }
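The 200 → 232 rename is simple byte accounting: the batch public-input-hash preimage in batch.rs was an 8-byte chain id plus six 32-byte fields (prev/post state roots, withdraw root, data hash, z, y), i.e. 8 + 6 × 32 = 200 bytes, and appending the 32-byte versioned hash brings it to 8 + 7 × 32 = 232. These fixed-column constants presumably exist so that preimage lengths like this can be referenced as fixed cells in the keccak lookups.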

aggregator/src/batch.rs
Lines changed: 15 additions & 13 deletions

@@ -1,7 +1,7 @@
 //! This module implements related functions that aggregates public inputs of many chunks into a
 //! single one.

-use eth_types::{Field, ToBigEndian, H256, U256};
+use eth_types::{Field, ToBigEndian, H256};
 use ethers_core::utils::keccak256;

 use crate::{

@@ -34,7 +34,10 @@ pub struct BatchHash {
     pub(crate) public_input_hash: H256,
     /// The number of chunks that contain meaningful data, i.e. not padded chunks.
     pub(crate) number_of_valid_chunks: usize,
+    /// 4844-Blob related fields.
     pub(crate) blob: BlobAssignments,
+    /// The 4844 versioned hash for the blob.
+    pub(crate) versioned_hash: H256,
 }

 impl BatchHash {

@@ -117,6 +120,7 @@ impl BatchHash {

         let blob_data = BlobData::new(number_of_valid_chunks, chunks_with_padding);
         let blob_assignments = BlobAssignments::from(&blob_data);
+        let versioned_hash = blob_data.get_versioned_hash();

         // public input hash is build as
         // keccak(

@@ -126,7 +130,8 @@ impl BatchHash {
         //     chunk[k-1].withdraw_root ||
         //     batch_data_hash ||
         //     z ||
-        //     y
+        //     y ||
+        //     versioned_hash
         // )
         let preimage = [
             chunks_with_padding[0].chain_id.to_be_bytes().as_ref(),

@@ -140,6 +145,7 @@ impl BatchHash {
             batch_data_hash.as_slice(),
             blob_assignments.challenge.to_be_bytes().as_ref(),
             blob_assignments.evaluation.to_be_bytes().as_ref(),
+            versioned_hash.as_bytes(),
         ]
         .concat();
         let public_input_hash: H256 = keccak256(preimage).into();

@@ -155,9 +161,10 @@ impl BatchHash {
             chain_id: chunks_with_padding[0].chain_id,
             chunks_with_padding: chunks_with_padding.to_vec(),
             data_hash: batch_data_hash.into(),
-            blob: blob_assignments,
             public_input_hash,
             number_of_valid_chunks,
+            blob: blob_assignments,
+            versioned_hash,
         }
     }

@@ -187,15 +194,9 @@ impl BatchHash {
         //     chunk[k-1].withdraw_root ||
         //     batch_data_hash ||
         //     z ||
-        //     y )
-        // TODO: make BLS_MODULUS into a static variable using lazy_static!()
-        let (_, z) = self.blob.challenge_digest.div_mod(
-            U256::from_str_radix(
-                "0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001",
-                16,
-            )
-            .unwrap(),
-        );
+        //     y ||
+        //     blob_versioned_hash
+        // )
         let batch_public_input_hash_preimage = [
             self.chain_id.to_be_bytes().as_ref(),
             self.chunks_with_padding[0].prev_state_root.as_bytes(),

@@ -206,8 +207,9 @@ impl BatchHash {
                 .withdraw_root
                 .as_bytes(),
             self.data_hash.as_bytes(),
-            &z.to_be_bytes(),
+            &self.blob.challenge.to_be_bytes(),
             &self.blob.evaluation.to_be_bytes(),
+            self.versioned_hash.as_bytes(),
         ]
         .concat();
         res.push(batch_public_input_hash_preimage);
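With the inline reduction gone, z is read directly from self.blob.challenge, which presumably already holds the challenge reduced modulo the BLS12-381 scalar field (so the removed div_mod, its hard-coded modulus, and the U256 import are no longer needed), and the preimage gains the versioned hash as its final field. A condensed sketch of the 232-byte preimage assembled above (the free function is illustrative, not repo code):

use eth_types::H256;
use ethers_core::utils::keccak256;

// Sketch only: mirrors the preimage layout in the diff above.
fn batch_pi_hash(
    chain_id: u64,
    prev_state_root: H256,
    post_state_root: H256,
    withdraw_root: H256,
    data_hash: H256,
    z: [u8; 32],          // blob challenge point
    y: [u8; 32],          // blob evaluation at z
    versioned_hash: H256, // new in this commit
) -> H256 {
    let preimage = [
        chain_id.to_be_bytes().as_ref(), // 8 bytes
        prev_state_root.as_bytes(),      // + 6 * 32 bytes ...
        post_state_root.as_bytes(),
        withdraw_root.as_bytes(),
        data_hash.as_bytes(),
        z.as_ref(),
        y.as_ref(),
        versioned_hash.as_bytes(),       // ... + 32 bytes = 232 total
    ]
    .concat();
    keccak256(preimage).into()
}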
