Commits (31 total; the diff below shows changes from 12 of them)
7375575  first approach  (tomip01, Oct 7, 2025)
34713c8  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 7, 2025)
5918d8f  compute cell proofs  (tomip01, Oct 8, 2025)
4a86ee2  use version 1  (tomip01, Oct 8, 2025)
934e407  use cell proofs  (tomip01, Oct 8, 2025)
9775650  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 8, 2025)
44ceffa  make eth client expose eth_config  (tomip01, Oct 9, 2025)
c46220a  default to osaka if rpc responds correctly  (tomip01, Oct 9, 2025)
9c5d320  remove comment  (tomip01, Oct 9, 2025)
d4b2b11  add issue link and rever genesis changes  (tomip01, Oct 9, 2025)
c82d7c8  revert makefile changes  (tomip01, Oct 9, 2025)
2d05121  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 9, 2025)
a82fe68  first approach to maintain compatibility  (tomip01, Oct 13, 2025)
184aab6  handle osaka proof  (tomip01, Oct 13, 2025)
57a6150  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 13, 2025)
1fc0c28  fix compile c_kzg  (tomip01, Oct 13, 2025)
5145ba5  use flag  (tomip01, Oct 13, 2025)
41ed5e7  use another crate for verification  (tomip01, Oct 14, 2025)
b2599af  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 14, 2025)
1a78153  Revert "use another crate for verification"  (tomip01, Oct 14, 2025)
984d1fd  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 15, 2025)
e17483e  Change precompute to 0  (gianbelinche, Oct 13, 2025)
ca2c4d7  Add risc0 feature to kzg  (gianbelinche, Oct 14, 2025)
2c5b849  include c-kzg risc0  (tomip01, Oct 15, 2025)
36be0a2  fix lint  (tomip01, Oct 15, 2025)
977ab1f  Merge branch 'fix_risc0' into fusaka_blobs  (tomip01, Oct 16, 2025)
963fcf4  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 16, 2025)
7270ab6  use correct method?  (tomip01, Oct 16, 2025)
2883247  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 17, 2025)
230e0e6  Merge branch 'main' into fusaka_blobs  (tomip01, Oct 21, 2025)
4b1a4e3  remove comment  (tomip01, Oct 21, 2025)
29 changes: 22 additions & 7 deletions crates/common/types/blobs_bundle.rs
@@ -1,6 +1,8 @@
use std::ops::AddAssign;

use crate::serde_utils;
#[cfg(feature = "c-kzg")]
use crate::types::Fork;
use crate::types::constants::VERSIONED_HASH_VERSION_KZG;
use crate::{Bytes, H256};

@@ -80,23 +82,34 @@ impl BlobsBundle {

// In the future we might want to provide a new method that calculates the commitments and proofs using the following.
#[cfg(feature = "c-kzg")]
pub fn create_from_blobs(blobs: &Vec<Blob>) -> Result<Self, BlobsBundleError> {
pub fn create_from_blobs(blobs: &Vec<Blob>, fork: Fork) -> Result<Self, BlobsBundleError> {
use ethrex_crypto::kzg::blob_to_kzg_commitment_and_proof;
let mut commitments = Vec::new();
let mut proofs = Vec::new();
let c_kzg_settings = c_kzg::ethereum_kzg_settings(8);

// Populate the commitments and proofs
for blob in blobs {
let (commitment, proof) = blob_to_kzg_commitment_and_proof(blob)?;
commitments.push(commitment);
proofs.push(proof);

if fork <= Fork::Prague {
proofs.push(proof);
} else {
let blob: c_kzg::Blob = (*blob).into();
let (_cells, cell_proofs) = c_kzg_settings
.compute_cells_and_kzg_proofs(&blob)
.map_err(|e| BlobsBundleError::Kzg(ethrex_crypto::kzg::KzgError::CKzg(e)))?;
let cell_proofs = cell_proofs.map(|p| p.to_bytes().into_inner());
proofs.extend(cell_proofs);
[Review comment, Copilot AI, Oct 9, 2025] The variable '_cells' is ignored but the function computes both cells and proofs. If cells are not needed, consider using a more specific function that only computes proofs to avoid unnecessary computation. If cells might be needed in the future, add a comment explaining why they're currently ignored.
}
}

Ok(Self {
blobs: blobs.clone(),
commitments,
proofs,
version: 0,
version: if fork <= Fork::Prague { 0 } else { 1 },
})
}

@@ -266,8 +279,9 @@ mod tests {
})
.collect();

let blobs_bundle = crate::types::BlobsBundle::create_from_blobs(&blobs)
.expect("Failed to create blobs bundle");
let blobs_bundle =
crate::types::BlobsBundle::create_from_blobs(&blobs, crate::types::Fork::Prague)
.expect("Failed to create blobs bundle");

let blob_versioned_hashes = blobs_bundle.generate_versioned_hashes();

@@ -403,8 +417,9 @@ mod tests {
let blobs =
std::iter::repeat_n(blob, super::MAX_BLOB_COUNT_ELECTRA + 1).collect::<Vec<_>>();

let blobs_bundle = crate::types::BlobsBundle::create_from_blobs(&blobs)
.expect("Failed to create blobs bundle");
let blobs_bundle =
crate::types::BlobsBundle::create_from_blobs(&blobs, crate::types::Fork::Prague)
.expect("Failed to create blobs bundle");

let blob_versioned_hashes = blobs_bundle.generate_versioned_hashes();

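For context on the change above, a minimal, hedged usage sketch of the new signature (requires the c-kzg feature; it assumes Blob is the [u8; BYTES_PER_BLOB] alias, that the version and proofs fields of BlobsBundle are publicly readable, and that BlobsBundleError implements std::error::Error — only the signature itself is confirmed by this diff):

use ethrex_common::types::{BYTES_PER_BLOB, Blob, BlobsBundle, Fork};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // An all-zero blob is a canonical KZG input; real callers fill blobs from
    // encoded state diffs (see generate_blobs_bundle in l1_committer.rs below).
    let blob: Blob = [0u8; BYTES_PER_BLOB];

    // Prague and earlier: one blob proof per blob, wrapper version 0 (EIP-4844).
    let prague = BlobsBundle::create_from_blobs(&vec![blob], Fork::Prague)?;
    assert_eq!(prague.version, 0);
    assert_eq!(prague.proofs.len(), 1);

    // Osaka onwards: 128 cell proofs per blob, wrapper version 1 (EIP-7594).
    let osaka = BlobsBundle::create_from_blobs(&vec![blob], Fork::Osaka)?;
    assert_eq!(osaka.version, 1);
    assert_eq!(osaka.proofs.len(), 128);

    Ok(())
}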
17 changes: 9 additions & 8 deletions crates/common/types/transaction.rs
@@ -1498,9 +1498,9 @@ mod serde_impl {
use serde_json::Value;
use std::{collections::HashMap, str::FromStr};

#[cfg(feature = "c-kzg")]
use crate::types::BYTES_PER_BLOB;
use crate::types::{AccessListItem, AuthorizationTuple, BlobsBundleError};
#[cfg(feature = "c-kzg")]
use crate::types::{BYTES_PER_BLOB, Fork};

use super::*;

@@ -2302,10 +2302,11 @@
}

#[cfg(feature = "c-kzg")]
impl TryFrom<GenericTransaction> for WrappedEIP4844Transaction {
type Error = GenericTransactionError;

fn try_from(value: GenericTransaction) -> Result<Self, Self::Error> {
impl WrappedEIP4844Transaction {
pub fn from_generic_tx(
value: GenericTransaction,
fork: Fork,
) -> Result<Self, GenericTransactionError> {
let blobs = value
.blobs
.iter()
@@ -2319,8 +2320,8 @@

Ok(Self {
tx: value.try_into()?,
wrapper_version: None,
blobs_bundle: BlobsBundle::create_from_blobs(&blobs)?,
wrapper_version: (fork > Fork::Prague).then_some(1),
blobs_bundle: BlobsBundle::create_from_blobs(&blobs, fork)?,
})
}
}
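A hedged caller-side sketch of the replacement constructor (it assumes WrappedEIP4844Transaction and GenericTransaction are reachable under ethrex_common::types, that wrapper_version is a public field, and that GenericTransactionError implements std::error::Error; only the from_generic_tx signature comes from this diff):

use ethrex_common::types::{Fork, GenericTransaction, WrappedEIP4844Transaction};

// Replaces the old fork-unaware `generic_tx.try_into()?` conversion.
fn wrap_blob_tx(
    generic_tx: GenericTransaction,
    fork: Fork,
) -> Result<WrappedEIP4844Transaction, Box<dyn std::error::Error>> {
    let wrapped = WrappedEIP4844Transaction::from_generic_tx(generic_tx, fork)?;
    // Prague and earlier keep the legacy wrapper (wrapper_version: None);
    // Osaka onwards mark wrapper version 1 to match the cell-proof bundle.
    debug_assert_eq!(wrapped.wrapper_version, (fork > Fork::Prague).then_some(1));
    Ok(wrapped)
}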
7 changes: 4 additions & 3 deletions crates/l2/based/block_fetcher.rs
@@ -13,7 +13,7 @@ use ethrex_l2_common::{
privileged_transactions::compute_privileged_transactions_hash,
state_diff::prepare_state_diff,
};
use ethrex_l2_sdk::{get_last_committed_batch, get_last_fetched_l1_block};
use ethrex_l2_sdk::{get_l1_fork, get_last_committed_batch, get_last_fetched_l1_block};
use ethrex_rlp::decode::RLPDecode;
use ethrex_rpc::{EthClient, types::receipt::RpcLog};
use ethrex_storage::Store;
@@ -388,8 +388,9 @@ impl BlockFetcher {
)
.map_err(|_| BlockFetcherError::BlobBundleError)?;

let (blobs_bundle, _) =
generate_blobs_bundle(&state_diff).map_err(|_| BlockFetcherError::BlobBundleError)?;
let fork = get_l1_fork(&self.eth_client).await;
let (blobs_bundle, _) = generate_blobs_bundle(&state_diff, fork)
.map_err(|_| BlockFetcherError::BlobBundleError)?;

Ok(Batch {
number: batch_number.as_u64(),
32 changes: 24 additions & 8 deletions crates/l2/monitor/widget/batches.rs
@@ -1,5 +1,8 @@
use ethrex_common::{Address, H256, types::batch::Batch};
use ethrex_l2_sdk::get_last_committed_batch;
use ethrex_common::{
Address, H256,
types::{Fork, batch::Batch},
};
use ethrex_l2_sdk::{get_l1_fork, get_last_committed_batch};
use ethrex_rpc::EthClient;
use ethrex_storage_rollup::StoreRollup;
use ratatui::{
@@ -60,17 +63,23 @@ impl BatchesTable {
.await?;
new_latest_batches.truncate(BATCH_WINDOW_SIZE);

let fork = get_l1_fork(eth_client).await;

let n_new_latest_batches = new_latest_batches.len();
self.items
.truncate(BATCH_WINDOW_SIZE - n_new_latest_batches);
self.refresh_items(rollup_store).await?;
self.refresh_items(rollup_store, fork).await?;
self.items.extend_from_slice(&new_latest_batches);
self.items.rotate_right(n_new_latest_batches);

Ok(())
}

async fn refresh_items(&mut self, rollup_store: &StoreRollup) -> Result<(), MonitorError> {
async fn refresh_items(
&mut self,
rollup_store: &StoreRollup,
fork: Fork,
) -> Result<(), MonitorError> {
if self.items.is_empty() {
return Ok(());
}
@@ -83,7 +92,7 @@ impl BatchesTable {
} else {
let batch_number = batch.number;
let new_batch = rollup_store
.get_batch(batch_number)
.get_batch(batch_number, fork)
.await
.map_err(|e| MonitorError::GetBatchByNumber(batch_number, e))?
.ok_or(MonitorError::BatchNotFound(batch_number))?;
@@ -116,9 +125,15 @@
.map_err(|_| MonitorError::BatchWindow)?,
),
);
let fork = get_l1_fork(eth_client).await;

let new_batches =
Self::get_batches(last_l2_batch_fetched, last_l2_batch_number, rollup_store).await?;
let new_batches = Self::get_batches(
last_l2_batch_fetched,
last_l2_batch_number,
rollup_store,
fork,
)
.await?;

Ok(Self::process_batches(new_batches))
}
@@ -127,12 +142,13 @@
from: &mut u64,
to: u64,
rollup_store: &StoreRollup,
fork: Fork,
) -> Result<Vec<Batch>, MonitorError> {
let mut new_batches = Vec::new();

for batch_number in *from + 1..=to {
let batch = rollup_store
.get_batch(batch_number)
.get_batch(batch_number, fork)
.await
.map_err(|e| MonitorError::GetBatchByNumber(batch_number, e))?
.ok_or(MonitorError::BatchNotFound(batch_number))?;
8 changes: 7 additions & 1 deletion crates/l2/networking/rpc/l2/batch.rs
@@ -92,7 +92,13 @@ impl RpcHandler for GetBatchByBatchNumberRequest {

async fn handle(&self, context: RpcApiContext) -> Result<Value, RpcErr> {
debug!("Requested batch with number: {}", self.batch_number);
let Some(batch) = context.rollup_store.get_batch(self.batch_number).await? else {
// TODO: review this choice of fork
let fork = context.l1_ctx.blockchain.current_fork().await?;
let Some(batch) = context
.rollup_store
.get_batch(self.batch_number, fork)
.await?
else {
return Ok(Value::Null);
};
let rpc_batch = RpcBatch::build(batch, self.block_hashes, &context.l1_ctx.storage).await?;
12 changes: 11 additions & 1 deletion crates/l2/sdk/src/sdk.rs
@@ -1,6 +1,7 @@
use bytes::Bytes;
use calldata::encode_calldata;
use ethereum_types::{H160, H256, U256};
use ethrex_common::types::Fork;
use ethrex_common::utils::keccak;
use ethrex_common::{
Address,
@@ -669,7 +670,8 @@ pub async fn send_generic_transaction(
signed_tx.encode(&mut encoded_tx);
}
TxType::EIP4844 => {
let mut tx: WrappedEIP4844Transaction = generic_tx.try_into()?;
let fork = get_l1_fork(client).await;
let mut tx = WrappedEIP4844Transaction::from_generic_tx(generic_tx, fork)?;
tx.tx
.sign_inplace(signer)
.await
@@ -980,6 +982,14 @@ pub async fn get_pending_privileged_transactions(
from_hex_string_to_h256_array(&response)
}

// TODO: This is a work around for now, issue: https://github.com/lambdaclass/ethrex/issues/4828
pub async fn get_l1_fork(client: &EthClient) -> Fork {
match client.get_eth_config().await {
Ok(_) => Fork::Osaka, // This endpoint only supports Osaka and later
Err(_) => Fork::Prague,
}
}

async fn _generic_call(
client: &EthClient,
selector: &[u8],
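Since the probe above only checks whether the eth_config endpoint answers at all, here is a hedged sketch of one way downstream code can consume it (the import paths are taken from this PR's diffs; the helper name is hypothetical):

use ethrex_common::types::Fork;
use ethrex_l2_sdk::get_l1_fork;
use ethrex_rpc::EthClient;

// Hypothetical helper: map the probed L1 fork to the blob wrapper version the
// sender should use, mirroring WrappedEIP4844Transaction::from_generic_tx.
async fn blob_wrapper_version(client: &EthClient) -> Option<u8> {
    let fork = get_l1_fork(client).await; // Fork::Osaka if eth_config answers, else Fork::Prague
    (fork > Fork::Prague).then_some(1)
}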
13 changes: 8 additions & 5 deletions crates/l2/sequencer/l1_committer.rs
@@ -12,7 +12,7 @@ use ethrex_blockchain::{Blockchain, vm::StoreVmDatabase};
use ethrex_common::{
Address, H256, U256,
types::{
AccountUpdate, BLOB_BASE_FEE_UPDATE_FRACTION, BlobsBundle, Block, BlockNumber,
AccountUpdate, BLOB_BASE_FEE_UPDATE_FRACTION, BlobsBundle, Block, BlockNumber, Fork,
MIN_BASE_FEE_PER_BLOB_GAS, TxType, batch::Batch, blobs_bundle, fake_exponential_checked,
},
};
@@ -28,7 +28,7 @@ use ethrex_l2_common::{
};
use ethrex_l2_rpc::signer::{Signer, SignerHealth};
use ethrex_l2_sdk::{
build_generic_tx, calldata::encode_calldata, get_last_committed_batch,
build_generic_tx, calldata::encode_calldata, get_l1_fork, get_last_committed_batch,
send_tx_bump_gas_exponential_backoff,
};
#[cfg(feature = "metrics")]
@@ -206,7 +208,8 @@ impl L1Committer {
get_last_committed_batch(&self.eth_client, self.on_chain_proposer_address).await?;
let batch_to_commit = last_committed_batch_number + 1;

let batch = match self.rollup_store.get_batch(batch_to_commit).await? {
let fork = get_l1_fork(&self.eth_client).await;
let batch = match self.rollup_store.get_batch(batch_to_commit, fork).await? {
Some(batch) => batch,
None => {
let last_committed_blocks = self
@@ -448,7 +449,8 @@ impl L1Committer {
&acc_privileged_txs,
acc_account_updates.clone().into_values().collect(),
)?;
generate_blobs_bundle(&state_diff)
let fork = get_l1_fork(&self.eth_client).await;
generate_blobs_bundle(&state_diff, fork)
} else {
Ok((BlobsBundle::default(), 0_usize))
};
@@ -778,6 +780,7 @@ impl GenServer for L1Committer {
/// Generate the blob bundle necessary for the EIP-4844 transaction.
pub fn generate_blobs_bundle(
state_diff: &StateDiff,
fork: Fork,
) -> Result<(BlobsBundle, usize), CommitterError> {
let blob_data = state_diff.encode().map_err(CommitterError::from)?;

@@ -786,7 +789,7 @@ pub fn generate_blobs_bundle(
let blob = blobs_bundle::blob_from_bytes(blob_data).map_err(CommitterError::from)?;

Ok((
BlobsBundle::create_from_blobs(&vec![blob]).map_err(CommitterError::from)?,
BlobsBundle::create_from_blobs(&vec![blob], fork).map_err(CommitterError::from)?,
blob_size,
))
}
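A condensed, hedged sketch of the committer flow this diff wires together; it is assumed to sit in the same module as generate_blobs_bundle above, so StateDiff, CommitterError, BlobsBundle, EthClient and get_l1_fork are already in scope, and the function name is for illustration only:

// Resolve the L1 fork once per commit attempt, then let the bundle builder
// pick the proof format: blob proofs up to Prague, cell proofs from Osaka on.
async fn bundle_for_commit(
    eth_client: &EthClient,
    state_diff: &StateDiff,
) -> Result<(BlobsBundle, usize), CommitterError> {
    let fork = get_l1_fork(eth_client).await;
    generate_blobs_bundle(state_diff, fork)
}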
4 changes: 3 additions & 1 deletion crates/l2/sequencer/proof_coordinator.rs
@@ -13,6 +13,7 @@ use ethrex_common::{
types::{Block, blobs_bundle},
};
use ethrex_l2_common::prover::{BatchProof, ProverType};
use ethrex_l2_sdk::get_l1_fork;
use ethrex_metrics::metrics;
use ethrex_rpc::clients::eth::EthClient;
use ethrex_storage::Store;
@@ -488,11 +489,12 @@ impl ProofCoordinator {
.get_blobs_by_batch(batch_number)
.await?
.ok_or(ProofCoordinatorError::MissingBlob(batch_number))?;
let fork = get_l1_fork(&self.eth_client).await;
let BlobsBundle {
mut commitments,
mut proofs,
..
} = BlobsBundle::create_from_blobs(&blob)?;
} = BlobsBundle::create_from_blobs(&blob, fork)?;
match (commitments.pop(), proofs.pop()) {
(Some(commitment), Some(proof)) => (commitment, proof),
_ => return Err(ProofCoordinatorError::MissingBlob(batch_number)),
11 changes: 8 additions & 3 deletions crates/l2/storage/src/store.rs
@@ -7,7 +7,7 @@ use crate::store_db::in_memory::Store as InMemoryStore;
use crate::store_db::sql::SQLStore;
use ethrex_common::{
H256,
types::{AccountUpdate, Blob, BlobsBundle, BlockNumber, batch::Batch},
types::{AccountUpdate, Blob, BlobsBundle, BlockNumber, Fork, batch::Batch},
};
use ethrex_l2_common::prover::{BatchProof, ProverType};
use tracing::info;
@@ -153,7 +153,11 @@ impl Store {
self.engine.get_last_batch_number().await
}

pub async fn get_batch(&self, batch_number: u64) -> Result<Option<Batch>, RollupStoreError> {
pub async fn get_batch(
&self,
batch_number: u64,
fork: Fork,
) -> Result<Option<Batch>, RollupStoreError> {
let Some(blocks) = self.get_block_numbers_by_batch(batch_number).await? else {
return Ok(None);
};
@@ -181,7 +185,8 @@
&self
.get_blobs_by_batch(batch_number)
.await?
.unwrap_or_default()
.unwrap_or_default(),
fork,
).map_err(|e| {
RollupStoreError::Custom(format!("Failed to create blobs bundle from blob while getting batch from database: {e}. This is a bug"))
})?;
7 changes: 6 additions & 1 deletion crates/networking/p2p/rlpx/l2/l2_connection.rs
@@ -411,7 +411,12 @@ pub(crate) async fn send_sealed_batch(established: &mut Established) -> Result<(
{
return Ok(());
}
let Some(batch) = l2_state.store_rollup.get_batch(next_batch_to_send).await? else {
let fork = established.blockchain.current_fork().await?;
let Some(batch) = l2_state
.store_rollup
.get_batch(next_batch_to_send, fork)
.await?
else {
return Ok(());
};
match l2_state