feat: Rename Validium abstraction and implementors #92

Merged
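This PR renames the Validium/Rollup commit-data abstraction: the `L1BatchCommitter` trait becomes `L1BatchCommitDataGenerator`, its implementors become `RollupModeL1BatchCommitDataGenerator` and `ValidiumModeL1BatchCommitDataGenerator`, and a new `L1BatchCommitDataGeneratorMode` enum is added to `StateKeeperConfig`. A minimal sketch of how the renamed pieces could fit together is shown below; the `commit_data_generator` helper is hypothetical and not part of this diff, it only illustrates mapping the new config mode onto the renamed implementors.

```rust
use std::sync::Arc;

use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
use zksync_types::l1_batch_commit_data_generator::{
    L1BatchCommitDataGenerator, RollupModeL1BatchCommitDataGenerator,
    ValidiumModeL1BatchCommitDataGenerator,
};

/// Hypothetical helper: picks a commit-data generator based on the new config mode.
fn commit_data_generator(
    mode: L1BatchCommitDataGeneratorMode,
) -> Arc<dyn L1BatchCommitDataGenerator> {
    match mode {
        L1BatchCommitDataGeneratorMode::Rollup => {
            Arc::new(RollupModeL1BatchCommitDataGenerator {})
        }
        L1BatchCommitDataGeneratorMode::Validium => {
            Arc::new(ValidiumModeL1BatchCommitDataGenerator {})
        }
    }
}
```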
9 changes: 5 additions & 4 deletions core/bin/external_node/src/main.rs
@@ -34,7 +34,7 @@ use zksync_dal::{healthcheck::ConnectionPoolHealthCheck, ConnectionPool};
 use zksync_health_check::CheckHealth;
 use zksync_state::PostgresStorageCaches;
 use zksync_storage::RocksDB;
-use zksync_types::l1_batch_committer::RollupModeL1BatchCommitter;
+use zksync_types::l1_batch_commit_data_generator::RollupModeL1BatchCommitDataGenerator;
 use zksync_utils::wait_for_tasks::wait_for_tasks;
 
 mod config;
@@ -229,10 +229,11 @@ async fn init_tasks(
         .context("failed to build a tree_pool")?;
     let tree_handle = task::spawn(metadata_calculator.run(tree_pool, tree_stop_receiver));
 
-    let l1_batch_committer = Arc::new(RollupModeL1BatchCommitter {});
+    let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});
 
-    let consistency_checker_handle =
-        tokio::spawn(consistency_checker.run(stop_receiver.clone(), l1_batch_committer));
+    let consistency_checker_handle = tokio::spawn(
+        consistency_checker.run(stop_receiver.clone(), l1_batch_commit_data_generator),
+    );
 
     let updater_handle = task::spawn(batch_status_updater.run(stop_receiver.clone()));
     let sk_handle = task::spawn(state_keeper.run());
15 changes: 15 additions & 0 deletions core/lib/config/src/configs/chain.rs
@@ -45,6 +45,18 @@ impl Default for FeeModelVersion {
     }
 }
 
+#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)]
+pub enum L1BatchCommitDataGeneratorMode {
+    Rollup,
+    Validium,
+}
+
+impl Default for L1BatchCommitDataGeneratorMode {
+    fn default() -> Self {
+        Self::Rollup
+    }
+}
+
 #[derive(Debug, Deserialize, Clone, PartialEq, Default)]
 pub struct StateKeeperConfig {
     /// The max number of slots for txs in a block before it should be sealed by the slots sealer.
@@ -116,6 +128,8 @@ pub struct StateKeeperConfig {
 
     /// Number of keys that is processed by enum_index migration in State Keeper each L1 batch.
     pub enum_index_migration_chunk_size: Option<usize>,
+
+    pub l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode,
 }
 
 impl StateKeeperConfig {
@@ -150,6 +164,7 @@ impl StateKeeperConfig {
             virtual_blocks_per_miniblock: 1,
             upload_witness_inputs_to_gcs: false,
             enum_index_migration_chunk_size: None,
+            l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode::Rollup,
         }
     }
3 changes: 2 additions & 1 deletion core/lib/env_config/src/chain.rs
@@ -37,7 +37,7 @@ impl FromEnv for MempoolConfig {
 #[cfg(test)]
 mod tests {
     use zksync_basic_types::L2ChainId;
-    use zksync_config::configs::chain::FeeModelVersion;
+    use zksync_config::configs::chain::{FeeModelVersion, L1BatchCommitDataGeneratorMode};
 
     use super::*;
     use crate::test_utils::{addr, EnvMutex};
@@ -94,6 +94,7 @@ mod tests {
             virtual_blocks_per_miniblock: 1,
             upload_witness_inputs_to_gcs: false,
             enum_index_migration_chunk_size: Some(2_000),
+            l1_batch_commit_data_generator_mode: L1BatchCommitDataGeneratorMode::Rollup,
         }
     }
7 changes: 4 additions & 3 deletions core/lib/types/src/aggregated_operations.rs
@@ -10,7 +10,8 @@ use zkevm_test_harness::{
 use zksync_basic_types::{ethabi::Token, L1BatchNumber};
 
 use crate::{
-    commitment::L1BatchWithMetadata, l1_batch_committer::L1BatchCommitter, ProtocolVersionId, U256,
+    commitment::L1BatchWithMetadata, l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
+    ProtocolVersionId, U256,
 };
 
 fn l1_batch_range_from_batches(
@@ -31,7 +32,7 @@ fn l1_batch_range_from_batches(
 pub struct L1BatchCommitOperation {
     pub last_committed_l1_batch: L1BatchWithMetadata,
     pub l1_batches: Vec<L1BatchWithMetadata>,
-    pub l1_batch_committer: Arc<dyn L1BatchCommitter>,
+    pub l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
 }
 
 impl L1BatchCommitOperation {
@@ -41,7 +42,7 @@ impl L1BatchCommitOperation {
             .l1_batches
             .iter()
             .map(|l1_batch_with_metadata| {
-                self.l1_batch_committer
+                self.l1_batch_commit_data_generator
                     .l1_commit_data(l1_batch_with_metadata)
             })
             .collect();
core/lib/types/src/l1_batch_committer.rs → core/lib/types/src/l1_batch_commit_data_generator.rs (renamed)
@@ -2,7 +2,7 @@ use zksync_basic_types::{ethabi::Token, U256};
 
 use crate::{commitment::L1BatchWithMetadata, utils};
 
-pub trait L1BatchCommitter
+pub trait L1BatchCommitDataGenerator
 where
     Self: std::fmt::Debug + Send + Sync,
 {
@@ -16,12 +16,12 @@ where
 }
 
 #[derive(Debug, Clone)]
-pub struct RollupModeL1BatchCommitter {}
+pub struct RollupModeL1BatchCommitDataGenerator {}
 
 #[derive(Debug, Clone)]
-pub struct ValidiumModeL1BatchCommitter {}
+pub struct ValidiumModeL1BatchCommitDataGenerator {}
 
-impl L1BatchCommitter for RollupModeL1BatchCommitter {
+impl L1BatchCommitDataGenerator for RollupModeL1BatchCommitDataGenerator {
     fn l1_commit_data(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
         println!("RollupModeL1BatchCommitter");
         let commit_data = if l1_batch_with_metadata
@@ -38,7 +38,7 @@ impl L1BatchCommitter for RollupModeL1BatchCommitter {
     }
 }
 
-impl L1BatchCommitter for ValidiumModeL1BatchCommitter {
+impl L1BatchCommitDataGenerator for ValidiumModeL1BatchCommitDataGenerator {
     fn l1_commit_data(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
         println!("ValidiumModeL1BatchCommitter");
         let commit_data = if l1_batch_with_metadata
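For reference, a short usage sketch of the renamed trait, mirroring how `L1BatchCommitOperation` and the consistency checker consume it in this diff: callers hold an `Arc<dyn L1BatchCommitDataGenerator>` and call `l1_commit_data` without caring which mode produced the calldata. The free functions below are illustrative only and are not part of the change.

```rust
use std::sync::Arc;

use zksync_basic_types::ethabi::Token;
use zksync_types::{
    commitment::L1BatchWithMetadata,
    l1_batch_commit_data_generator::{
        L1BatchCommitDataGenerator, ValidiumModeL1BatchCommitDataGenerator,
    },
};

// Illustrative call site: the concrete mode is erased behind the trait object,
// so rollup and validium commit data are produced through the same interface.
fn commit_token(
    generator: &Arc<dyn L1BatchCommitDataGenerator>,
    batch: &L1BatchWithMetadata,
) -> Token {
    generator.l1_commit_data(batch)
}

// Illustrative constructor for the validium-mode implementor.
fn build_validium_generator() -> Arc<dyn L1BatchCommitDataGenerator> {
    Arc::new(ValidiumModeL1BatchCommitDataGenerator {})
}
```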
2 changes: 1 addition & 1 deletion core/lib/types/src/lib.rs
@@ -63,7 +63,7 @@ pub mod utils;
 pub mod vk_transform;
 pub mod vm_version;
 
-pub mod l1_batch_committer;
+pub mod l1_batch_commit_data_generator;
 
 /// Denotes the first byte of the special zkSync's EIP-712-signed transaction.
 pub const EIP_712_TX_TYPE: u8 = 0x71;
19 changes: 12 additions & 7 deletions core/lib/zksync_core/src/consistency_checker/mod.rs
@@ -5,7 +5,9 @@ use tokio::sync::watch;
 use zksync_contracts::PRE_BOOJUM_COMMIT_FUNCTION;
 use zksync_dal::{ConnectionPool, StorageProcessor};
 use zksync_eth_client::{clients::QueryClient, Error as L1ClientError, EthInterface};
-use zksync_types::{l1_batch_committer::L1BatchCommitter, web3::ethabi, L1BatchNumber, H256};
+use zksync_types::{
+    l1_batch_commit_data_generator::L1BatchCommitDataGenerator, web3::ethabi, L1BatchNumber, H256,
+};
 
 use crate::{
     metrics::{CheckerComponent, EN_METRICS},
@@ -66,7 +68,7 @@ impl LocalL1BatchCommitData {
     async fn new(
         storage: &mut StorageProcessor<'_>,
         batch_number: L1BatchNumber,
-        l1_batch_committer: Arc<dyn L1BatchCommitter>,
+        l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
     ) -> anyhow::Result<Option<Self>> {
         let Some(storage_l1_batch) = storage
             .blocks_dal()
@@ -114,7 +116,7 @@ impl LocalL1BatchCommitData {
 
         Ok(Some(Self {
             is_pre_boojum,
-            l1_commit_data: l1_batch_committer.l1_commit_data(&l1_batch),
+            l1_commit_data: l1_batch_commit_data_generator.l1_commit_data(&l1_batch),
             commit_tx_hash,
         }))
     }
@@ -251,7 +253,7 @@ impl ConsistencyChecker {
     pub async fn run(
         mut self,
         mut stop_receiver: watch::Receiver<bool>,
-        l1_batch_committer: Arc<dyn L1BatchCommitter>,
+        l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
     ) -> anyhow::Result<()> {
         // It doesn't make sense to start the checker until we have at least one L1 batch with metadata.
         let earliest_l1_batch_number =
@@ -290,9 +292,12 @@ impl ConsistencyChecker {
             // The batch might be already committed but not yet processed by the external node's tree
             // OR the batch might be processed by the external node's tree but not yet committed.
             // We need both.
-            let Some(local) =
-                LocalL1BatchCommitData::new(&mut storage, batch_number, l1_batch_committer.clone())
-                    .await?
+            let Some(local) = LocalL1BatchCommitData::new(
+                &mut storage,
+                batch_number,
+                l1_batch_commit_data_generator.clone(),
+            )
+            .await?
             else {
                 tokio::time::sleep(self.sleep_interval).await;
                 continue;