chore: adhere new naming conventions #111

Merged · 3 commits merged on Aug 1, 2024
Changes from 2 commits
12 changes: 4 additions & 8 deletions src/processor/snapshot/exporter.rs
@@ -36,20 +36,16 @@ impl SnapshotExporter {
}

pub fn export_snapshot(&self, num_chunks: usize) -> Result<()> {
let l2_batch_number = self
let l1_batch_number = self
.database
.get_latest_l2_batch_number()?
.ok_or_eyre("no latest l2 batch number in snapshot db")?;
.get_latest_l1_batch_number()?
.ok_or_eyre("no latest l1 batch number in snapshot db")?;
let l2_block_number = self.database.get_latest_l2_block_number()?.unwrap_or({
tracing::warn!("WARNING: the database contains no l2 block number entry and will not be compatible with the ZKSync External Node! To export a compatible snapshot, please let the prepare-snapshot command run until an l2 block number can be found.");
U64::from(0)
});
let mut header = SnapshotHeader {
// NOTE: `l1_batch_number` in the snapshot header actually refers
// to the ZKsync batch number and not the Ethereum batch height we
// store in the snapshot database. In the snapshot database this
// field is referred to as `l2_batch_number`.
l1_batch_number: l2_batch_number.as_u64(),
l1_batch_number: l1_batch_number.as_u64(),
miniblock_number: l2_block_number.as_u64(),
..Default::default()
};
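For context on the rename: the NOTE removed above documented that the snapshot header's `l1_batch_number` is the ZKsync batch number, while the Ethereum block height is stored under a separate key. After this PR the database accessor names spell that out. Below is a minimal sketch of the accessor surface as it appears across this diff; the trait shape, parameter and return types, and the `ethers`/`eyre` imports are assumptions for illustration, not the repository's actual definitions.

```rust
use ethers::types::U64; // assumed import; the diff only shows `U64` in use
use eyre::Result;       // assumed error type, matching the `Result<()>` signatures above

/// Hypothetical view of the snapshot database API under the new naming.
trait SnapshotDatabase {
    /// Latest Ethereum (L1) block that has been fetched.
    fn get_latest_l1_block_number(&self) -> Result<Option<U64>>;
    fn set_latest_l1_block_number(&self, number: U64) -> Result<()>;

    /// Latest ZKsync batch committed on L1 (the header's `l1_batch_number`).
    fn get_latest_l1_batch_number(&self) -> Result<Option<U64>>;
    fn set_latest_l1_batch_number(&self, number: U64) -> Result<()>;

    /// Latest L2 miniblock (the header's `miniblock_number`).
    fn get_latest_l2_block_number(&self) -> Result<Option<U64>>;
    fn set_latest_l2_block_number(&self, number: U64) -> Result<()>;
}
```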
8 changes: 4 additions & 4 deletions src/processor/snapshot/mod.rs
@@ -56,7 +56,7 @@ impl SnapshotBuilder {
// Gets the next L1 batch number to be processed for use in state recovery.
pub fn get_latest_l1_batch_number(&self) -> Result<U64> {
self.database
.get_latest_l1_batch_number()
.get_latest_l1_block_number()
.map(|o| o.unwrap_or(U64::from(0)))
}
}
@@ -133,10 +133,10 @@ impl Processor for SnapshotBuilder {

let _ = self
.database
.set_latest_l2_block_number(block.l2_block_number);
.set_latest_l2_block_number(block.l1_batch_number);

if let Some(number) = block.l1_block_number {
let _ = self.database.set_latest_l1_batch_number(number);
let _ = self.database.set_latest_l1_block_number(number);
};
}
}
@@ -273,7 +273,7 @@ mod tests {
let repeated_storage_changes = IndexMap::new();
let cb = CommitBlock {
l1_block_number: Some(1),
l2_block_number: 2,
l1_batch_number: 2,
index_repeated_storage_changes: 0,
new_state_root: Vec::new(),
initial_storage_changes,
12 changes: 6 additions & 6 deletions src/processor/tree/mod.rs
@@ -65,16 +65,16 @@ impl Processor for TreeProcessor {
let mut snapshot_metric = PerfMetric::new("snapshot");
while let Some(block) = rx.recv().await {
// Check if we've already processed this block.
let latest_l2 = self
let latest_l1_batch = self
.inner_db
.lock()
.await
.get_latest_l2_batch_number()
.get_latest_l1_batch_number()
.expect("value should default to 0");
if latest_l2 >= block.l2_block_number {
if latest_l1_batch >= block.l1_batch_number {
tracing::debug!(
"Block {} has already been processed, skipping.",
block.l2_block_number
"Batch {} has already been processed, skipping.",
block.l1_batch_number
);
continue;
}
@@ -92,7 +92,7 @@
self.inner_db
.lock()
.await
.set_latest_l2_batch_number(block.l2_block_number)
.set_latest_l1_batch_number(block.l1_batch_number)
.expect("db failed");

if snapshot_metric.add(before.elapsed()) > 10 {
6 changes: 3 additions & 3 deletions src/processor/tree/tree_wrapper.rs
@@ -107,14 +107,14 @@ impl TreeWrapper {
let root_hash = output.root_hash;

tracing::debug!(
"Root hash of block {} = {}",
block.l2_block_number,
"Root hash of batch {} = {}",
block.l1_batch_number,
hex::encode(root_hash)
);

let root_hash_bytes = root_hash.as_bytes();
if root_hash_bytes == block.new_state_root {
tracing::debug!("Successfully processed block {}", block.l2_block_number);
tracing::debug!("Successfully processed batch {}", block.l1_batch_number);

Ok(root_hash)
} else {
22 changes: 11 additions & 11 deletions state-reconstruct-fetcher/src/l1_fetcher.rs
@@ -134,7 +134,7 @@ impl L1Fetcher {
if current_l1_block_number == GENESIS_BLOCK.into() {
if let Some(snapshot) = &self.inner_db {
let snapshot_latest_l1_block_number =
snapshot.lock().await.get_latest_l1_batch_number()?;
snapshot.lock().await.get_latest_l1_block_number()?;
if snapshot_latest_l1_block_number > current_l1_block_number {
current_l1_block_number = snapshot_latest_l1_block_number;
tracing::info!(
@@ -155,8 +155,8 @@ impl L1Fetcher {
metrics.first_l1_block_num = current_l1_block_number.as_u64();
metrics.latest_l1_block_num = current_l1_block_number.as_u64();
if let Some(snapshot) = &self.inner_db {
metrics.latest_l2_block_num = snapshot.lock().await.get_latest_l2_batch_number()?;
metrics.first_l2_block_num = metrics.latest_l2_block_num;
metrics.latest_l1_batch_num = snapshot.lock().await.get_latest_l1_batch_number()?;
metrics.first_l1_batch_num = metrics.latest_l1_batch_num;
}
}

@@ -225,7 +225,7 @@ impl L1Fetcher {
snapshot
.lock()
.await
.set_latest_l1_batch_number(block_num)?;
.set_latest_l1_block_number(block_num)?;
}

// Fetching is naturally ahead of parsing, but the data
@@ -268,7 +268,7 @@ impl L1Fetcher {

Ok(tokio::spawn({
async move {
let mut latest_l2_block_number = U256::zero();
let mut latest_zksync_batch_number = U256::zero();
let mut previous_hash = None;
let mut end_block = None;
loop {
@@ -350,9 +350,9 @@ impl L1Fetcher {
// topics[2]: L2 block hash.
// topics[3]: L2 commitment.

let new_l2_block_number =
let new_l1_batch_number =
U256::from_big_endian(log.topics[1].as_fixed_bytes());
if new_l2_block_number <= latest_l2_block_number {
if new_l1_batch_number <= latest_zksync_batch_number {
continue;
}

@@ -381,7 +381,7 @@ impl L1Fetcher {
previous_hash = Some(tx_hash);
}

latest_l2_block_number = new_l2_block_number;
latest_zksync_batch_number = new_l1_batch_number;
}
} else {
cancellation_token.cancelled_else_long_timeout().await;
@@ -558,7 +558,7 @@ impl L1Fetcher {

let mut metrics = metrics.lock().await;
for blk in blocks {
metrics.latest_l2_block_num = blk.l2_block_number;
metrics.latest_l1_batch_num = blk.l1_batch_number;
if let Err(e) = sink.send(blk).await {
if cancellation_token.is_cancelled() {
tracing::debug!("Shutting down parsing task...");
@@ -638,9 +638,9 @@ pub async fn parse_calldata(
));
};

let abi::Token::Uint(_previous_l2_block_number) = stored_block_info[0].clone() else {
let abi::Token::Uint(_previous_l1_batch_number) = stored_block_info[0].clone() else {
return Err(ParseError::InvalidStoredBlockInfo(
"cannot parse previous L2 block number".to_string(),
"cannot parse previous L1 block number".to_string(),
));
};

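The fetcher changes above only rename the locals; the committed batch number is still read from the first indexed topic of the BlockCommit event, per the topic layout noted in the comments (topics[1]: batch number, topics[2]: L2 block hash, topics[3]: L2 commitment). Here is a small self-contained sketch of that extraction step, assuming an `ethers` `Log`; the helper name and the length guard are illustrative additions, not code from the repository.

```rust
use ethers::types::{Log, U256};

/// Pull the newly committed ZKsync batch number out of a BlockCommit log,
/// skipping anything already seen. Sketch only; the real parsing task also
/// fetches and decodes the commit transaction itself.
fn next_batch_number(log: &Log, latest_zksync_batch_number: U256) -> Option<U256> {
    // Guard against malformed logs before touching topics[1].
    if log.topics.len() < 2 {
        return None;
    }
    // topics[1] carries the batch number as a 32-byte big-endian word.
    let new_l1_batch_number = U256::from_big_endian(log.topics[1].as_fixed_bytes());
    if new_l1_batch_number <= latest_zksync_batch_number {
        return None; // already processed; mirrors the `continue` above
    }
    Some(new_l1_batch_number)
}
```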
18 changes: 9 additions & 9 deletions state-reconstruct-fetcher/src/metrics.rs
@@ -7,13 +7,13 @@ pub const METRICS_TRACING_TARGET: &str = "metrics";
pub struct L1Metrics {
/// The first L1 block fetched.
pub first_l1_block_num: u64,
/// The first L2 block fetched.
pub first_l2_block_num: u64,
/// The first L2 batch fetched.
pub first_l1_batch_num: u64,

/// The latest L1 block fetched.
pub latest_l1_block_num: u64,
/// The latest L2 block fetched.
pub latest_l2_block_num: u64,
pub latest_l1_batch_num: u64,

/// The first L1 block to compare against when measuring progress.
pub initial_l1_block: u64,
@@ -32,9 +32,9 @@ impl L1Metrics {
pub fn new(initial_l1_block: u64) -> Self {
L1Metrics {
first_l1_block_num: 0,
first_l2_block_num: 0,
first_l1_batch_num: 0,
latest_l1_block_num: 0,
latest_l2_block_num: 0,
latest_l1_batch_num: 0,
initial_l1_block,
last_l1_block: 0,
log_acquisition: PerfMetric::new("log_acquisition"),
@@ -62,13 +62,13 @@ impl L1Metrics {
};

tracing::info!(
"PROGRESS: [{}] CUR BLOCK L1: {} L2: {} TOTAL BLOCKS PROCESSED L1: {} L2: {}",
"PROGRESS: [{}] CUR L1 BLOCK: {} L2 BATCH: {} TOTAL PROCESSED L1 BLOCKS: {} L2 BATCHES: {}",
progress,
self.latest_l1_block_num,
self.latest_l2_block_num,
self.latest_l1_batch_num,
self.latest_l1_block_num - self.first_l1_block_num,
self.latest_l2_block_num
.saturating_sub(self.first_l2_block_num)
self.latest_l1_batch_num
.saturating_sub(self.first_l1_batch_num)
);

let log_acquisition = self.log_acquisition.reset();
6 changes: 3 additions & 3 deletions state-reconstruct-fetcher/src/types/common.rs
@@ -8,7 +8,7 @@ use crate::constants::zksync::{
};

pub struct ExtractedToken {
pub new_l2_block_number: U256,
pub l1_batch_number: U256,
pub timestamp: U256,
pub new_enumeration_index: U256,
pub state_root: Vec<u8>,
@@ -30,7 +30,7 @@ impl TryFrom<&abi::Token> for ExtractedToken {
));
};

let abi::Token::Uint(new_l2_block_number) = block_elems[0].clone() else {
let abi::Token::Uint(l1_batch_number) = block_elems[0].clone() else {
return Err(ParseError::InvalidCommitBlockInfo(
"blockNumber".to_string(),
));
@@ -75,7 +75,7 @@ impl TryFrom<&abi::Token> for ExtractedToken {
};

Ok(Self {
new_l2_block_number,
l1_batch_number,
timestamp,
new_enumeration_index,
state_root,
12 changes: 6 additions & 6 deletions state-reconstruct-fetcher/src/types/mod.rs
@@ -68,11 +68,11 @@ pub enum CommitBlockInfo {
/// Block with all required fields extracted from a [`CommitBlockInfo`].
#[derive(Debug, Serialize, Deserialize)]
pub struct CommitBlock {
/// L1 block number.
/// Ethereum block number.
#[serde(skip)]
pub l1_block_number: Option<u64>,
/// L2 block number.
pub l2_block_number: u64,
/// ZKSync batch number.
pub l1_batch_number: u64,
/// Next unused key serial number.
pub index_repeated_storage_changes: u64,
/// The state root of the full state tree.
@@ -108,7 +108,7 @@ impl CommitBlock {
match block_type {
CommitBlockInfo::V1(block) => CommitBlock {
l1_block_number: None,
l2_block_number: block.block_number,
l1_batch_number: block.l1_batch_number,
index_repeated_storage_changes: block.index_repeated_storage_changes,
new_state_root: block.new_state_root,
initial_storage_changes: block
@@ -147,7 +147,7 @@ impl CommitBlock {

CommitBlock {
l1_block_number: None,
l2_block_number: block.block_number,
l1_batch_number: block.l1_batch_number,
index_repeated_storage_changes: block.index_repeated_storage_changes,
new_state_root: block.new_state_root,
initial_storage_changes,
@@ -186,7 +186,7 @@ impl CommitBlock {

Ok(CommitBlock {
l1_block_number: None,
l2_block_number: block.block_number,
l1_batch_number: block.l1_batch_number,
index_repeated_storage_changes: block.index_repeated_storage_changes,
new_state_root: block.new_state_root,
initial_storage_changes,
14 changes: 7 additions & 7 deletions state-reconstruct-fetcher/src/types/v1.rs
@@ -8,8 +8,8 @@ use super::{CommitBlockFormat, CommitBlockInfo, ParseError};
/// Data needed to commit new block
#[derive(Debug, Serialize, Deserialize)]
pub struct V1 {
/// L2 block number.
pub block_number: u64,
/// ZKSync batch number.
pub l1_batch_number: u64,
/// Unix timestamp denoting the start of the block execution.
pub timestamp: u64,
/// The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more.
@@ -46,7 +46,7 @@ impl TryFrom<&abi::Token> for V1 {
/// Try to parse Ethereum ABI token into [`V1`].
fn try_from(token: &abi::Token) -> Result<Self, Self::Error> {
let ExtractedToken {
new_l2_block_number,
l1_batch_number,
timestamp,
new_enumeration_index,
state_root,
@@ -83,7 +83,7 @@ impl TryFrom<&abi::Token> for V1 {
);

let mut blk = V1 {
block_number: new_l2_block_number.as_u64(),
l1_batch_number: l1_batch_number.as_u64(),
timestamp: timestamp.as_u64(),
index_repeated_storage_changes: new_enumeration_index,
new_state_root: state_root,
@@ -145,7 +145,7 @@ impl TryFrom<&abi::Token> for V1 {
}

struct ExtractedToken {
new_l2_block_number: U256,
l1_batch_number: U256,
timestamp: U256,
new_enumeration_index: U256,
state_root: Vec<u8>,
@@ -168,7 +168,7 @@ impl TryFrom<&abi::Token> for ExtractedToken {
));
};

let abi::Token::Uint(new_l2_block_number) = block_elems[0].clone() else {
let abi::Token::Uint(l1_batch_number) = block_elems[0].clone() else {
return Err(ParseError::InvalidCommitBlockInfo(
"blockNumber".to_string(),
));
@@ -247,7 +247,7 @@ impl TryFrom<&abi::Token> for ExtractedToken {
};

Ok(Self {
new_l2_block_number,
l1_batch_number,
timestamp,
new_enumeration_index,
state_root,
8 changes: 4 additions & 4 deletions state-reconstruct-fetcher/src/types/v2.rs
@@ -9,8 +9,8 @@ use super::{
/// Data needed to commit new block
#[derive(Debug, Serialize, Deserialize)]
pub struct V2 {
/// L2 block number.
pub block_number: u64,
/// ZKSync batch number.
pub l1_batch_number: u64,
/// Unix timestamp denoting the start of the block execution.
pub timestamp: u64,
/// The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more.
@@ -39,7 +39,7 @@ impl TryFrom<&abi::Token> for V2 {
/// Try to parse Ethereum ABI token into [`V2`].
fn try_from(token: &abi::Token) -> Result<Self, Self::Error> {
let ExtractedToken {
new_l2_block_number,
l1_batch_number,
timestamp,
new_enumeration_index,
state_root,
@@ -52,7 +52,7 @@ impl TryFrom<&abi::Token> for V2 {

let total_l2_to_l1_pubdata = parse_resolved_pubdata(&total_l2_to_l1_pubdata)?;
let blk = V2 {
block_number: new_l2_block_number.as_u64(),
l1_batch_number: l1_batch_number.as_u64(),
timestamp: timestamp.as_u64(),
index_repeated_storage_changes: new_enumeration_index,
new_state_root: state_root,