Skip to content

Commit

Permalink
remove tokenize
Browse the repository at this point in the history
  • Loading branch information
StanislavBreadless committed Jan 17, 2025
1 parent 03b05ec commit 8688c34
Showing 1 changed file with 2 additions and 41 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use zksync_types::{

use crate::{
i_executor::structures::{StoredBatchInfo, SUPPORTED_ENCODING_VERSION},
Tokenizable, Tokenize,
Tokenizable,
};

/// Input required to encode `executeBatches` call.
Expand All @@ -21,7 +21,7 @@ impl ExecuteBatches {
// of the underlying chain.
// However, we can send batches with older protocol versions just by changing the encoding.
// This makes the migration simpler.
pub fn encode_for_eth_tx(self, chain_protocol_version: ProtocolVersionId) -> Vec<Token> {
pub fn encode_for_eth_tx(&self, chain_protocol_version: ProtocolVersionId) -> Vec<Token> {
let internal_protocol_version = self.l1_batches[0].header.protocol_version.unwrap();

if internal_protocol_version.is_pre_gateway() && chain_protocol_version.is_pre_gateway() {
Expand Down Expand Up @@ -58,42 +58,3 @@ impl ExecuteBatches {
}
}
}

impl Tokenize for &ExecuteBatches {
    /// Encodes this batch-execution request as `ethabi` tokens for the L1 contract call.
    ///
    /// The encoding depends on the protocol version of the *first* batch:
    /// - pre-gateway: a single array of `StoredBatchInfo` tokens (legacy layout);
    /// - post-gateway: `(first_batch_number, last_batch_number, bytes)`, where `bytes`
    ///   is the `SUPPORTED_ENCODING_VERSION` marker byte followed by the ABI-encoded
    ///   batch infos and priority-op proofs.
    ///
    /// # Panics
    /// Panics if `l1_batches` is empty or if the first batch header has no
    /// `protocol_version` set (both are invariants upheld by the caller).
    fn into_tokens(self) -> Vec<Token> {
        let protocol_version = self.l1_batches[0].header.protocol_version.unwrap();

        if protocol_version.is_pre_gateway() {
            // Legacy encoding: only the stored-batch-info array, no version prefix.
            vec![Token::Array(
                self.l1_batches
                    .iter()
                    .map(|batch| StoredBatchInfo::from(batch).into_token())
                    .collect(),
            )]
        } else {
            // New encoding: ABI-encode the batch infos together with the
            // priority-operation proofs, then prepend the encoding-version byte.
            let encoded_data = encode(&[
                Token::Array(
                    self.l1_batches
                        .iter()
                        .map(|batch| StoredBatchInfo::from(batch).into_token())
                        .collect(),
                ),
                Token::Array(
                    self.priority_ops_proofs
                        .iter()
                        .map(|proof| proof.into_token())
                        .collect(),
                ),
            ]);
            // `concat()` already yields an owned `Vec<u8>`; the original
            // `.to_vec()` here performed a redundant full copy.
            let execute_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data].concat();

            vec![
                // Range of batch numbers covered by this execute call.
                Token::Uint(self.l1_batches[0].header.number.0.into()),
                Token::Uint(self.l1_batches.last().unwrap().header.number.0.into()),
                Token::Bytes(execute_data),
            ]
        }
    }
}

0 comments on commit 8688c34

Please sign in to comment.