From ce1a8d1c30297474b2e2248eaf079d41a9231bd4 Mon Sep 17 00:00:00 2001 From: 0xaatif <169152398+0xaatif@users.noreply.github.com> Date: Tue, 13 Aug 2024 15:38:17 +0100 Subject: [PATCH] refactor: trace_decoder::decoding (#469) * mark: 0xaatif/refactor-trace-decoder-decoding * refactor: remove inappropriate static methods * refactor: remove inappropriate method * refactor: remove TraceParsingErrorReason * refactor: remove LocatedError * refactor: remove EMPTY_ACCOUNT_BYTES_RLPED * refactor: remove update_val_if_some * refactor: impl Display for TrieType * wibble * refactor: inline TraceParsingResult * wibble * wibble: order * refactor: remove unused variable * wibble: inline TrieRoots etc * wibble * review: WithHash -> CustomFmt --- Cargo.lock | 1 + trace_decoder/Cargo.toml | 1 + trace_decoder/src/decoding.rs | 1257 +++++++++++++-------------------- trace_decoder/src/lib.rs | 14 +- 4 files changed, 520 insertions(+), 753 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f4321e206..78efddfd1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5176,6 +5176,7 @@ dependencies = [ "serde_path_to_error", "smt_trie", "stackstack", + "strum", "thiserror", "u4", "winnow 0.6.13", diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index ed086283c..bc1f58625 100644 --- a/trace_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -33,6 +33,7 @@ rlp = { workspace = true } serde = { workspace = true } smt_trie = { workspace = true } stackstack = "0.3.0" +strum = { version = "0.26.3", features = ["derive"] } thiserror = { workspace = true } u4 = { workspace = true } winnow = { workspace = true } diff --git a/trace_decoder/src/decoding.rs b/trace_decoder/src/decoding.rs index 80ac4e385..0ba75c0f2 100644 --- a/trace_decoder/src/decoding.rs +++ b/trace_decoder/src/decoding.rs @@ -1,9 +1,6 @@ -use std::{ - collections::HashMap, - fmt::{self, Display, Formatter}, - iter::once, -}; +use std::{collections::HashMap, fmt}; +use anyhow::{anyhow, Context as _}; use 
ethereum_types::{Address, BigEndianHash, H256, U256, U512}; use evm_arithmetization::{ generation::{ @@ -13,200 +10,21 @@ use evm_arithmetization::{ proof::{BlockMetadata, ExtraBlockData, TrieRoots}, testing_utils::{BEACON_ROOTS_CONTRACT_ADDRESS_HASHED, HISTORY_BUFFER_LENGTH}, }; -use log::trace; use mpt_trie::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, Node, PartialTrie}, special_query::path_for_query, - trie_ops::{TrieOpError, TrieOpResult}, - trie_subsets::{create_trie_subset, SubsetTrieError}, - utils::{IntoTrieKey, TriePath}, + utils::{IntoTrieKey as _, TriePath}, }; -use thiserror::Error; use crate::{ hash, processed_block_trace::{ NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, TxnMetaState, }, - OtherBlockData, + OtherBlockData, PartialTriePreImages, }; -/// Stores the result of parsing tries. Returns a [TraceParsingError] upon -/// failure. -pub type TraceParsingResult = Result>; - -const EMPTY_ACCOUNT_BYTES_RLPED: [u8; 70] = [ - 248, 68, 128, 128, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, - 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, - 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, - 123, 250, 216, 4, 93, 133, 164, 112, -]; - -// This is just `rlp(0)`. -const ZERO_STORAGE_SLOT_VAL_RLPED: [u8; 1] = [128]; - -/// Represents errors that can occur during the processing of a block trace. -/// -/// This struct is intended to encapsulate various kinds of errors that might -/// arise when parsing, validating, or otherwise processing the trace data of -/// blockchain blocks. It could include issues like malformed trace data, -/// inconsistencies found during processing, or any other condition that -/// prevents successful completion of the trace processing task. -#[derive(Debug)] -pub struct TraceParsingError { - block_num: Option, - block_chain_id: Option, - txn_idx: Option, - addr: Option
, - h_addr: Option, - slot: Option, - slot_value: Option, - reason: TraceParsingErrorReason, // The original error type -} - -impl std::fmt::Display for TraceParsingError { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - let h_slot = self.slot.map(|slot| { - let mut buf = [0u8; 64]; - slot.to_big_endian(&mut buf); - hash(buf) - }); - write!( - f, - "Error processing trace: {}\n{}{}{}{}{}{}{}{}", - self.reason, - optional_field("Block num", self.block_num), - optional_field("Block chain id", self.block_chain_id), - optional_field("Txn idx", self.txn_idx), - optional_field("Address", self.addr.as_ref()), - optional_field("Hashed address", self.h_addr.as_ref()), - optional_field_hex("Slot", self.slot), - optional_field("Hashed Slot", h_slot), - optional_field_hex("Slot value", self.slot_value), - ) - } -} - -impl std::error::Error for TraceParsingError {} - -impl TraceParsingError { - /// Function to create a new TraceParsingError with mandatory fields - const fn new(reason: TraceParsingErrorReason) -> Self { - Self { - block_num: None, - block_chain_id: None, - txn_idx: None, - addr: None, - h_addr: None, - slot: None, - slot_value: None, - reason, - } - } - - /// Builder method to set block_num - fn block_num(&mut self, block_num: U256) -> &mut Self { - self.block_num = Some(block_num); - self - } - - /// Builder method to set block_chain_id - fn block_chain_id(&mut self, block_chain_id: U256) -> &mut Self { - self.block_chain_id = Some(block_chain_id); - self - } - - /// Builder method to set txn_idx - pub fn txn_idx(&mut self, txn_idx: usize) -> &mut Self { - self.txn_idx = Some(txn_idx); - self - } - - /// Builder method to set addr - pub fn addr(&mut self, addr: Address) -> &mut Self { - self.addr = Some(addr); - self - } - - /// Builder method to set h_addr - pub fn h_addr(&mut self, h_addr: H256) -> &mut Self { - self.h_addr = Some(h_addr); - self - } - - /// Builder method to set slot - pub fn slot(&mut self, slot: U512) -> &mut Self { - 
self.slot = Some(slot); - self - } - - /// Builder method to set slot_value - pub fn slot_value(&mut self, slot_value: U512) -> &mut Self { - self.slot_value = Some(slot_value); - self - } -} - -/// An error reason for trie parsing. -#[derive(Debug, Error)] -pub enum TraceParsingErrorReason { - /// Failure to decode an Ethereum Account. - #[error("Failed to decode RLP bytes ({0}) as an Ethereum account due to the error: {1}")] - AccountDecode(String, String), - - /// Failure to decode a transaction receipt. - #[error("Failed to decode RLP bytes ({0}) as a transaction receipt due to the error: {1}")] - ReceiptDecode(String, String), - - /// Failure due to trying to access or delete a storage trie missing - /// from the base trie. - #[error("Missing account storage trie in base trie when constructing subset partial trie for txn (account: {0:x})")] - MissingAccountStorageTrie(H256), - - /// Failure due to missing keys when creating a sub-partial trie. - #[error("Missing key {0:x} when creating sub-partial tries (Trie type: {1})")] - MissingKeysCreatingSubPartialTrie(Nibbles, TrieType), - - /// Failure due to trying to withdraw from a missing account - #[error("No account present at {0:x} (hashed: {1:x}) to withdraw {2} Gwei from!")] - MissingWithdrawalAccount(Address, H256, U256), - - /// Failure due to a trie operation error. - #[error("Trie operation error: {0}")] - TrieOpError(TrieOpError), -} - -impl From for TraceParsingError { - fn from(err: TrieOpError) -> Self { - // Convert TrieOpError into TraceParsingError - TraceParsingError::new(TraceParsingErrorReason::TrieOpError(err)) - } -} -/// An enum to cover all Ethereum trie types (see for details). -#[derive(Debug)] -pub enum TrieType { - /// State trie. - State, - /// Storage trie. - Storage, - /// Receipt trie. - Receipt, - /// Transaction trie. 
- Txn, -} - -impl Display for TrieType { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - TrieType::State => write!(f, "state"), - TrieType::Storage => write!(f, "storage"), - TrieType::Receipt => write!(f, "receipt"), - TrieType::Txn => write!(f, "transaction"), - } - } -} - /// The current state of all tries as we process txn deltas. These are mutated /// after every txn we process in the trace. #[derive(Clone, Debug, Default)] @@ -226,573 +44,531 @@ struct TrieDeltaApplicationOutput { additional_storage_trie_paths_to_not_hash: HashMap>, } -impl ProcessedBlockTrace { - pub fn into_txn_proof_gen_ir( - self, - other_data: OtherBlockData, - ) -> TraceParsingResult> { - let mut curr_block_tries = PartialTrieState { - state: self.tries.state.as_hashed_partial_trie().clone(), - storage: self - .tries - .storage - .iter() - .map(|(k, v)| (*k, v.as_hashed_partial_trie().clone())) - .collect(), - ..Default::default() - }; - - let mut extra_data = ExtraBlockData { - checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, - txn_number_before: U256::zero(), - txn_number_after: U256::zero(), - gas_used_before: U256::zero(), - gas_used_after: U256::zero(), - }; +pub fn into_txn_proof_gen_ir( + ProcessedBlockTrace { + tries: PartialTriePreImages { state, storage }, + txn_info, + withdrawals, + }: ProcessedBlockTrace, + other_data: OtherBlockData, +) -> anyhow::Result> { + let mut curr_block_tries = PartialTrieState { + state: state.as_hashed_partial_trie().clone(), + storage: storage + .iter() + .map(|(k, v)| (*k, v.as_hashed_partial_trie().clone())) + .collect(), + ..Default::default() + }; + + let mut extra_data = ExtraBlockData { + checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, + txn_number_before: U256::zero(), + txn_number_after: U256::zero(), + gas_used_before: U256::zero(), + gas_used_after: U256::zero(), + }; + + // Dummy payloads do not increment this accumulator. 
+ // For actual transactions, it will match their position in the block. + let mut txn_idx = 0; + + let mut txn_gen_inputs = txn_info + .into_iter() + .map(|txn_info| { + let is_initial_payload = txn_idx == 0; + + let current_idx = txn_idx; + if !txn_info.meta.is_dummy() { + txn_idx += 1; + } - // Dummy payloads do not increment this accumulator. - // For actual transactions, it will match their position in the block. - let mut txn_idx = 0; + process_txn_info( + current_idx, + is_initial_payload, + txn_info, + &mut curr_block_tries, + &mut extra_data, + &other_data, + ) + .context(format!("at transaction index {}", txn_idx)) + }) + .collect::>>() + .context(format!( + "at block num {} with chain id {}", + other_data.b_data.b_meta.block_number, other_data.b_data.b_meta.block_chain_id + ))?; + + if !withdrawals.is_empty() { + add_withdrawals_to_txns(&mut txn_gen_inputs, &mut curr_block_tries, withdrawals)?; + } - let mut txn_gen_inputs = self - .txn_info - .into_iter() - .map(|txn_info| { - let is_initial_payload = txn_idx == 0; + Ok(txn_gen_inputs) +} - let current_idx = txn_idx; - if !txn_info.meta.is_dummy() { - txn_idx += 1; +/// Cancun HF specific: At the start of a block, prior txn execution, we +/// need to update the storage of the beacon block root contract. +// See . 
+fn update_beacon_block_root_contract_storage( + trie_state: &mut PartialTrieState, + delta_out: &mut TrieDeltaApplicationOutput, + nodes_used: &mut NodesUsedByTxn, + block_data: &BlockMetadata, +) -> anyhow::Result<()> { + const HISTORY_BUFFER_LENGTH_MOD: U256 = U256([HISTORY_BUFFER_LENGTH.1, 0, 0, 0]); + const ADDRESS: H256 = H256(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED); + + let timestamp_idx = block_data.block_timestamp % HISTORY_BUFFER_LENGTH_MOD; + let timestamp = rlp::encode(&block_data.block_timestamp).to_vec(); + + let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH_MOD; + let calldata = rlp::encode(&U256::from_big_endian( + &block_data.parent_beacon_block_root.0, + )) + .to_vec(); + + let storage_trie = trie_state + .storage + .get_mut(&ADDRESS) + .context(format!("missing account storage trie {:x}", ADDRESS))?; + + let mut slots_nibbles = vec![]; + + for (slot, val) in [(timestamp_idx, timestamp), (root_idx, calldata)] + .iter() + .map(|(k, v)| { + ( + Nibbles::from_h256_be(hash(Nibbles::from_h256_be(H256::from_uint(k)).bytes_be())), + v, + ) + }) + { + slots_nibbles.push(slot); + + // If we are writing a zero, then we actually need to perform a delete. 
+ match val == &ZERO_STORAGE_SLOT_VAL_RLPED { + false => { + storage_trie.insert(slot, val.clone()).context(format!( + "at slot {} with value {}", + CustomFmt(U512::from_big_endian(slot.bytes_be().as_slice())), + U512::from_big_endian(val.as_slice()) + ))?; + + delta_out + .additional_storage_trie_paths_to_not_hash + .entry(ADDRESS) + .or_default() + .push(slot); + } + true => { + if let Ok(Some(remaining_slot_key)) = + delete_node_and_report_remaining_key_if_branch_collapsed(storage_trie, &slot) + { + delta_out + .additional_storage_trie_paths_to_not_hash + .entry(ADDRESS) + .or_default() + .push(remaining_slot_key); } - - Self::process_txn_info( - current_idx, - is_initial_payload, - txn_info, - &mut curr_block_tries, - &mut extra_data, - &other_data, - ) - .map_err(|mut e| { - e.txn_idx(txn_idx); - e - }) - }) - .collect::>>() - .map_err(|mut e| { - e.block_num(other_data.b_data.b_meta.block_number); - e.block_chain_id(other_data.b_data.b_meta.block_chain_id); - e - })?; - - if !self.withdrawals.is_empty() { - Self::add_withdrawals_to_txns( - &mut txn_gen_inputs, - &mut curr_block_tries, - self.withdrawals, - )?; + } } + } + + nodes_used.storage_accesses.push((ADDRESS, slots_nibbles)); + + let addr_nibbles = Nibbles::from_h256_be(ADDRESS); + delta_out + .additional_state_trie_paths_to_not_hash + .push(addr_nibbles); + let addr_bytes = trie_state + .state + .get(addr_nibbles) + .context(format!("missing account storage trie {:x}", ADDRESS))?; + let mut account = account_from_rlped_bytes(addr_bytes)?; + + account.storage_root = storage_trie.hash(); + + let updated_account_bytes = rlp::encode(&account); + trie_state + .state + .insert(addr_nibbles, updated_account_bytes.to_vec()) + .context(format!( + "at slot {}", + CustomFmt(U512::from_big_endian(addr_nibbles.bytes_be().as_slice())) + ))?; + + Ok(()) +} - Ok(txn_gen_inputs) +fn update_txn_and_receipt_tries( + trie_state: &mut PartialTrieState, + meta: &TxnMetaState, + txn_idx: usize, +) -> anyhow::Result<()> { + 
if meta.is_dummy() { + // This is a dummy payload, that does not mutate these tries. + return Ok(()); } - /// Cancun HF specific: At the start of a block, prior txn execution, we - /// need to update the storage of the beacon block root contract. - // See . - fn update_beacon_block_root_contract_storage( - trie_state: &mut PartialTrieState, - delta_out: &mut TrieDeltaApplicationOutput, - nodes_used: &mut NodesUsedByTxn, - block_data: &BlockMetadata, - ) -> TraceParsingResult<()> { - const HISTORY_BUFFER_LENGTH_MOD: U256 = U256([HISTORY_BUFFER_LENGTH.1, 0, 0, 0]); - const ADDRESS: H256 = H256(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED); - - let timestamp_idx = block_data.block_timestamp % HISTORY_BUFFER_LENGTH_MOD; - let timestamp = rlp::encode(&block_data.block_timestamp).to_vec(); - - let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH_MOD; - let calldata = rlp::encode(&U256::from_big_endian( - &block_data.parent_beacon_block_root.0, - )) - .to_vec(); + let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); + trie_state.txn.insert(txn_k, meta.txn_bytes())?; + + Ok(trie_state + .receipt + .insert(txn_k, meta.receipt_node_bytes.as_ref())?) +} + +/// If the account does not have a storage trie or does but is not +/// accessed by any txns, then we still need to manually create an entry for +/// them. 
+fn init_any_needed_empty_storage_tries<'a>( + storage_tries: &mut HashMap, + accounts_with_storage: impl Iterator, + state_accounts_with_no_accesses_but_storage_tries: &'a HashMap, +) { + for h_addr in accounts_with_storage { + if !storage_tries.contains_key(h_addr) { + let trie = state_accounts_with_no_accesses_but_storage_tries + .get(h_addr) + .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) + .unwrap_or_default(); + + storage_tries.insert(*h_addr, trie); + }; + } +} +fn create_minimal_partial_tries_needed_by_txn( + curr_block_tries: &PartialTrieState, + nodes_used_by_txn: &NodesUsedByTxn, + txn_idx: usize, + delta_application_out: TrieDeltaApplicationOutput, +) -> anyhow::Result { + let state_trie = create_minimal_state_partial_trie( + &curr_block_tries.state, + nodes_used_by_txn.state_accesses.iter().cloned(), + delta_application_out + .additional_state_trie_paths_to_not_hash + .into_iter(), + )?; + + let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); + + let transactions_trie = + create_trie_subset_wrapped(&curr_block_tries.txn, [txn_k], TrieType::Txn)?; + + let receipts_trie = + create_trie_subset_wrapped(&curr_block_tries.receipt, [txn_k], TrieType::Receipt)?; + + let storage_tries = create_minimal_storage_partial_tries( + &curr_block_tries.storage, + nodes_used_by_txn.storage_accesses.iter(), + &delta_application_out.additional_storage_trie_paths_to_not_hash, + )?; + + Ok(TrieInputs { + state_trie, + transactions_trie, + receipts_trie, + storage_tries, + }) +} + +fn apply_deltas_to_trie_state( + trie_state: &mut PartialTrieState, + deltas: &NodesUsedByTxn, + meta: &TxnMetaState, +) -> anyhow::Result { + let mut out = TrieDeltaApplicationOutput::default(); + + for (hashed_acc_addr, storage_writes) in deltas.storage_writes.iter() { let storage_trie = trie_state .storage - .get_mut(&ADDRESS) - .ok_or(TraceParsingError::new( - TraceParsingErrorReason::MissingAccountStorageTrie(ADDRESS), + .get_mut(hashed_acc_addr) + 
.context(format!( + "missing account storage trie {:x}", + hashed_acc_addr ))?; - let mut slots_nibbles = vec![]; - - for (slot, val) in [(timestamp_idx, timestamp), (root_idx, calldata)] + for (slot, val) in storage_writes .iter() - .map(|(k, v)| { - ( - Nibbles::from_h256_be(hash( - Nibbles::from_h256_be(H256::from_uint(k)).bytes_be(), - )), - v, - ) - }) + .map(|(k, v)| (Nibbles::from_h256_be(hash(k.bytes_be())), v)) { - slots_nibbles.push(slot); - // If we are writing a zero, then we actually need to perform a delete. match val == &ZERO_STORAGE_SLOT_VAL_RLPED { - false => { - storage_trie.insert(slot, val.clone()).map_err(|err| { - let mut e = - TraceParsingError::new(TraceParsingErrorReason::TrieOpError(err)); - e.slot(U512::from_big_endian(slot.bytes_be().as_slice())); - e.slot_value(U512::from_big_endian(val.as_slice())); - e - })?; - - delta_out - .additional_storage_trie_paths_to_not_hash - .entry(ADDRESS) - .or_default() - .push(slot); - } + false => storage_trie.insert(slot, val.clone()).context(format!( + "at slot {} with value {}", + CustomFmt(U512::from_big_endian(slot.bytes_be().as_slice())), + U512::from_big_endian(val.as_slice()) + ))?, true => { - if let Ok(Some(remaining_slot_key)) = - Self::delete_node_and_report_remaining_key_if_branch_collapsed( + if let Some(remaining_slot_key) = + delete_node_and_report_remaining_key_if_branch_collapsed( storage_trie, &slot, - ) + )? 
{ - delta_out - .additional_storage_trie_paths_to_not_hash - .entry(ADDRESS) + out.additional_storage_trie_paths_to_not_hash + .entry(*hashed_acc_addr) .or_default() .push(remaining_slot_key); } } }; } + } - nodes_used.storage_accesses.push((ADDRESS, slots_nibbles)); - - let addr_nibbles = Nibbles::from_h256_be(ADDRESS); - delta_out - .additional_state_trie_paths_to_not_hash - .push(addr_nibbles); - let addr_bytes = trie_state.state.get(addr_nibbles).ok_or_else(|| { - TraceParsingError::new(TraceParsingErrorReason::MissingAccountStorageTrie(ADDRESS)) - })?; - let mut account = account_from_rlped_bytes(addr_bytes)?; - - account.storage_root = storage_trie.hash(); + for (hashed_acc_addr, s_trie_writes) in deltas.state_writes.iter() { + let val_k = Nibbles::from_h256_be(*hashed_acc_addr); - let updated_account_bytes = rlp::encode(&account); - trie_state + // If the account was created, then it will not exist in the trie yet. + let is_created = !trie_state.state.contains(val_k); + let mut account = trie_state .state - .insert(addr_nibbles, updated_account_bytes.to_vec()) - .map_err(|err| { - let mut e = TraceParsingError::new(TraceParsingErrorReason::TrieOpError(err)); - e.slot(U512::from_big_endian(addr_nibbles.bytes_be().as_slice())); - e - })?; - - Ok(()) - } + .get(val_k) + .map(account_from_rlped_bytes) + .transpose()? + .unwrap_or_default(); + + s_trie_writes.apply_writes_to_state_node( + &mut account, + hashed_acc_addr, + &trie_state.storage, + )?; - fn update_txn_and_receipt_tries( - trie_state: &mut PartialTrieState, - meta: &TxnMetaState, - txn_idx: usize, - ) -> TrieOpResult<()> { - if meta.is_dummy() { - // This is a dummy payload, that does not mutate these tries. - return Ok(()); + let updated_account_bytes = rlp::encode(&account); + if is_created { + // If the account did not exist prior this transaction, we + // need to make sure the transaction didn't revert. + + // Check status in the receipt. 
+ let (_, _, receipt) = decode_receipt(&meta.receipt_node_bytes) + .map_err(|_| anyhow!("couldn't RLP-decode receipt node bytes"))?; + + if !receipt.status { + // The transaction failed, hence any created account should be removed. + trie_state.state.delete(val_k)?; + trie_state.storage.remove(hashed_acc_addr); + continue; + } } - - let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); - trie_state.txn.insert(txn_k, meta.txn_bytes())?; - trie_state - .receipt - .insert(txn_k, meta.receipt_node_bytes.as_ref()) - } - - /// If the account does not have a storage trie or does but is not - /// accessed by any txns, then we still need to manually create an entry for - /// them. - fn init_any_needed_empty_storage_tries<'a>( - storage_tries: &mut HashMap, - accounts_with_storage: impl Iterator, - state_accounts_with_no_accesses_but_storage_tries: &'a HashMap, - ) { - for h_addr in accounts_with_storage { - if !storage_tries.contains_key(h_addr) { - let trie = state_accounts_with_no_accesses_but_storage_tries - .get(h_addr) - .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) - .unwrap_or_default(); - - storage_tries.insert(*h_addr, trie); - }; - } + .state + .insert(val_k, updated_account_bytes.to_vec())?; } - fn create_minimal_partial_tries_needed_by_txn( - curr_block_tries: &PartialTrieState, - nodes_used_by_txn: &NodesUsedByTxn, - txn_idx: usize, - delta_application_out: TrieDeltaApplicationOutput, - _coin_base_addr: &Address, - ) -> TraceParsingResult { - let state_trie = create_minimal_state_partial_trie( - &curr_block_tries.state, - nodes_used_by_txn.state_accesses.iter().cloned(), - delta_application_out - .additional_state_trie_paths_to_not_hash - .into_iter(), - )?; - - let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); - - let transactions_trie = - create_trie_subset_wrapped(&curr_block_tries.txn, once(txn_k), TrieType::Txn)?; + Ok(out) +} - let receipts_trie = - create_trie_subset_wrapped(&curr_block_tries.receipt, 
once(txn_k), TrieType::Receipt)?; +fn get_trie_trace(trie: &HashedPartialTrie, k: &Nibbles) -> TriePath { + path_for_query(trie, *k, true).collect() +} - let storage_tries = create_minimal_storage_partial_tries( - &curr_block_tries.storage, - nodes_used_by_txn.storage_accesses.iter(), - &delta_application_out.additional_storage_trie_paths_to_not_hash, - )?; +/// If a branch collapse occurred after a delete, then we must ensure that +/// the other single child that remains also is not hashed when passed into +/// plonky2. Returns the key to the remaining child if a collapse occurred. +fn delete_node_and_report_remaining_key_if_branch_collapsed( + trie: &mut HashedPartialTrie, + delete_k: &Nibbles, +) -> anyhow::Result> { + let old_trace = get_trie_trace(trie, delete_k); + trie.delete(*delete_k)?; + let new_trace = get_trie_trace(trie, delete_k); + + Ok(node_deletion_resulted_in_a_branch_collapse( + &old_trace, &new_trace, + )) +} - Ok(TrieInputs { - state_trie, - transactions_trie, - receipts_trie, - storage_tries, - }) +/// Comparing the path of the deleted key before and after the deletion, +/// determine if the deletion resulted in a branch collapsing into a leaf or +/// extension node, and return the path to the remaining child if this +/// occurred. +fn node_deletion_resulted_in_a_branch_collapse( + old_path: &TriePath, + new_path: &TriePath, +) -> Option { + // Collapse requires at least 2 nodes. 
+ if old_path.0.len() < 2 { + return None; } - fn apply_deltas_to_trie_state( - trie_state: &mut PartialTrieState, - deltas: &NodesUsedByTxn, - meta: &TxnMetaState, - ) -> TraceParsingResult { - let mut out = TrieDeltaApplicationOutput::default(); - - for (hashed_acc_addr, storage_writes) in deltas.storage_writes.iter() { - let storage_trie = trie_state.storage.get_mut(hashed_acc_addr).ok_or_else(|| { - let hashed_acc_addr = *hashed_acc_addr; - let mut e = TraceParsingError::new( - TraceParsingErrorReason::MissingAccountStorageTrie(hashed_acc_addr), - ); - e.h_addr(hashed_acc_addr); - e - })?; - - for (slot, val) in storage_writes - .iter() - .map(|(k, v)| (Nibbles::from_h256_be(hash(k.bytes_be())), v)) - { - // If we are writing a zero, then we actually need to perform a delete. - match val == &ZERO_STORAGE_SLOT_VAL_RLPED { - false => storage_trie.insert(slot, val.clone()).map_err(|err| { - let mut e = - TraceParsingError::new(TraceParsingErrorReason::TrieOpError(err)); - e.slot(U512::from_big_endian(slot.bytes_be().as_slice())); - e.slot_value(U512::from_big_endian(val.as_slice())); - e - })?, - true => { - if let Some(remaining_slot_key) = - Self::delete_node_and_report_remaining_key_if_branch_collapsed( - storage_trie, - &slot, - ) - .map_err(TraceParsingError::from)? - { - out.additional_storage_trie_paths_to_not_hash - .entry(*hashed_acc_addr) - .or_default() - .push(remaining_slot_key); - } - } - }; - } - } - - for (hashed_acc_addr, s_trie_writes) in deltas.state_writes.iter() { - let val_k = Nibbles::from_h256_be(*hashed_acc_addr); - - // If the account was created, then it will not exist yet in the trie. 
- let (is_created, val_bytes) = match trie_state.state.get(val_k) { - Some(bytes) => (false, bytes), - None => (true, &EMPTY_ACCOUNT_BYTES_RLPED[..]), - }; - - let mut account = account_from_rlped_bytes(val_bytes)?; - - s_trie_writes.apply_writes_to_state_node( - &mut account, - hashed_acc_addr, - &trie_state.storage, - )?; - - let updated_account_bytes = rlp::encode(&account); - if is_created { - // If the account did not exist prior this transaction, we - // need to make sure the transaction didn't revert. - - // Check status in the receipt. - let (_, _, receipt) = decode_receipt(&meta.receipt_node_bytes).map_err(|err| { - Box::new(TraceParsingError::new( - TraceParsingErrorReason::ReceiptDecode( - hex::encode(&meta.receipt_node_bytes), - format!("{:?}", err), - ), - )) - })?; - - if !receipt.status { - // The transaction failed, hence any created account should be removed. - trie_state - .state - .delete(val_k) - .map_err(TraceParsingError::from)?; - - trie_state.storage.remove(hashed_acc_addr); - - continue; - } - } - - trie_state - .state - .insert(val_k, updated_account_bytes.to_vec()) - .map_err(TraceParsingError::from)?; - } - - Ok(out) - } + // If the node path length decreased after the delete, then a collapse occurred. + // As an aside, note that while it's true that the branch could have collapsed + // into an extension node with multiple nodes below it, the query logic will + // always stop at most one node after the keys diverge, which guarantees that + // the new trie path will always be shorter if a collapse occurred. + let branch_collapse_occurred = old_path.0.len() > new_path.0.len(); - fn get_trie_trace(trie: &HashedPartialTrie, k: &Nibbles) -> TriePath { - path_for_query(trie, *k, true).collect() - } + // Now we need to determine the key of the only remaining node after the + // collapse. 
+ branch_collapse_occurred.then(|| new_path.iter().into_key()) +} - /// If a branch collapse occurred after a delete, then we must ensure that - /// the other single child that remains also is not hashed when passed into - /// plonky2. Returns the key to the remaining child if a collapse occurred. - fn delete_node_and_report_remaining_key_if_branch_collapsed( - trie: &mut HashedPartialTrie, - delete_k: &Nibbles, - ) -> TrieOpResult> { - let old_trace = Self::get_trie_trace(trie, delete_k); - trie.delete(*delete_k)?; - let new_trace = Self::get_trie_trace(trie, delete_k); - - Ok(Self::node_deletion_resulted_in_a_branch_collapse( - &old_trace, &new_trace, - )) +/// The withdrawals are always in the final ir payload. +fn add_withdrawals_to_txns( + txn_ir: &mut [GenerationInputs], + final_trie_state: &mut PartialTrieState, + mut withdrawals: Vec<(Address, U256)>, +) -> anyhow::Result<()> { + // Scale withdrawals amounts. + for (_addr, amt) in withdrawals.iter_mut() { + *amt = eth_to_gwei(*amt) } - /// Comparing the path of the deleted key before and after the deletion, - /// determine if the deletion resulted in a branch collapsing into a leaf or - /// extension node, and return the path to the remaining child if this - /// occurred. - fn node_deletion_resulted_in_a_branch_collapse( - old_path: &TriePath, - new_path: &TriePath, - ) -> Option { - // Collapse requires at least 2 nodes. - if old_path.0.len() < 2 { - return None; - } - - // If the node path length decreased after the delete, then a collapse occurred. - // As an aside, note that while it's true that the branch could have collapsed - // into an extension node with multiple nodes below it, the query logic will - // always stop at most one node after the keys diverge, which guarantees that - // the new trie path will always be shorter if a collapse occurred. 
- let branch_collapse_occurred = old_path.0.len() > new_path.0.len(); + let withdrawals_with_hashed_addrs_iter = || { + withdrawals + .iter() + .map(|(addr, v)| (*addr, hash(addr.as_bytes()), *v)) + }; + + let last_inputs = txn_ir + .last_mut() + .expect("We cannot have an empty list of payloads."); + + if last_inputs.signed_txn.is_none() { + // This is a dummy payload, hence it does not contain yet + // state accesses to the withdrawal addresses. + let withdrawal_addrs = withdrawals_with_hashed_addrs_iter().map(|(_, h_addr, _)| h_addr); + + let additional_paths = if last_inputs.txn_number_before == 0.into() { + // We need to include the beacon roots contract as this payload is at the + // start of the block execution. + vec![Nibbles::from_h256_be(H256( + BEACON_ROOTS_CONTRACT_ADDRESS_HASHED, + ))] + } else { + vec![] + }; - // Now we need to determine the key of the only remaining node after the - // collapse. - branch_collapse_occurred.then(|| new_path.iter().into_key()) + last_inputs.tries.state_trie = create_minimal_state_partial_trie( + &final_trie_state.state, + withdrawal_addrs, + additional_paths.into_iter(), + )?; } - /// The withdrawals are always in the final ir payload. - fn add_withdrawals_to_txns( - txn_ir: &mut [GenerationInputs], - final_trie_state: &mut PartialTrieState, - mut withdrawals: Vec<(Address, U256)>, - ) -> TraceParsingResult<()> { - // Scale withdrawals amounts. 
- for (_addr, amt) in withdrawals.iter_mut() { - *amt = eth_to_gwei(*amt) - } + update_trie_state_from_withdrawals( + withdrawals_with_hashed_addrs_iter(), + &mut final_trie_state.state, + )?; - let withdrawals_with_hashed_addrs_iter = || { - withdrawals - .iter() - .map(|(addr, v)| (*addr, hash(addr.as_bytes()), *v)) - }; + last_inputs.withdrawals = withdrawals; + last_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); - let last_inputs = txn_ir - .last_mut() - .expect("We cannot have an empty list of payloads."); - - if last_inputs.signed_txn.is_none() { - // This is a dummy payload, hence it does not contain yet - // state accesses to the withdrawal addresses. - let withdrawal_addrs = - withdrawals_with_hashed_addrs_iter().map(|(_, h_addr, _)| h_addr); - - let additional_paths = if last_inputs.txn_number_before == 0.into() { - // We need to include the beacon roots contract as this payload is at the - // start of the block execution. - vec![Nibbles::from_h256_be(H256( - BEACON_ROOTS_CONTRACT_ADDRESS_HASHED, - ))] - } else { - vec![] - }; + Ok(()) +} - last_inputs.tries.state_trie = create_minimal_state_partial_trie( - &final_trie_state.state, - withdrawal_addrs, - additional_paths.into_iter(), - )?; - } +/// Withdrawals update balances in the account trie, so we need to update +/// our local trie state. +fn update_trie_state_from_withdrawals<'a>( + withdrawals: impl IntoIterator + 'a, + state: &mut HashedPartialTrie, +) -> anyhow::Result<()> { + for (addr, h_addr, amt) in withdrawals { + let h_addr_nibs = Nibbles::from_h256_be(h_addr); - Self::update_trie_state_from_withdrawals( - withdrawals_with_hashed_addrs_iter(), - &mut final_trie_state.state, - )?; + let acc_bytes = state.get(h_addr_nibs).context(format!( + "No account present at {addr:x} (hashed: {h_addr:x}) to withdraw {amt} Gwei from!" 
+ ))?; + let mut acc_data = account_from_rlped_bytes(acc_bytes)?; - last_inputs.withdrawals = withdrawals; - last_inputs.trie_roots_after.state_root = final_trie_state.state.hash(); + acc_data.balance += amt; - Ok(()) + state.insert(h_addr_nibs, rlp::encode(&acc_data).to_vec())?; } - /// Withdrawals update balances in the account trie, so we need to update - /// our local trie state. - fn update_trie_state_from_withdrawals<'a>( - withdrawals: impl IntoIterator + 'a, - state: &mut HashedPartialTrie, - ) -> TraceParsingResult<()> { - for (addr, h_addr, amt) in withdrawals { - let h_addr_nibs = Nibbles::from_h256_be(h_addr); - - let acc_bytes = state.get(h_addr_nibs).ok_or_else(|| { - let mut e = TraceParsingError::new( - TraceParsingErrorReason::MissingWithdrawalAccount(addr, h_addr, amt), - ); - e.addr(addr); - e.h_addr(h_addr); - e - })?; - let mut acc_data = account_from_rlped_bytes(acc_bytes)?; - - acc_data.balance += amt; - - state - .insert(h_addr_nibs, rlp::encode(&acc_data).to_vec()) - .map_err(TraceParsingError::from)?; - } - - Ok(()) - } + Ok(()) +} - /// Processes a single transaction in the trace. - fn process_txn_info( - txn_idx: usize, - is_initial_payload: bool, - txn_info: ProcessedTxnInfo, - curr_block_tries: &mut PartialTrieState, - extra_data: &mut ExtraBlockData, - other_data: &OtherBlockData, - ) -> TraceParsingResult { - trace!("Generating proof IR for txn {}...", txn_idx); - - Self::init_any_needed_empty_storage_tries( - &mut curr_block_tries.storage, - txn_info - .nodes_used_by_txn - .storage_accesses - .iter() - .map(|(k, _)| k), - &txn_info - .nodes_used_by_txn - .state_accounts_with_no_accesses_but_storage_tries, - ); - // For each non-dummy txn, we increment `txn_number_after` by 1, and - // update `gas_used_after` accordingly. 
- extra_data.txn_number_after += U256::from(!txn_info.meta.is_dummy() as u8); - extra_data.gas_used_after += txn_info.meta.gas_used.into(); - - // Because we need to run delta application before creating the minimal - // sub-tries (we need to detect if deletes collapsed any branches), we need to - // do this clone every iteration. - let tries_at_start_of_txn = curr_block_tries.clone(); - - Self::update_txn_and_receipt_tries(curr_block_tries, &txn_info.meta, txn_idx) - .map_err(TraceParsingError::from)?; - - let mut delta_out = Self::apply_deltas_to_trie_state( +/// Processes a single transaction in the trace. +fn process_txn_info( + txn_idx: usize, + is_initial_payload: bool, + txn_info: ProcessedTxnInfo, + curr_block_tries: &mut PartialTrieState, + extra_data: &mut ExtraBlockData, + other_data: &OtherBlockData, +) -> anyhow::Result { + log::trace!("Generating proof IR for txn {}...", txn_idx); + + init_any_needed_empty_storage_tries( + &mut curr_block_tries.storage, + txn_info + .nodes_used_by_txn + .storage_accesses + .iter() + .map(|(k, _)| k), + &txn_info + .nodes_used_by_txn + .state_accounts_with_no_accesses_but_storage_tries, + ); + // For each non-dummy txn, we increment `txn_number_after` by 1, and + // update `gas_used_after` accordingly. + extra_data.txn_number_after += U256::from(!txn_info.meta.is_dummy() as u8); + extra_data.gas_used_after += txn_info.meta.gas_used.into(); + + // Because we need to run delta application before creating the minimal + // sub-tries (we need to detect if deletes collapsed any branches), we need to + // do this clone every iteration. 
+ let tries_at_start_of_txn = curr_block_tries.clone(); + + update_txn_and_receipt_tries(curr_block_tries, &txn_info.meta, txn_idx)?; + + let mut delta_out = apply_deltas_to_trie_state( + curr_block_tries, + &txn_info.nodes_used_by_txn, + &txn_info.meta, + )?; + + let nodes_used_by_txn = if is_initial_payload { + let mut nodes_used = txn_info.nodes_used_by_txn; + update_beacon_block_root_contract_storage( curr_block_tries, - &txn_info.nodes_used_by_txn, - &txn_info.meta, + &mut delta_out, + &mut nodes_used, + &other_data.b_data.b_meta, )?; - let nodes_used_by_txn = if is_initial_payload { - let mut nodes_used = txn_info.nodes_used_by_txn; - Self::update_beacon_block_root_contract_storage( - curr_block_tries, - &mut delta_out, - &mut nodes_used, - &other_data.b_data.b_meta, - )?; - - nodes_used - } else { - txn_info.nodes_used_by_txn - }; - - let tries = Self::create_minimal_partial_tries_needed_by_txn( - &tries_at_start_of_txn, - &nodes_used_by_txn, - txn_idx, - delta_out, - &other_data.b_data.b_meta.block_beneficiary, - )?; - - let trie_roots_after = calculate_trie_input_hashes(curr_block_tries); - let gen_inputs = GenerationInputs { - txn_number_before: extra_data.txn_number_before, - gas_used_before: extra_data.gas_used_before, - gas_used_after: extra_data.gas_used_after, - signed_txn: txn_info.meta.txn_bytes, - withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of - * the block (see `[add_withdrawals_to_txns]` - * for more info). */ - tries, - trie_roots_after, - checkpoint_state_trie_root: extra_data.checkpoint_state_trie_root, - contract_code: txn_info.contract_code_accessed, - block_metadata: other_data.b_data.b_meta.clone(), - block_hashes: other_data.b_data.b_hashes.clone(), - global_exit_roots: vec![], - }; - - // After processing a transaction, we update the remaining accumulators - // for the next transaction. 
- extra_data.txn_number_before = extra_data.txn_number_after; - extra_data.gas_used_before = extra_data.gas_used_after; - - Ok(gen_inputs) - } + nodes_used + } else { + txn_info.nodes_used_by_txn + }; + + let tries = create_minimal_partial_tries_needed_by_txn( + &tries_at_start_of_txn, + &nodes_used_by_txn, + txn_idx, + delta_out, + )?; + + let gen_inputs = GenerationInputs { + txn_number_before: extra_data.txn_number_before, + gas_used_before: extra_data.gas_used_before, + gas_used_after: extra_data.gas_used_after, + signed_txn: txn_info.meta.txn_bytes, + withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of + * the block (see `[add_withdrawals_to_txns]` + * for more info). */ + tries, + trie_roots_after: TrieRoots { + state_root: curr_block_tries.state.hash(), + transactions_root: curr_block_tries.txn.hash(), + receipts_root: curr_block_tries.receipt.hash(), + }, + checkpoint_state_trie_root: extra_data.checkpoint_state_trie_root, + contract_code: txn_info.contract_code_accessed, + block_metadata: other_data.b_data.b_meta.clone(), + block_hashes: other_data.b_data.b_hashes.clone(), + global_exit_roots: vec![], + }; + + // After processing a transaction, we update the remaining accumulators + // for the next transaction. 
+    extra_data.txn_number_before = extra_data.txn_number_after;
+    extra_data.gas_used_before = extra_data.gas_used_after;
+
+    Ok(gen_inputs)
 }
 
 impl StateTrieWrites {
@@ -801,45 +577,32 @@ impl StateTrieWrites {
         state_node: &mut AccountRlp,
         h_addr: &H256,
         acc_storage_tries: &HashMap<H256, HashedPartialTrie>,
-    ) -> TraceParsingResult<()> {
+    ) -> anyhow::Result<()> {
         let storage_root_hash_change = match self.storage_trie_change {
             false => None,
             true => {
-                let storage_trie = acc_storage_tries.get(h_addr).ok_or_else(|| {
-                    let h_addr = *h_addr;
-                    let mut e = TraceParsingError::new(
-                        TraceParsingErrorReason::MissingAccountStorageTrie(h_addr),
-                    );
-                    e.h_addr(h_addr);
-                    e
-                })?;
+                let storage_trie = acc_storage_tries
+                    .get(h_addr)
+                    .context(format!("missing account storage trie {:x}", h_addr))?;
 
                 Some(storage_trie.hash())
             }
         };
 
-        update_val_if_some(&mut state_node.balance, self.balance);
-        update_val_if_some(&mut state_node.nonce, self.nonce);
-        update_val_if_some(&mut state_node.storage_root, storage_root_hash_change);
-        update_val_if_some(&mut state_node.code_hash, self.code_hash);
+        state_node.balance = self.balance.unwrap_or(state_node.balance);
+        state_node.nonce = self.nonce.unwrap_or(state_node.nonce);
+        state_node.storage_root = storage_root_hash_change.unwrap_or(state_node.storage_root);
+        state_node.code_hash = self.code_hash.unwrap_or(state_node.code_hash);
 
         Ok(())
     }
 }
 
-fn calculate_trie_input_hashes(t_inputs: &PartialTrieState) -> TrieRoots {
-    TrieRoots {
-        state_root: t_inputs.state.hash(),
-        transactions_root: t_inputs.txn.hash(),
-        receipts_root: t_inputs.receipt.hash(),
-    }
-}
-
 fn create_minimal_state_partial_trie(
     state_trie: &HashedPartialTrie,
     state_accesses: impl Iterator<Item = H256>,
     additional_state_trie_paths_to_not_hash: impl Iterator<Item = Nibbles>,
-) -> TraceParsingResult<HashedPartialTrie> {
+) -> anyhow::Result<HashedPartialTrie> {
     create_trie_subset_wrapped(
         state_trie,
         state_accesses
@@ -856,7 +619,7 @@ fn create_minimal_storage_partial_tries<'a>(
     storage_tries: &HashMap<H256, HashedPartialTrie>,
     accesses_per_account: impl Iterator<Item = (&'a H256, &'a Vec<Nibbles>)>,
     additional_storage_trie_paths_to_not_hash: &HashMap<H256, Vec<Nibbles>>,
-) -> TraceParsingResult<Vec<(H256, HashedPartialTrie)>> {
+) -> anyhow::Result<Vec<(H256, HashedPartialTrie)>> {
     accesses_per_account
         .map(|(h_addr, mem_accesses)| {
             // Guaranteed to exist due to calling `init_any_needed_empty_storage_tries`
@@ -878,31 +641,20 @@ fn create_minimal_storage_partial_tries<'a>(
 
             Ok((*h_addr, partial_storage_trie))
         })
-        .collect::<TraceParsingResult<_>>()
+        .collect()
 }
 
 fn create_trie_subset_wrapped(
     trie: &HashedPartialTrie,
-    accesses: impl Iterator<Item = Nibbles>,
+    accesses: impl IntoIterator<Item = Nibbles>,
     trie_type: TrieType,
-) -> TraceParsingResult<HashedPartialTrie> {
-    create_trie_subset(trie, accesses).map_err(|trie_err| {
-        let key = match trie_err {
-            SubsetTrieError::UnexpectedKey(key, _) => key,
-        };
-
-        Box::new(TraceParsingError::new(
-            TraceParsingErrorReason::MissingKeysCreatingSubPartialTrie(key, trie_type),
-        ))
-    })
+) -> anyhow::Result<HashedPartialTrie> {
+    mpt_trie::trie_subsets::create_trie_subset(trie, accesses)
+        .context(format!("missing keys when creating {}", trie_type))
 }
 
-fn account_from_rlped_bytes(bytes: &[u8]) -> TraceParsingResult<AccountRlp> {
-    rlp::decode(bytes).map_err(|err| {
-        Box::new(TraceParsingError::new(
-            TraceParsingErrorReason::AccountDecode(hex::encode(bytes), err.to_string()),
-        ))
-    })
+fn account_from_rlped_bytes(bytes: &[u8]) -> anyhow::Result<AccountRlp> {
+    Ok(rlp::decode(bytes)?)
 }
 
 impl TxnMetaState {
@@ -920,21 +672,32 @@ impl TxnMetaState {
     }
 }
 
-fn update_val_if_some<T>(target: &mut T, opt: Option<T>) {
-    if let Some(new_val) = opt {
-        *target = new_val;
-    }
+fn eth_to_gwei(eth: U256) -> U256 {
+    // 1 ether = 10^9 gwei.
+    eth * U256::from(10).pow(9.into())
 }
 
-fn optional_field<T: fmt::Debug>(label: &str, value: Option<T>) -> String {
-    value.map_or(String::new(), |v| format!("{}: {:?}\n", label, v))
-}
+// This is just `rlp(0)`.
+const ZERO_STORAGE_SLOT_VAL_RLPED: [u8; 1] = [128];
+
+/// Formatting aid for error context.
+struct CustomFmt<T>(T);
 
-fn optional_field_hex(label: &str, value: Option<U256>) -> String {
-    value.map_or(String::new(), |v| format!("{}: 0x{:064X}\n", label, v))
+impl fmt::Display for CustomFmt<U512> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut buf = [0u8; 64];
+        self.0.to_big_endian(&mut buf);
+        f.write_fmt(format_args!("{} (hashed: 0x{:064X})", self.0, hash(buf)))
+    }
 }
 
-fn eth_to_gwei(eth: U256) -> U256 {
-    // 1 ether = 10^9 gwei.
-    eth * U256::from(10).pow(9.into())
+/// Aid for error context.
+/// Covers all Ethereum trie types (see <https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie> for details).
+#[derive(Debug, strum::Display)]
+#[allow(missing_docs)]
+pub enum TrieType {
+    State,
+    Storage,
+    Receipt,
+    Txn,
 }
 
diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs
index 42b83a69d..d540309d1 100644
--- a/trace_decoder/src/lib.rs
+++ b/trace_decoder/src/lib.rs
@@ -437,12 +437,14 @@ pub fn entrypoint(
         txn_info.insert(0, ProcessedTxnInfo::default());
     }
 
-    Ok(ProcessedBlockTrace {
-        tries: pre_images.tries,
-        txn_info,
-        withdrawals: other.b_data.withdrawals.clone(),
-    }
-    .into_txn_proof_gen_ir(other)?)
+    decoding::into_txn_proof_gen_ir(
+        ProcessedBlockTrace {
+            tries: pre_images.tries,
+            txn_info,
+            withdrawals: other.b_data.withdrawals.clone(),
+        },
+        other,
+    )
 }
 
 #[derive(Debug, Default)]