diff --git a/evm_arithmetization/benches/fibonacci_25m_gas.rs b/evm_arithmetization/benches/fibonacci_25m_gas.rs
index 9959acbc1..799a568b7 100644
--- a/evm_arithmetization/benches/fibonacci_25m_gas.rs
+++ b/evm_arithmetization/benches/fibonacci_25m_gas.rs
@@ -25,7 +25,6 @@ use evm_arithmetization::Node;
 use hex_literal::hex;
 use keccak_hash::keccak;
 use mpt_trie::nibbles::Nibbles;
-use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie};
 use plonky2::field::goldilocks_field::GoldilocksField;
 
 type F = GoldilocksField;
@@ -73,7 +72,7 @@ fn prepare_setup() -> anyhow::Result {
     ];
     let code_hash = keccak(code);
 
-    let empty_trie_root = HashedPartialTrie::from(Node::Empty).hash();
+    let empty_trie_root = Node::Empty.hash();
 
     let sender_account_before = AccountRlp {
         nonce: 169.into(),
@@ -93,14 +92,17 @@ fn prepare_setup() -> anyhow::Result {
     state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?;
     state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?;
 
-    storage_tries.push((sender_state_key, Node::Empty.into()));
-    storage_tries.push((to_state_key, Node::Empty.into()));
+    storage_tries.push((sender_state_key, Node::Empty));
+    storage_tries.push((to_state_key, Node::Empty));
 
     let tries_before = TrieInputs {
-        state_trie: state_trie_before,
-        transactions_trie: Node::Empty.into(),
-        receipts_trie: Node::Empty.into(),
-        storage_tries,
+        state_trie: state_trie_before.freeze(),
+        transactions_trie: Node::Empty.freeze(),
+        receipts_trie: Node::Empty.freeze(),
+        storage_tries: storage_tries
+            .into_iter()
+            .map(|(k, v)| (k, v.freeze()))
+            .collect(),
     };
 
     let gas_used = U256::from(0x17d7840_u32);
@@ -133,7 +135,7 @@ fn prepare_setup() -> anyhow::Result {
     };
     let to_account_after = to_account_before;
 
-    let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty);
+    let mut expected_state_trie_after = Node::Empty;
     expected_state_trie_after
         .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?;
     expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?;
@@ -159,16 +161,15 @@ fn prepare_setup() -> anyhow::Result {
         bloom: vec![0; 256].into(),
         logs: vec![],
     };
-    let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
+    let mut receipts_trie = Node::Empty;
     receipts_trie.insert(
         Nibbles::from_str("0x80").unwrap(),
         rlp::encode(&receipt_0).to_vec(),
     )?;
-    let transactions_trie: HashedPartialTrie = Node::Leaf {
+    let transactions_trie = Node::Leaf {
         nibbles: Nibbles::from_str("0x80").unwrap(),
         value: txn.to_vec(),
-    }
-    .into();
+    };
 
     let trie_roots_after = TrieRoots {
         state_root: expected_state_trie_after.hash(),
diff --git a/evm_arithmetization/src/cpu/kernel/constants/trie_type.rs b/evm_arithmetization/src/cpu/kernel/constants/trie_type.rs
index 9821c06f7..99bad9908 100644
--- a/evm_arithmetization/src/cpu/kernel/constants/trie_type.rs
+++ b/evm_arithmetization/src/cpu/kernel/constants/trie_type.rs
@@ -1,7 +1,3 @@
-use core::ops::Deref;
-
-use mpt_trie::partial_trie::HashedPartialTrie;
-
 use crate::Node;
 
 #[derive(Copy, Clone, Debug)]
@@ -16,8 +12,8 @@ pub(crate) enum PartialTrieType {
 impl PartialTrieType {
     pub(crate) const COUNT: usize = 5;
 
-    pub(crate) fn of(trie: &HashedPartialTrie) -> Self {
-        match trie.deref() {
+    pub(crate) fn of(trie: &Node) -> Self {
+        match trie {
             Node::Empty => Self::Empty,
             Node::Hash(_) => Self::Hash,
             Node::Branch { ..
} => Self::Branch, diff --git a/evm_arithmetization/src/cpu/kernel/interpreter.rs b/evm_arithmetization/src/cpu/kernel/interpreter.rs index 38fbe2320..544c2febb 100644 --- a/evm_arithmetization/src/cpu/kernel/interpreter.rs +++ b/evm_arithmetization/src/cpu/kernel/interpreter.rs @@ -10,7 +10,6 @@ use std::collections::{BTreeSet, HashMap}; use anyhow::anyhow; use ethereum_types::{BigEndianHash, U256}; use log::Level; -use mpt_trie::partial_trie::PartialTrie; use plonky2::field::types::Field; use crate::byte_packing::byte_packing_stark::BytePackingOp; diff --git a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs index 125760ee5..2ec43c379 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs @@ -5,7 +5,6 @@ use ethereum_types::{Address, BigEndianHash, H256, U256}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; use plonky2::field::types::Field; use rand::{thread_rng, Rng}; @@ -62,7 +61,7 @@ fn test_account(code: &[u8]) -> AccountRlp { AccountRlp { nonce: U256::from(1111), balance: U256::from(2222), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak(code), } } @@ -82,7 +81,7 @@ fn prepare_interpreter( ) -> Result<()> { let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - let mut state_trie: HashedPartialTrie = Default::default(); + let mut state_trie: Node = Default::default(); let trie_inputs = Default::default(); initialize_mpts(interpreter, &trie_inputs); @@ -321,15 +320,15 @@ fn sstore() -> Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); + let mut state_trie_before = Node::Empty; state_trie_before.insert(addr_nibbles, rlp::encode(&account_before).to_vec())?; let trie_inputs = TrieInputs { - state_trie: state_trie_before.clone(), - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![(addr_hashed, Node::Empty.into())], + state_trie: state_trie_before.clone().freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: vec![(addr_hashed, Node::Empty.freeze())], }; let initial_stack = vec![]; @@ -358,10 +357,10 @@ fn sstore() -> Result<()> { let account_after = AccountRlp { balance: 0x0de0b6b3a7640000u64.into(), code_hash, - storage_root: HashedPartialTrie::from(Node::Leaf { + storage_root: Node::Leaf { nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), value: vec![2], - }) + } .hash(), ..AccountRlp::default() }; @@ -387,7 +386,7 @@ fn sstore() -> Result<()> { let hash = H256::from_uint(&interpreter.stack()[1]); - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut expected_state_trie_after = Node::Empty; expected_state_trie_after.insert(addr_nibbles, rlp::encode(&account_after).to_vec())?; let expected_state_trie_hash = expected_state_trie_after.hash(); @@ -419,15 +418,15 @@ fn sload() -> Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); + let mut state_trie_before = Node::Empty; state_trie_before.insert(addr_nibbles, rlp::encode(&account_before).to_vec())?; let trie_inputs = TrieInputs { - state_trie: 
state_trie_before.clone(), - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![(addr_hashed, Node::Empty.into())], + state_trie: state_trie_before.clone().freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: vec![(addr_hashed, Node::Empty.freeze())], }; let initial_stack = vec![]; diff --git a/evm_arithmetization/src/cpu/kernel/tests/add11.rs b/evm_arithmetization/src/cpu/kernel/tests/add11.rs index ae5ac3871..303f016d1 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/add11.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/add11.rs @@ -5,7 +5,7 @@ use ethereum_types::{Address, BigEndianHash, H256}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; +use mpt_trie::Node; use plonky2::field::goldilocks_field::GoldilocksField as F; use crate::cpu::kernel::aggregator::KERNEL; @@ -71,13 +71,16 @@ fn test_add11_yml() { .insert(to_nibbles, rlp::encode(&to_account_before).to_vec()) .unwrap(); - storage_tries.push((to_hashed, Node::Empty.into())); + storage_tries.push((to_hashed, Node::Empty)); let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); @@ -111,10 +114,10 @@ fn test_add11_yml() { balance: 0xde0b6b3a76586a0u64.into(), code_hash, // Storage map: { 0 => 2 } - storage_root: HashedPartialTrie::from(Node::Leaf { + storage_root: Node::Leaf { nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), value: vec![2], - }) + } .hash(), ..AccountRlp::default() }; @@ -127,7 +130,7 @@ fn test_add11_yml() { let beacon_roots_account = beacon_roots_contract_from_storage(&beacon_roots_account_storage); - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut expected_state_trie_after = Node::Empty; expected_state_trie_after .insert( beneficiary_nibbles, @@ -160,18 +163,17 @@ fn test_add11_yml() { bloom: vec![0; 256].into(), logs: vec![], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie .insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), ) .unwrap(); - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -187,7 +189,7 @@ fn test_add11_yml() { trie_roots_after, contract_code: contract_code.clone(), block_metadata, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), txn_number_before: 0.into(), gas_used_before: 0.into(), gas_used_after: gas_used, @@ -259,13 +261,16 @@ fn test_add11_yml_with_exception() { .insert(to_nibbles, rlp::encode(&to_account_before).to_vec()) .unwrap(); - storage_tries.push((to_hashed, Node::Empty.into())); + storage_tries.push((to_hashed, 
Node::Empty)); let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); @@ -307,7 +312,7 @@ fn test_add11_yml_with_exception() { let beacon_roots_account = beacon_roots_contract_from_storage(&beacon_roots_account_storage); - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut expected_state_trie_after = Node::Empty; expected_state_trie_after .insert( beneficiary_nibbles, @@ -341,18 +346,17 @@ fn test_add11_yml_with_exception() { bloom: vec![0; 256].into(), logs: vec![], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie .insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), ) .unwrap(); - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -368,7 +372,7 @@ fn test_add11_yml_with_exception() { trie_roots_after, contract_code: contract_code.clone(), block_metadata, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), txn_number_before: 0.into(), gas_used_before: 0.into(), gas_used_after: txn_gas_limit.into(), diff --git a/evm_arithmetization/src/cpu/kernel/tests/balance.rs b/evm_arithmetization/src/cpu/kernel/tests/balance.rs index 2b8f8c241..40ba570b4 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/balance.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/balance.rs @@ -1,7 +1,6 @@ use anyhow::Result; use ethereum_types::{Address, BigEndianHash, H256, U256}; use keccak_hash::keccak; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; use plonky2::field::types::Field; use rand::{thread_rng, Rng}; @@ -19,7 +18,7 @@ fn test_account(balance: U256) -> AccountRlp { AccountRlp { nonce: U256::from(1111), balance, - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: H256::from_uint(&U256::from(8888)), } } @@ -33,7 +32,7 @@ fn prepare_interpreter( ) -> Result<()> { let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - let mut state_trie: HashedPartialTrie = Default::default(); + let mut state_trie: Node = Default::default(); let trie_inputs = Default::default(); initialize_mpts(interpreter, &trie_inputs); diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs index 15a3a36cd..8376e4306 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs @@ -1,7 +1,6 @@ use anyhow::Result; use ethereum_types::{BigEndianHash, H256}; use mpt_trie::nibbles::{Nibbles, NibblesIntern}; -use 
mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; use rand::random; @@ -24,8 +23,7 @@ fn mpt_delete_leaf_nonoverlapping_keys() -> Result<()> { let state_trie = Node::Leaf { nibbles: nibbles_64(0xABC), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0x123), test_account_2()) } @@ -34,8 +32,7 @@ fn mpt_delete_leaf_overlapping_keys() -> Result<()> { let state_trie = Node::Leaf { nibbles: nibbles_64(0xABC), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) } @@ -45,8 +42,7 @@ fn mpt_delete_branch_into_hash() -> Result<()> { let state_trie = Node::Extension { nibbles: nibbles_64(0xADF), child: hash.into(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) } @@ -67,8 +63,7 @@ fn test_after_mpt_delete_extension_branch() -> Result<()> { let state_trie = Node::Extension { nibbles, child: branch.into(), - } - .into(); + }; let key = nibbles.merge_nibbles(&Nibbles { packed: NibblesIntern::zero(), count: 64 - nibbles.count, @@ -79,18 +74,14 @@ fn test_after_mpt_delete_extension_branch() -> Result<()> { /// Note: The account's storage_root is ignored, as we can't insert a new /// storage_root without the accompanying trie data. An empty trie's /// storage_root is used instead. -fn test_state_trie( - state_trie: HashedPartialTrie, - k: Nibbles, - mut account: AccountRlp, -) -> Result<()> { +fn test_state_trie(state_trie: Node, k: Nibbles, mut account: AccountRlp) -> Result<()> { assert_eq!(k.count, 64); // Ignore any storage_root; see documentation note. - account.storage_root = HashedPartialTrie::from(Node::Empty).hash(); + account.storage_root = Node::Empty.hash(); let trie_inputs = TrieInputs { - state_trie: state_trie.clone(), + state_trie: state_trie.clone().freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs index 18e3ae1fe..65e525382 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs @@ -1,6 +1,5 @@ use anyhow::Result; use ethereum_types::{BigEndianHash, H256}; -use mpt_trie::partial_trie::PartialTrie; use plonky2::field::goldilocks_field::GoldilocksField as F; use crate::cpu::kernel::aggregator::KERNEL; @@ -31,7 +30,7 @@ fn mpt_hash_empty_branch() -> Result<()> { children, value: vec![], } - .into(); + .freeze(); let trie_inputs = TrieInputs { state_trie, transactions_trie: Default::default(), @@ -45,7 +44,7 @@ fn mpt_hash_empty_branch() -> Result<()> { fn mpt_hash_hash() -> Result<()> { let hash = H256::random(); let trie_inputs = TrieInputs { - state_trie: Node::Hash(hash).into(), + state_trie: Node::Hash(hash).freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], @@ -60,7 +59,7 @@ fn mpt_hash_leaf() -> Result<()> { nibbles: 0xABC_u64.into(), value: test_account_1_rlp(), } - .into(); + .freeze(); let trie_inputs = TrieInputs { state_trie, transactions_trie: Default::default(), @@ -72,7 +71,7 @@ fn mpt_hash_leaf() -> Result<()> { #[test] fn mpt_hash_extension_to_leaf() -> Result<()> { - let state_trie = extension_to_leaf(test_account_1_rlp()); + let state_trie = extension_to_leaf(test_account_1_rlp()).freeze(); let trie_inputs = TrieInputs { state_trie, transactions_trie: Default::default(), @@ 
-96,7 +95,7 @@ fn mpt_hash_branch_to_leaf() -> Result<()> { children, value: vec![], } - .into(); + .freeze(); let trie_inputs = TrieInputs { state_trie, diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs index d25138631..13ea86e6d 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs @@ -1,7 +1,6 @@ use anyhow::Result; use ethereum_types::{BigEndianHash, H256}; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; use crate::cpu::kernel::aggregator::KERNEL; @@ -26,8 +25,7 @@ fn mpt_insert_leaf_identical_keys() -> Result<()> { let state_trie = Node::Leaf { nibbles: key, value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, key, test_account_2()) } @@ -36,8 +34,7 @@ fn mpt_insert_leaf_nonoverlapping_keys() -> Result<()> { let state_trie = Node::Leaf { nibbles: nibbles_64(0xABC), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0x123), test_account_2()) } @@ -46,8 +43,7 @@ fn mpt_insert_leaf_overlapping_keys() -> Result<()> { let state_trie = Node::Leaf { nibbles: nibbles_64(0xABC), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) } @@ -57,8 +53,7 @@ fn mpt_insert_leaf_insert_key_extends_leaf_key() -> Result<()> { let state_trie = Node::Leaf { nibbles: 0xABC_u64.into(), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xABCDE), test_account_2()) } @@ -68,8 +63,7 @@ fn mpt_insert_leaf_leaf_key_extends_insert_key() -> Result<()> { let state_trie = Node::Leaf { nibbles: 0xABCDE_u64.into(), value: test_account_1_rlp(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xABC), test_account_2()) } @@ -79,8 +73,7 @@ fn mpt_insert_branch_replacing_empty_child() -> Result<()> { let state_trie = Node::Branch { children, value: vec![], - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xABC), test_account_2()) } @@ -104,8 +97,7 @@ fn mpt_insert_extension_nonoverlapping_keys() -> Result<()> { value: test_account_1_rlp(), } .into(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0x12345), test_account_2()) } @@ -128,8 +120,7 @@ fn mpt_insert_extension_insert_key_extends_node_key() -> Result<()> { value: test_account_1_rlp(), } .into(), - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xABCDEF), test_account_2()) } @@ -146,8 +137,7 @@ fn mpt_insert_branch_to_leaf_same_key() -> Result<()> { let state_trie = Node::Branch { children, value: vec![], - } - .into(); + }; test_state_trie(state_trie, nibbles_64(0xABCD), test_account_2()) } @@ -155,18 +145,14 @@ fn mpt_insert_branch_to_leaf_same_key() -> Result<()> { /// Note: The account's storage_root is ignored, as we can't insert a new /// storage_root without the accompanying trie data. An empty trie's /// storage_root is used instead. -fn test_state_trie( - mut state_trie: HashedPartialTrie, - k: Nibbles, - mut account: AccountRlp, -) -> Result<()> { +fn test_state_trie(mut state_trie: Node, k: Nibbles, mut account: AccountRlp) -> Result<()> { assert_eq!(k.count, 64); // Ignore any storage_root; see documentation note. 
- account.storage_root = HashedPartialTrie::from(Node::Empty).hash(); + account.storage_root = Node::Empty.hash(); let trie_inputs = TrieInputs { - state_trie: state_trie.clone(), + state_trie: state_trie.clone().freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs index 9aa8a1f0b..22fe76d40 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs @@ -4,7 +4,6 @@ use anyhow::Result; use ethereum_types::{BigEndianHash, H256, U256}; use hex_literal::hex; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::HashedPartialTrie; use plonky2::field::goldilocks_field::GoldilocksField as F; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; @@ -55,7 +54,7 @@ fn load_all_mpts_leaf() -> Result<()> { nibbles: 0xABC_u64.into(), value: test_account_1_rlp(), } - .into(), + .freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], @@ -101,7 +100,7 @@ fn load_all_mpts_leaf() -> Result<()> { fn load_all_mpts_hash() -> Result<()> { let hash = H256::random(); let trie_inputs = TrieInputs { - state_trie: Node::Hash(hash).into(), + state_trie: Node::Hash(hash).freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], @@ -137,7 +136,7 @@ fn load_all_mpts_empty_branch() -> Result<()> { children, value: vec![], } - .into(); + .freeze(); let trie_inputs = TrieInputs { state_trie, transactions_trie: Default::default(), @@ -191,7 +190,7 @@ fn load_all_mpts_empty_branch() -> Result<()> { #[test] fn load_all_mpts_ext_to_leaf() -> Result<()> { let trie_inputs = TrieInputs { - state_trie: extension_to_leaf(test_account_1_rlp()), + state_trie: extension_to_leaf(test_account_1_rlp()).freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], @@ -235,10 +234,11 @@ fn load_mpt_txn_trie() -> Result<()> { let trie_inputs = TrieInputs { state_trie: Default::default(), - transactions_trie: HashedPartialTrie::from(Node::Leaf { + transactions_trie: Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.clone(), - }), + } + .freeze(), receipts_trie: Default::default(), storage_tries: vec![], }; diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs index 84f64bb7b..31aa0d0a8 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs @@ -1,6 +1,5 @@ use ethereum_types::{BigEndianHash, H256, U256}; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::HashedPartialTrie; use crate::generation::mpt::AccountRlp; use crate::Node; @@ -56,7 +55,7 @@ pub(crate) fn test_account_2_rlp() -> Vec { /// A `PartialTrie` where an extension node leads to a leaf node containing an /// account. 
-pub(crate) fn extension_to_leaf(value: Vec) -> HashedPartialTrie { +pub(crate) fn extension_to_leaf(value: Vec) -> Node { Node::Extension { nibbles: 0xABC_u64.into(), child: Node::Leaf { @@ -68,5 +67,4 @@ pub(crate) fn extension_to_leaf(value: Vec) -> HashedPartialTrie { } .into(), } - .into() } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs index 9b669a21c..728f30773 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs @@ -12,7 +12,7 @@ use crate::generation::TrieInputs; #[test] fn mpt_read() -> Result<()> { let trie_inputs = TrieInputs { - state_trie: extension_to_leaf(test_account_1_rlp()), + state_trie: extension_to_leaf(test_account_1_rlp()).freeze(), transactions_trie: Default::default(), receipts_trie: Default::default(), storage_tries: vec![], diff --git a/evm_arithmetization/src/fixed_recursive_verifier.rs b/evm_arithmetization/src/fixed_recursive_verifier.rs index 3fa6e208f..43070467c 100644 --- a/evm_arithmetization/src/fixed_recursive_verifier.rs +++ b/evm_arithmetization/src/fixed_recursive_verifier.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use anyhow::anyhow; use hashbrown::HashMap; use itertools::{zip_eq, Itertools}; -use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; +use mpt_trie::Node; use plonky2::field::extension::Extendable; use plonky2::fri::FriParams; use plonky2::gates::constant::ConstantGate; @@ -1150,7 +1150,7 @@ where builder.assert_zero(x.extra_block_data.gas_used_before); // The transactions and receipts tries are empty at the beginning of the block. - let initial_trie = HashedPartialTrie::from(Node::Empty).hash(); + let initial_trie = Node::Empty.hash(); for (i, limb) in h256_limbs::(initial_trie).into_iter().enumerate() { let limb_target = builder.constant(limb); diff --git a/evm_arithmetization/src/generation/mod.rs b/evm_arithmetization/src/generation/mod.rs index 5940e8be3..55f1865d5 100644 --- a/evm_arithmetization/src/generation/mod.rs +++ b/evm_arithmetization/src/generation/mod.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use anyhow::anyhow; use ethereum_types::{Address, BigEndianHash, H256, U256}; use log::log_enabled; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; +use mpt_trie::FrozenNode; use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; @@ -85,22 +85,22 @@ pub struct TrieInputs { /// A partial version of the state trie prior to these transactions. It /// should include all nodes that will be accessed by these /// transactions. - pub state_trie: HashedPartialTrie, + pub state_trie: FrozenNode, /// A partial version of the transaction trie prior to these transactions. /// It should include all nodes that will be accessed by these /// transactions. - pub transactions_trie: HashedPartialTrie, + pub transactions_trie: FrozenNode, /// A partial version of the receipt trie prior to these transactions. It /// should include all nodes that will be accessed by these /// transactions. - pub receipts_trie: HashedPartialTrie, + pub receipts_trie: FrozenNode, /// A partial version of each storage trie prior to these transactions. It /// should include all storage tries, and nodes therein, that will be /// accessed by these transactions. 
- pub storage_tries: Vec<(H256, HashedPartialTrie)>, + pub storage_tries: Vec<(H256, FrozenNode)>, } fn apply_metadata_and_tries_memops, const D: usize>( @@ -348,7 +348,7 @@ pub(crate) fn output_debug_tries(state: &GenerationState) -> an .map_err(|_| anyhow!("State trie pointer is too large to fit in a usize."))?; log::debug!( "Computed state trie: {:?}", - get_state_trie::(&state.memory, state_trie_ptr) + get_state_trie(&state.memory, state_trie_ptr) ); let txn_trie_ptr = u256_to_usize( @@ -359,7 +359,7 @@ pub(crate) fn output_debug_tries(state: &GenerationState) -> an .map_err(|_| anyhow!("Transactions trie pointer is too large to fit in a usize."))?; log::debug!( "Computed transactions trie: {:?}", - get_txn_trie::(&state.memory, txn_trie_ptr) + get_txn_trie(&state.memory, txn_trie_ptr) ); let receipt_trie_ptr = u256_to_usize( @@ -370,7 +370,7 @@ pub(crate) fn output_debug_tries(state: &GenerationState) -> an .map_err(|_| anyhow!("Receipts trie pointer is too large to fit in a usize."))?; log::debug!( "Computed receipts trie: {:?}", - get_receipt_trie::(&state.memory, receipt_trie_ptr) + get_receipt_trie(&state.memory, receipt_trie_ptr) ); } diff --git a/evm_arithmetization/src/generation/mpt.rs b/evm_arithmetization/src/generation/mpt.rs index 5c824ad9d..a2c1d1a01 100644 --- a/evm_arithmetization/src/generation/mpt.rs +++ b/evm_arithmetization/src/generation/mpt.rs @@ -1,11 +1,10 @@ -use core::ops::Deref; use std::collections::HashMap; use bytes::Bytes; use ethereum_types::{Address, BigEndianHash, H256, U256}; use keccak_hash::keccak; use mpt_trie::nibbles::{Nibbles, NibblesIntern}; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; +use mpt_trie::FrozenNode; use rlp::{Decodable, DecoderError, Encodable, PayloadInfo, Rlp, RlpStream}; use rlp_derive::{RlpDecodable, RlpEncodable}; @@ -35,7 +34,7 @@ impl Default for AccountRlp { Self { nonce: U256::zero(), balance: U256::zero(), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), } } @@ -126,7 +125,7 @@ const fn empty_nibbles() -> Nibbles { } fn load_mpt( - trie: &HashedPartialTrie, + trie: &Node, trie_data: &mut Vec, parse_value: &F, ) -> Result @@ -139,7 +138,7 @@ where trie_data.push(type_of_trie.into()); } - match trie.deref() { + match trie { Node::Empty => Ok(0), Node::Hash(h) => { trie_data.push(h2u(*h)); @@ -204,17 +203,17 @@ where } fn load_state_trie( - trie: &HashedPartialTrie, + trie: &Node, key: Nibbles, trie_data: &mut Vec, - storage_tries_by_state_key: &HashMap, + storage_tries_by_state_key: &HashMap, ) -> Result { let node_ptr = trie_data.len(); let type_of_trie = PartialTrieType::of(trie) as u32; if type_of_trie > 0 { trie_data.push(type_of_trie.into()); } - match trie.deref() { + match trie { Node::Empty => Ok(0), Node::Hash(h) => { trie_data.push(h2u(*h)); @@ -274,9 +273,9 @@ fn load_state_trie( code_hash, } = account; - let storage_hash_only = HashedPartialTrie::new(Node::Hash(storage_root)); + let storage_hash_only = Node::Hash(storage_root).freeze(); let merged_key = key.merge_nibbles(nibbles); - let storage_trie: &HashedPartialTrie = storage_tries_by_state_key + let storage_trie: &Node = storage_tries_by_state_key .get(&merged_key) .copied() .unwrap_or(&storage_hash_only); diff --git a/evm_arithmetization/src/generation/trie_extractor.rs b/evm_arithmetization/src/generation/trie_extractor.rs index 48fc28f53..0fa919c10 100644 --- a/evm_arithmetization/src/generation/trie_extractor.rs +++ 
b/evm_arithmetization/src/generation/trie_extractor.rs @@ -1,9 +1,11 @@ //! Code for extracting trie data after witness generation. This is intended //! only for debugging. +use std::sync::Arc; + use ethereum_types::{BigEndianHash, H256, U256}; use mpt_trie::nibbles::{Nibbles, NibblesIntern}; -use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie, WrappedNode}; +use mpt_trie::Node; use super::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use crate::cpu::kernel::constants::trie_type::PartialTrieType; @@ -93,10 +95,9 @@ pub(crate) fn read_state_rlp_value( memory: &MemoryState, slice: &MemoryValues, ) -> Result, ProgramError> { - let storage_trie: HashedPartialTrie = - get_trie(memory, slice[2].unwrap_or_default().as_usize(), |_, x| { - Ok(rlp::encode(&read_storage_trie_value(x)).to_vec()) - })?; + let storage_trie: Node = get_trie(memory, slice[2].unwrap_or_default().as_usize(), |_, x| { + Ok(rlp::encode(&read_storage_trie_value(x)).to_vec()) + })?; let account = AccountRlp { nonce: slice[0].unwrap_or_default(), balance: slice[1].unwrap_or_default(), @@ -130,51 +131,37 @@ pub(crate) fn read_receipt_rlp_value( Ok(bytes) } -pub(crate) fn get_state_trie( - memory: &MemoryState, - ptr: usize, -) -> Result { +pub(crate) fn get_state_trie(memory: &MemoryState, ptr: usize) -> Result { get_trie(memory, ptr, read_state_rlp_value) } -pub(crate) fn get_txn_trie( - memory: &MemoryState, - ptr: usize, -) -> Result { +pub(crate) fn get_txn_trie(memory: &MemoryState, ptr: usize) -> Result { get_trie(memory, ptr, read_txn_rlp_value) } -pub(crate) fn get_receipt_trie( - memory: &MemoryState, - ptr: usize, -) -> Result { +pub(crate) fn get_receipt_trie(memory: &MemoryState, ptr: usize) -> Result { get_trie(memory, ptr, read_receipt_rlp_value) } type MemoryValues = Vec>; -pub(crate) fn get_trie( +pub(crate) fn get_trie( memory: &MemoryState, ptr: usize, read_rlp_value: fn(&MemoryState, &MemoryValues) -> Result, ProgramError>, -) -> Result { +) -> Result { let empty_nibbles = Nibbles { count: 0, packed: NibblesIntern::zero(), }; - Ok(N::new(get_trie_helper( - memory, - ptr, - read_rlp_value, - empty_nibbles, - )?)) + get_trie_helper(memory, ptr, read_rlp_value, empty_nibbles) } -pub(crate) fn get_trie_helper( +pub(crate) fn get_trie_helper( memory: &MemoryState, ptr: usize, read_value: fn(&MemoryState, &MemoryValues) -> Result, ProgramError>, prefix: Nibbles, -) -> Result, ProgramError> { +) -> Result { let load = |offset| { memory.get(MemoryAddress { context: 0, @@ -203,7 +190,7 @@ pub(crate) fn get_trie_helper( get_trie_helper(memory, child_ptr, read_value, prefix.merge_nibble(i as u8)) }) .collect::, _>>()?; - let children = core::array::from_fn(|i| WrappedNode::from(children[i].clone())); + let children = core::array::from_fn(|i| Arc::new(children[i].clone())); let value_ptr = u256_to_usize(load(ptr_payload + 16).unwrap_or_default())?; let mut value: Vec = vec![]; if value_ptr != 0 { @@ -219,7 +206,7 @@ pub(crate) fn get_trie_helper( packed: packed.into(), }; let child_ptr = u256_to_usize(load(ptr + 3).unwrap_or_default())?; - let child = WrappedNode::from(get_trie_helper( + let child = Arc::new(get_trie_helper( memory, child_ptr, read_value, diff --git a/evm_arithmetization/src/lib.rs b/evm_arithmetization/src/lib.rs index 0ee30fe08..cbb4c846c 100644 --- a/evm_arithmetization/src/lib.rs +++ b/evm_arithmetization/src/lib.rs @@ -211,11 +211,9 @@ pub mod extension_tower; pub mod testing_utils; pub mod util; -use mpt_trie::partial_trie::HashedPartialTrie; - // Public definitions and re-exports -pub 
type Node = mpt_trie::partial_trie::Node; +pub type Node = mpt_trie::Node; /// A type alias for `u64` of a block height. pub type BlockHeight = u64; diff --git a/evm_arithmetization/src/testing_utils.rs b/evm_arithmetization/src/testing_utils.rs index bb1f07b64..e159801ba 100644 --- a/evm_arithmetization/src/testing_utils.rs +++ b/evm_arithmetization/src/testing_utils.rs @@ -5,10 +5,7 @@ use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; use ethereum_types::{BigEndianHash, H256, U256}; use hex_literal::hex; use keccak_hash::keccak; -use mpt_trie::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, Node, PartialTrie}, -}; +use mpt_trie::{nibbles::Nibbles, Node}; pub use crate::cpu::kernel::cancun_constants::*; pub use crate::cpu::kernel::constants::global_exit_root::{ @@ -35,7 +32,7 @@ pub fn sh2u(s: &str) -> U256 { } /// Inserts a new pair `(slot, value)` into the provided storage trie. -fn insert_storage(trie: &mut HashedPartialTrie, slot: U256, value: U256) -> anyhow::Result<()> { +fn insert_storage(trie: &mut Node, slot: U256, value: U256) -> anyhow::Result<()> { let mut bytes = [0; 32]; slot.to_big_endian(&mut bytes); let key = keccak(bytes); @@ -51,8 +48,8 @@ fn insert_storage(trie: &mut HashedPartialTrie, slot: U256, value: U256) -> anyh /// Creates a storage trie for an account, given a list of `(slot, value)` /// pairs. -pub fn create_account_storage(storage_pairs: &[(U256, U256)]) -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); +pub fn create_account_storage(storage_pairs: &[(U256, U256)]) -> anyhow::Result { + let mut trie = Node::Empty; for (slot, value) in storage_pairs { insert_storage(&mut trie, *slot, *value)?; } @@ -62,7 +59,7 @@ pub fn create_account_storage(storage_pairs: &[(U256, U256)]) -> anyhow::Result< /// Updates the beacon roots account storage with the provided timestamp and /// block parent root. pub fn update_beacon_roots_account_storage( - storage_trie: &mut HashedPartialTrie, + storage_trie: &mut Node, timestamp: U256, parent_root: H256, ) -> anyhow::Result<()> { @@ -74,7 +71,7 @@ pub fn update_beacon_roots_account_storage( } /// Returns the beacon roots contract account from its provided storage trie. -pub fn beacon_roots_contract_from_storage(storage_trie: &HashedPartialTrie) -> AccountRlp { +pub fn beacon_roots_contract_from_storage(storage_trie: &Node) -> AccountRlp { AccountRlp { storage_root: storage_trie.hash(), ..BEACON_ROOTS_ACCOUNT @@ -83,9 +80,8 @@ pub fn beacon_roots_contract_from_storage(storage_trie: &HashedPartialTrie) -> A /// Returns an initial state trie containing the beacon roots and global exit /// roots contracts, along with their storage tries. 
-pub fn preinitialized_state_and_storage_tries( -) -> anyhow::Result<(HashedPartialTrie, Vec<(H256, HashedPartialTrie)>)> { - let mut state_trie = HashedPartialTrie::from(Node::Empty); +pub fn preinitialized_state_and_storage_tries() -> anyhow::Result<(Node, Vec<(H256, Node)>)> { + let mut state_trie = Node::Empty; state_trie.insert( beacon_roots_account_nibbles(), rlp::encode(&BEACON_ROOTS_ACCOUNT).to_vec(), @@ -96,11 +92,8 @@ pub fn preinitialized_state_and_storage_tries( )?; let storage_tries = vec![ - ( - H256(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED), - Node::Empty.into(), - ), - (H256(GLOBAL_EXIT_ROOT_ADDRESS_HASHED), Node::Empty.into()), + (H256(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED), Node::Empty), + (H256(GLOBAL_EXIT_ROOT_ADDRESS_HASHED), Node::Empty), ]; Ok((state_trie, storage_tries)) @@ -117,7 +110,7 @@ pub fn ger_account_nibbles() -> Nibbles { } pub fn update_ger_account_storage( - storage_trie: &mut HashedPartialTrie, + storage_trie: &mut Node, root: H256, timestamp: U256, ) -> anyhow::Result<()> { @@ -128,7 +121,7 @@ pub fn update_ger_account_storage( insert_storage(storage_trie, slot.into_uint(), timestamp) } -pub fn ger_contract_from_storage(storage_trie: &HashedPartialTrie) -> AccountRlp { +pub fn ger_contract_from_storage(storage_trie: &Node) -> AccountRlp { AccountRlp { storage_root: storage_trie.hash(), ..GLOBAL_EXIT_ROOT_ACCOUNT diff --git a/evm_arithmetization/tests/add11_yml.rs b/evm_arithmetization/tests/add11_yml.rs index dca625d36..e577a8862 100644 --- a/evm_arithmetization/tests/add11_yml.rs +++ b/evm_arithmetization/tests/add11_yml.rs @@ -17,7 +17,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -72,13 +71,16 @@ fn add11_yml() -> anyhow::Result<()> { state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; - storage_tries.push((to_hashed, Node::Empty.into())); + storage_tries.push((to_hashed, Node::Empty)); let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); @@ -122,15 +124,15 @@ fn add11_yml() -> anyhow::Result<()> { balance: 0xde0b6b3a76586a0u64.into(), code_hash, // Storage map: { 0 => 2 } - storage_root: HashedPartialTrie::from(Node::Leaf { + storage_root: Node::Leaf { nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), value: vec![2], - }) + } .hash(), ..AccountRlp::default() }; - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut expected_state_trie_after = Node::Empty; expected_state_trie_after.insert( beneficiary_nibbles, rlp::encode(&beneficiary_account_after).to_vec(), @@ -156,16 +158,15 @@ fn add11_yml() -> anyhow::Result<()> { bloom: vec![0; 256].into(), logs: vec![], }; - let 
mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), )?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -180,7 +181,7 @@ fn add11_yml() -> anyhow::Result<()> { trie_roots_after, contract_code, block_metadata, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), txn_number_before: 0.into(), gas_used_before: 0.into(), gas_used_after: 0xa868u64.into(), diff --git a/evm_arithmetization/tests/erc20.rs b/evm_arithmetization/tests/erc20.rs index 1c829efc1..6ff7cfd63 100644 --- a/evm_arithmetization/tests/erc20.rs +++ b/evm_arithmetization/tests/erc20.rs @@ -16,7 +16,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -77,10 +76,13 @@ fn test_erc20() -> anyhow::Result<()> { ]); let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: HashedPartialTrie::from(Node::Empty), - receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; let txn = signed_tx(); @@ -105,7 +107,7 @@ fn test_erc20() -> anyhow::Result<()> { .map(|v| (keccak(v.clone()), v)) .into(); - let expected_state_trie_after: HashedPartialTrie = { + let expected_state_trie_after: Node = { update_beacon_roots_account_storage( &mut beacon_roots_account_storage, block_metadata.block_timestamp, @@ -114,7 +116,7 @@ fn test_erc20() -> anyhow::Result<()> { let beacon_roots_account = beacon_roots_contract_from_storage(&beacon_roots_account_storage); - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut state_trie_after = Node::Empty; let sender_account = sender_account(); let sender_account_after = AccountRlp { nonce: sender_account.nonce + 1, @@ -165,13 +167,12 @@ fn test_erc20() -> anyhow::Result<()> { .into(), }], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert(Nibbles::from_str("0x80").unwrap(), receipt_0.encode(2))?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -185,7 +186,7 @@ fn test_erc20() -> anyhow::Result<()> { tries: tries_before, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), @@ -211,21 +212,21 @@ fn token_bytecode() -> Vec { 
hex!("608060405234801561001057600080fd5b50600436106100935760003560e01c8063313ce56711610066578063313ce567146100fe57806370a082311461010d57806395d89b4114610136578063a9059cbb1461013e578063dd62ed3e1461015157600080fd5b806306fdde0314610098578063095ea7b3146100b657806318160ddd146100d957806323b872dd146100eb575b600080fd5b6100a061018a565b6040516100ad919061056a565b60405180910390f35b6100c96100c43660046105d4565b61021c565b60405190151581526020016100ad565b6002545b6040519081526020016100ad565b6100c96100f93660046105fe565b610236565b604051601281526020016100ad565b6100dd61011b36600461063a565b6001600160a01b031660009081526020819052604090205490565b6100a061025a565b6100c961014c3660046105d4565b610269565b6100dd61015f36600461065c565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b6060600380546101999061068f565b80601f01602080910402602001604051908101604052809291908181526020018280546101c59061068f565b80156102125780601f106101e757610100808354040283529160200191610212565b820191906000526020600020905b8154815290600101906020018083116101f557829003601f168201915b5050505050905090565b60003361022a818585610277565b60019150505b92915050565b600033610244858285610289565b61024f85858561030c565b506001949350505050565b6060600480546101999061068f565b60003361022a81858561030c565b610284838383600161036b565b505050565b6001600160a01b03838116600090815260016020908152604080832093861683529290522054600019811461030657818110156102f757604051637dc7a0d960e11b81526001600160a01b038416600482015260248101829052604481018390526064015b60405180910390fd5b6103068484848403600061036b565b50505050565b6001600160a01b03831661033657604051634b637e8f60e11b8152600060048201526024016102ee565b6001600160a01b0382166103605760405163ec442f0560e01b8152600060048201526024016102ee565b610284838383610440565b6001600160a01b0384166103955760405163e602df0560e01b8152600060048201526024016102ee565b6001600160a01b0383166103bf57604051634a1406b160e11b8152600060048201526024016102ee565b6001600160a01b038085166000908152600160209081526040808320938716835292905220829055801561030657826001600160a01b0316846001600160a01b03167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9258460405161043291815260200190565b60405180910390a350505050565b6001600160a01b03831661046b57806002600082825461046091906106c9565b909155506104dd9050565b6001600160a01b038316600090815260208190526040902054818110156104be5760405163391434e360e21b81526001600160a01b038516600482015260248101829052604481018390526064016102ee565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b0382166104f957600280548290039055610518565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161055d91815260200190565b60405180910390a3505050565b600060208083528351808285015260005b818110156105975785810183015185820160400152820161057b565b506000604082860101526040601f19601f8301168501019250505092915050565b80356001600160a01b03811681146105cf57600080fd5b919050565b600080604083850312156105e757600080fd5b6105f0836105b8565b946020939093013593505050565b60008060006060848603121561061357600080fd5b61061c846105b8565b925061062a602085016105b8565b9150604084013590509250925092565b60006020828403121561064c57600080fd5b610655826105b8565b9392505050565b6000806040838503121561066f57600080fd5b610678836105b8565b9150610686602084016105b8565b90509250929050565b600181811c908216806106a357607f821691505b6020821081036106c357634e487b7160e01b600052602260045260246000fd5b50919050565b8082018082111561023057634e487b7160e01b600052601160045260246000fdfea264697
0667358221220266a323ae4a816f6c6342a5be431fedcc0d45c44b02ea75f5474eb450b5d45b364736f6c63430008140033").into() } -fn giver_storage() -> anyhow::Result { +fn giver_storage() -> anyhow::Result { create_account_storage(&[( U256::zero(), sd2u("546584486846459126461364135121053344201067465379"), )]) } -fn token_storage() -> anyhow::Result { +fn token_storage() -> anyhow::Result { create_account_storage(&[( sd2u("82183438603287090451672504949863617512989139203883434767553028632841710582583"), sd2u("1000000000000000000000"), )]) } -fn token_storage_after() -> anyhow::Result { +fn token_storage_after() -> anyhow::Result { create_account_storage(&[ ( sd2u("82183438603287090451672504949863617512989139203883434767553028632841710582583"), diff --git a/evm_arithmetization/tests/erc721.rs b/evm_arithmetization/tests/erc721.rs index 3a02d8968..c509b41ca 100644 --- a/evm_arithmetization/tests/erc721.rs +++ b/evm_arithmetization/tests/erc721.rs @@ -16,7 +16,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -73,10 +72,13 @@ fn test_erc721() -> anyhow::Result<()> { storage_tries.push((contract_state_key, contract_storage()?)); let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: HashedPartialTrie::from(Node::Empty), - receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; let txn = signed_tx(); @@ -124,8 +126,8 @@ fn test_erc721() -> anyhow::Result<()> { ..Default::default() }; - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let expected_state_trie_after: Node = { + let mut state_trie_after = Node::Empty; update_beacon_roots_account_storage( &mut beacon_roots_account_storage, @@ -168,13 +170,12 @@ fn test_erc721() -> anyhow::Result<()> { bloom: bloom_bytes.to_vec().into(), logs, }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert(Nibbles::from_str("0x80").unwrap(), receipt_0.encode(0))?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -189,7 +190,7 @@ fn test_erc721() -> anyhow::Result<()> { tries: tries_before, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), @@ -211,7 +212,7 @@ fn contract_bytecode() -> Vec { 
hex!("608060405234801561000f575f80fd5b5060043610610109575f3560e01c8063715018a6116100a0578063a22cb4651161006f578063a22cb465146102a1578063b88d4fde146102bd578063c87b56dd146102d9578063e985e9c514610309578063f2fde38b1461033957610109565b8063715018a61461023f5780638da5cb5b1461024957806395d89b4114610267578063a14481941461028557610109565b806323b872dd116100dc57806323b872dd146101a757806342842e0e146101c35780636352211e146101df57806370a082311461020f57610109565b806301ffc9a71461010d57806306fdde031461013d578063081812fc1461015b578063095ea7b31461018b575b5f80fd5b61012760048036038101906101229190611855565b610355565b604051610134919061189a565b60405180910390f35b610145610436565b604051610152919061193d565b60405180910390f35b61017560048036038101906101709190611990565b6104c5565b60405161018291906119fa565b60405180910390f35b6101a560048036038101906101a09190611a3d565b6104e0565b005b6101c160048036038101906101bc9190611a7b565b6104f6565b005b6101dd60048036038101906101d89190611a7b565b6105f5565b005b6101f960048036038101906101f49190611990565b610614565b60405161020691906119fa565b60405180910390f35b61022960048036038101906102249190611acb565b610625565b6040516102369190611b05565b60405180910390f35b6102476106db565b005b6102516106ee565b60405161025e91906119fa565b60405180910390f35b61026f610716565b60405161027c919061193d565b60405180910390f35b61029f600480360381019061029a9190611a3d565b6107a6565b005b6102bb60048036038101906102b69190611b48565b6107bc565b005b6102d760048036038101906102d29190611cb2565b6107d2565b005b6102f360048036038101906102ee9190611990565b6107ef565b604051610300919061193d565b60405180910390f35b610323600480360381019061031e9190611d32565b610855565b604051610330919061189a565b60405180910390f35b610353600480360381019061034e9190611acb565b6108e3565b005b5f7f80ac58cd000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916148061041f57507f5b5e139f000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916145b8061042f575061042e82610967565b5b9050919050565b60605f805461044490611d9d565b80601f016020809104026020016040519081016040528092919081815260200182805461047090611d9d565b80156104bb5780601f10610492576101008083540402835291602001916104bb565b820191905f5260205f20905b81548152906001019060200180831161049e57829003601f168201915b5050505050905090565b5f6104cf826109d0565b506104d982610a56565b9050919050565b6104f282826104ed610a8f565b610a96565b5050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610566575f6040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161055d91906119fa565b60405180910390fd5b5f6105798383610574610a8f565b610aa8565b90508373ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146105ef578382826040517f64283d7b0000000000000000000000000000000000000000000000000000000081526004016105e693929190611dcd565b60405180910390fd5b50505050565b61060f83838360405180602001604052805f8152506107d2565b505050565b5f61061e826109d0565b9050919050565b5f8073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610696575f6040517f89c62b6400000000000000000000000000000000000000000000000000000000815260040161068d91906119fa565b60405180910390fd5b60035f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f20549050919050565b6106e3610cb3565b6106ec5f610d3a565b565
b5f60065f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff16905090565b60606001805461072590611d9d565b80601f016020809104026020016040519081016040528092919081815260200182805461075190611d9d565b801561079c5780601f106107735761010080835404028352916020019161079c565b820191905f5260205f20905b81548152906001019060200180831161077f57829003601f168201915b5050505050905090565b6107ae610cb3565b6107b88282610dfd565b5050565b6107ce6107c7610a8f565b8383610e1a565b5050565b6107dd8484846104f6565b6107e984848484610f83565b50505050565b60606107fa826109d0565b505f610804611135565b90505f8151116108225760405180602001604052805f81525061084d565b8061082c8461114b565b60405160200161083d929190611e3c565b6040516020818303038152906040525b915050919050565b5f60055f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f9054906101000a900460ff16905092915050565b6108eb610cb3565b5f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff160361095b575f6040517f1e4fbdf700000000000000000000000000000000000000000000000000000000815260040161095291906119fa565b60405180910390fd5b61096481610d3a565b50565b5f7f01ffc9a7000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916149050919050565b5f806109db83611215565b90505f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1603610a4d57826040517f7e273289000000000000000000000000000000000000000000000000000000008152600401610a449190611b05565b60405180910390fd5b80915050919050565b5f60045f8381526020019081526020015f205f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050919050565b5f33905090565b610aa3838383600161124e565b505050565b5f80610ab384611215565b90505f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614610af457610af381848661140d565b5b5f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614610b7f57610b335f855f8061124e565b600160035f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825403925050819055505b5f73ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff1614610bfe57600160035f8773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825401925050819055505b8460025f8681526020019081526020015f205f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550838573ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef60405160405180910390a4809150509392505050565b610cbb610a8f565b73ffffffffffffffffffffffffffffffffffffffff16610cd96106ee565b73ffffffffffffffffffffffffffffffffffffffff1614610d3857610cfc610a8f565b6040517f118cdaa7000000000000000000000000000000000000000000000000000000008152600401610d2f91906119fa565b60405180910390fd5b565b5f60065f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690508160065f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff167f8be0079c531659141344cd1fd0a4f28419
497f9722a3daafe3b4186f6b6457e060405160405180910390a35050565b610e16828260405180602001604052805f8152506114d0565b5050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610e8a57816040517f5b08ba18000000000000000000000000000000000000000000000000000000008152600401610e8191906119fa565b60405180910390fd5b8060055f8573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f6101000a81548160ff0219169083151502179055508173ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167f17307eab39ab6107e8899845ad3d59bd9653f200f220920489ca2b5937696c3183604051610f76919061189a565b60405180910390a3505050565b5f8373ffffffffffffffffffffffffffffffffffffffff163b111561112f578273ffffffffffffffffffffffffffffffffffffffff1663150b7a02610fc6610a8f565b8685856040518563ffffffff1660e01b8152600401610fe89493929190611eb1565b6020604051808303815f875af192505050801561102357506040513d601f19601f820116820180604052508101906110209190611f0f565b60015b6110a4573d805f8114611051576040519150601f19603f3d011682016040523d82523d5f602084013e611056565b606091505b505f81510361109c57836040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161109391906119fa565b60405180910390fd5b805181602001fd5b63150b7a0260e01b7bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916817bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19161461112d57836040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161112491906119fa565b60405180910390fd5b505b50505050565b606060405180602001604052805f815250905090565b60605f6001611159846114eb565b0190505f8167ffffffffffffffff81111561117757611176611b8e565b5b6040519080825280601f01601f1916602001820160405280156111a95781602001600182028036833780820191505090505b5090505f82602001820190505b60011561120a578080600190039150507f3031323334353637383961626364656600000000000000000000000000000000600a86061a8153600a85816111ff576111fe611f3a565b5b0494505f85036111b6575b819350505050919050565b5f60025f8381526020019081526020015f205f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050919050565b808061128657505f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b156113b8575f611295846109d0565b90505f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16141580156112ff57508273ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561131257506113108184610855565b155b1561135457826040517fa9fbf51f00000000000000000000000000000000000000000000000000000000815260040161134b91906119fa565b60405180910390fd5b81156113b657838573ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92560405160405180910390a45b505b8360045f8581526020019081526020015f205f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050505050565b61141883838361163c565b6114cb575f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff160361148c57806040517f7e2732890000000000000000000000000000000000000000000000000000000081526004016114839190611b05565b60405180910390fd5b81816040517f177e802f0000000000000000000000000000000000000000000000000000000081526004016114c2929190611f67565b60405180910390fd5b505050565b6
114da83836116fc565b6114e65f848484610f83565b505050565b5f805f90507a184f03e93ff9f4daa797ed6e38ed64bf6a1f0100000000000000008310611547577a184f03e93ff9f4daa797ed6e38ed64bf6a1f010000000000000000838161153d5761153c611f3a565b5b0492506040810190505b6d04ee2d6d415b85acef81000000008310611584576d04ee2d6d415b85acef8100000000838161157a57611579611f3a565b5b0492506020810190505b662386f26fc1000083106115b357662386f26fc1000083816115a9576115a8611f3a565b5b0492506010810190505b6305f5e10083106115dc576305f5e10083816115d2576115d1611f3a565b5b0492506008810190505b61271083106116015761271083816115f7576115f6611f3a565b5b0492506004810190505b60648310611624576064838161161a57611619611f3a565b5b0492506002810190505b600a8310611633576001810190505b80915050919050565b5f8073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16141580156116f357508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff1614806116b457506116b38484610855565b5b806116f257508273ffffffffffffffffffffffffffffffffffffffff166116da83610a56565b73ffffffffffffffffffffffffffffffffffffffff16145b5b90509392505050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff160361176c575f6040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161176391906119fa565b60405180910390fd5b5f61177883835f610aa8565b90505f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146117ea575f6040517f73c6ac6e0000000000000000000000000000000000000000000000000000000081526004016117e191906119fa565b60405180910390fd5b505050565b5f604051905090565b5f80fd5b5f80fd5b5f7fffffffff0000000000000000000000000000000000000000000000000000000082169050919050565b61183481611800565b811461183e575f80fd5b50565b5f8135905061184f8161182b565b92915050565b5f6020828403121561186a576118696117f8565b5b5f61187784828501611841565b91505092915050565b5f8115159050919050565b61189481611880565b82525050565b5f6020820190506118ad5f83018461188b565b92915050565b5f81519050919050565b5f82825260208201905092915050565b5f5b838110156118ea5780820151818401526020810190506118cf565b5f8484015250505050565b5f601f19601f8301169050919050565b5f61190f826118b3565b61191981856118bd565b93506119298185602086016118cd565b611932816118f5565b840191505092915050565b5f6020820190508181035f8301526119558184611905565b905092915050565b5f819050919050565b61196f8161195d565b8114611979575f80fd5b50565b5f8135905061198a81611966565b92915050565b5f602082840312156119a5576119a46117f8565b5b5f6119b28482850161197c565b91505092915050565b5f73ffffffffffffffffffffffffffffffffffffffff82169050919050565b5f6119e4826119bb565b9050919050565b6119f4816119da565b82525050565b5f602082019050611a0d5f8301846119eb565b92915050565b611a1c816119da565b8114611a26575f80fd5b50565b5f81359050611a3781611a13565b92915050565b5f8060408385031215611a5357611a526117f8565b5b5f611a6085828601611a29565b9250506020611a718582860161197c565b9150509250929050565b5f805f60608486031215611a9257611a916117f8565b5b5f611a9f86828701611a29565b9350506020611ab086828701611a29565b9250506040611ac18682870161197c565b9150509250925092565b5f60208284031215611ae057611adf6117f8565b5b5f611aed84828501611a29565b91505092915050565b611aff8161195d565b82525050565b5f602082019050611b185f830184611af6565b92915050565b611b2781611880565b8114611b31575f80fd5b50565b5f81359050611b4281611b1e565b92915050565b5f8060408385031215611b5e57611b5d6117f8565b5b5f611b6b85828601611a29565b9250506020611b7c85828601611b34565b9150509250929050565b5f80fd5b5f80fd5b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b611bc4826118f5
565b810181811067ffffffffffffffff82111715611be357611be2611b8e565b5b80604052505050565b5f611bf56117ef565b9050611c018282611bbb565b919050565b5f67ffffffffffffffff821115611c2057611c1f611b8e565b5b611c29826118f5565b9050602081019050919050565b828183375f83830152505050565b5f611c56611c5184611c06565b611bec565b905082815260208101848484011115611c7257611c71611b8a565b5b611c7d848285611c36565b509392505050565b5f82601f830112611c9957611c98611b86565b5b8135611ca9848260208601611c44565b91505092915050565b5f805f8060808587031215611cca57611cc96117f8565b5b5f611cd787828801611a29565b9450506020611ce887828801611a29565b9350506040611cf98782880161197c565b925050606085013567ffffffffffffffff811115611d1a57611d196117fc565b5b611d2687828801611c85565b91505092959194509250565b5f8060408385031215611d4857611d476117f8565b5b5f611d5585828601611a29565b9250506020611d6685828601611a29565b9150509250929050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52602260045260245ffd5b5f6002820490506001821680611db457607f821691505b602082108103611dc757611dc6611d70565b5b50919050565b5f606082019050611de05f8301866119eb565b611ded6020830185611af6565b611dfa60408301846119eb565b949350505050565b5f81905092915050565b5f611e16826118b3565b611e208185611e02565b9350611e308185602086016118cd565b80840191505092915050565b5f611e478285611e0c565b9150611e538284611e0c565b91508190509392505050565b5f81519050919050565b5f82825260208201905092915050565b5f611e8382611e5f565b611e8d8185611e69565b9350611e9d8185602086016118cd565b611ea6816118f5565b840191505092915050565b5f608082019050611ec45f8301876119eb565b611ed160208301866119eb565b611ede6040830185611af6565b8181036060830152611ef08184611e79565b905095945050505050565b5f81519050611f098161182b565b92915050565b5f60208284031215611f2457611f236117f8565b5b5f611f3184828501611efb565b91505092915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601260045260245ffd5b5f604082019050611f7a5f8301856119eb565b611f876020830184611af6565b939250505056fea2646970667358221220432b30673e00c0eb009e1718c271f4cfdfbeded17345829703b06d322360990164736f6c63430008160033").into() } -fn contract_storage() -> anyhow::Result { +fn contract_storage() -> anyhow::Result { create_account_storage(&[ ( U256::zero(), @@ -236,7 +237,7 @@ fn contract_storage() -> anyhow::Result { ]) } -fn contract_storage_after() -> anyhow::Result { +fn contract_storage_after() -> anyhow::Result { create_account_storage(&[ ( U256::zero(), @@ -265,7 +266,7 @@ fn owner_account() -> AccountRlp { AccountRlp { nonce: 2.into(), balance: 0x1000000.into(), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), } } diff --git a/evm_arithmetization/tests/global_exit_root.rs b/evm_arithmetization/tests/global_exit_root.rs index 507ffe0f7..b87aa3ab0 100644 --- a/evm_arithmetization/tests/global_exit_root.rs +++ b/evm_arithmetization/tests/global_exit_root.rs @@ -13,7 +13,6 @@ use evm_arithmetization::testing_utils::{ use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use keccak_hash::keccak; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -39,8 +38,8 @@ fn test_global_exit_root() -> anyhow::Result<()> { let (state_trie_before, storage_tries) = preinitialized_state_and_storage_tries()?; let mut beacon_roots_account_storage = storage_tries[0].1.clone(); let mut ger_account_storage = 
storage_tries[1].1.clone(); - let transactions_trie = HashedPartialTrie::from(Node::Empty); - let receipts_trie = HashedPartialTrie::from(Node::Empty); + let transactions_trie = Node::Empty; + let receipts_trie = Node::Empty; let mut contract_code = HashMap::new(); contract_code.insert(keccak(vec![]), vec![]); @@ -48,7 +47,7 @@ fn test_global_exit_root() -> anyhow::Result<()> { let global_exit_roots = vec![(U256(random()), H256(random()))]; let state_trie_after = { - let mut trie = HashedPartialTrie::from(Node::Empty); + let mut trie = Node::Empty; update_beacon_roots_account_storage( &mut beacon_roots_account_storage, block_metadata.block_timestamp, @@ -81,14 +80,17 @@ fn test_global_exit_root() -> anyhow::Result<()> { withdrawals: vec![], global_exit_roots, tries: TrieInputs { - state_trie: state_trie_before, - transactions_trie, - receipts_trie, - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: transactions_trie.freeze(), + receipts_trie: receipts_trie.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), diff --git a/evm_arithmetization/tests/log_opcode.rs b/evm_arithmetization/tests/log_opcode.rs index 8cd5c57c0..6bf236cb6 100644 --- a/evm_arithmetization/tests/log_opcode.rs +++ b/evm_arithmetization/tests/log_opcode.rs @@ -21,7 +21,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -96,7 +95,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; - storage_tries.push((to_hashed, Node::Empty.into())); + storage_tries.push((to_hashed, Node::Empty)); // We now add two receipts with logs and data. This updates the receipt trie as // well. @@ -121,17 +120,20 @@ fn test_log_opcodes() -> anyhow::Result<()> { // Insert the first receipt into the initial receipt trie. The initial receipts // trie has an initial node with a random nibble. - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert( Nibbles::from_str("0x1337").unwrap(), rlp::encode(&receipt_0).to_vec(), )?; let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: receipts_trie.clone(), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: receipts_trie.clone().freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; // Prove a transaction which carries out two LOG opcodes. @@ -208,7 +210,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec())?; // Update the state trie. 
- let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); + let mut expected_state_trie_after = Node::Empty; expected_state_trie_after.insert( beneficiary_nibbles, rlp::encode(&beneficiary_account_after).to_vec(), @@ -225,11 +227,10 @@ fn test_log_opcodes() -> anyhow::Result<()> { rlp::encode(&GLOBAL_EXIT_ROOT_ACCOUNT).to_vec(), )?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -244,7 +245,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { tries: tries_before, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), @@ -278,8 +279,8 @@ fn test_log_opcodes() -> anyhow::Result<()> { #[test] fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { // This test checks that inserting into the transaction and receipt - // `HashedPartialTrie`s works as expected. - let mut example_txn_trie = HashedPartialTrie::from(Node::Empty); + // `Node`s works as expected. + let mut example_txn_trie = Node::Empty; // We consider two transactions, with one log each. let transaction_0 = LegacyTransactionRlp { @@ -323,7 +324,7 @@ fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { )?; // Receipts: - let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty); + let mut example_receipt_trie = Node::Empty; let log_0 = LogRlp { address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), diff --git a/evm_arithmetization/tests/selfdestruct.rs b/evm_arithmetization/tests/selfdestruct.rs index 708646e16..050267f70 100644 --- a/evm_arithmetization/tests/selfdestruct.rs +++ b/evm_arithmetization/tests/selfdestruct.rs @@ -16,7 +16,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -46,7 +45,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { let sender_account_before = AccountRlp { nonce: 5.into(), balance: eth_to_wei(100_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), }; let code = vec![ @@ -56,7 +55,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { let to_account_before = AccountRlp { nonce: 12.into(), balance: eth_to_wei(10_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak(&code), }; @@ -66,10 +65,13 @@ fn test_selfdestruct() -> anyhow::Result<()> { state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: HashedPartialTrie::from(Node::Empty), - receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }; // Generated using a little py-evm script. 
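
The pattern above repeats across all of these test fixtures: tries are built and mutated as plain `Node` values and are only converted with `.freeze()` at the point where they are handed to `TrieInputs`. A minimal sketch of that conversion, using the `TrieInputs` field names shown in these hunks; the helper name is illustrative and the import path is an assumption:

    use ethereum_types::H256;
    use evm_arithmetization::generation::TrieInputs; // import path assumed
    use mpt_trie::Node;

    fn freeze_tries(state_trie: Node, storage_tries: Vec<(H256, Node)>) -> TrieInputs {
        TrieInputs {
            // `freeze()` computes each root hash once and caches it.
            state_trie: state_trie.freeze(),
            transactions_trie: Node::Empty.freeze(),
            receipts_trie: Node::Empty.freeze(),
            storage_tries: storage_tries
                .into_iter()
                .map(|(k, v)| (k, v.freeze()))
                .collect(),
        }
    }
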
@@ -90,8 +92,8 @@ fn test_selfdestruct() -> anyhow::Result<()> { let contract_code = [(keccak(&code), code.clone()), (keccak([]), vec![])].into(); - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let expected_state_trie_after: Node = { + let mut state_trie_after = Node::Empty; update_beacon_roots_account_storage( &mut beacon_roots_account_storage, @@ -104,7 +106,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { let sender_account_after = AccountRlp { nonce: 6.into(), balance: eth_to_wei(110_000.into()) - 26_002 * 0xa, - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), }; state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; @@ -114,7 +116,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { let to_account_before = AccountRlp { nonce: 12.into(), balance: 0.into(), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak(&code), }; state_trie_after.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; @@ -136,16 +138,15 @@ fn test_selfdestruct() -> anyhow::Result<()> { bloom: vec![0; 256].into(), logs: vec![], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), )?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -159,7 +160,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { tries: tries_before, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), diff --git a/evm_arithmetization/tests/simple_transfer.rs b/evm_arithmetization/tests/simple_transfer.rs index 030b2c3e1..331a659b3 100644 --- a/evm_arithmetization/tests/simple_transfer.rs +++ b/evm_arithmetization/tests/simple_transfer.rs @@ -17,7 +17,6 @@ use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -47,7 +46,7 @@ fn test_simple_transfer() -> anyhow::Result<()> { let sender_account_before = AccountRlp { nonce: 5.into(), balance: eth_to_wei(100_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), }; let to_account_before = AccountRlp::default(); @@ -57,10 +56,13 @@ fn test_simple_transfer() -> anyhow::Result<()> { state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: HashedPartialTrie::from(Node::Empty), - receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, + state_trie: state_trie_before.freeze(), + transactions_trie: Node::Empty.freeze(), + receipts_trie: Node::Empty.freeze(), + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + 
.collect(), }; // Generated using a little py-evm script. @@ -83,8 +85,8 @@ fn test_simple_transfer() -> anyhow::Result<()> { let mut contract_code = HashMap::new(); contract_code.insert(keccak(vec![]), vec![]); - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let expected_state_trie_after: Node = { + let mut state_trie_after = Node::Empty; let txdata_gas = 2 * 16; let gas_used = 21_000 + txdata_gas; @@ -128,16 +130,15 @@ fn test_simple_transfer() -> anyhow::Result<()> { bloom: vec![0; 256].into(), logs: vec![], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), )?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -151,7 +152,7 @@ fn test_simple_transfer() -> anyhow::Result<()> { tries: tries_before, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), diff --git a/evm_arithmetization/tests/two_to_one_block.rs b/evm_arithmetization/tests/two_to_one_block.rs index fe479bf65..30b01db10 100644 --- a/evm_arithmetization/tests/two_to_one_block.rs +++ b/evm_arithmetization/tests/two_to_one_block.rs @@ -13,7 +13,6 @@ use evm_arithmetization::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::hash::poseidon::PoseidonHash; use plonky2::plonk::config::{Hasher, PoseidonGoldilocksConfig}; @@ -51,28 +50,22 @@ fn empty_transfer(timestamp: u64) -> anyhow::Result { let sender_account_before = AccountRlp { nonce: 5.into(), balance: eth_to_wei(100_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), + storage_root: Node::Empty.hash(), code_hash: keccak([]), }; let to_account_before = AccountRlp::default(); - let state_trie_before: HashedPartialTrie = Node::Leaf { + let state_trie_before = Node::Leaf { nibbles: sender_nibbles, value: rlp::encode(&sender_account_before).to_vec(), - } - .into(); + }; let checkpoint_state_trie_root = state_trie_before.hash(); assert_eq!( checkpoint_state_trie_root, hex!("ef46022eafbc33d70e6ea9c6aef1074c1ff7ad36417ffbc64307ad3a8c274b75").into() ); - let tries_before = TrieInputs { - state_trie: HashedPartialTrie::from(Node::Empty), - transactions_trie: HashedPartialTrie::from(Node::Empty), - receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries: vec![], - }; + let tries_before = TrieInputs::default(); // Generated using a little py-evm script. 
let txn = hex!("f861050a8255f094a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0648242421ba02c89eb757d9deeb1f5b3859a9d4d679951ef610ac47ad4608dc142beb1b7e313a05af7e9fbab825455d36c36c7f4cfcafbeafa9a77bdff936b52afb36d4fe4bcdd"); @@ -92,7 +85,7 @@ fn empty_transfer(timestamp: u64) -> anyhow::Result { let contract_code = HashMap::new(); - let expected_state_trie_after: HashedPartialTrie = { + let expected_state_trie_after: Node = { let txdata_gas = 2 * 16; let gas_used = 21_000 + txdata_gas; @@ -121,7 +114,6 @@ fn empty_transfer(timestamp: u64) -> anyhow::Result { children, value: vec![], } - .into() }; let receipt_0 = LegacyReceiptRlp { @@ -130,16 +122,15 @@ fn empty_transfer(timestamp: u64) -> anyhow::Result { bloom: vec![0; 256].into(), logs: vec![], }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let mut receipts_trie = Node::Empty; receipts_trie.insert( Nibbles::from_str("0x80").unwrap(), rlp::encode(&receipt_0).to_vec(), )?; - let transactions_trie: HashedPartialTrie = Node::Leaf { + let transactions_trie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), value: txn.to_vec(), - } - .into(); + }; let _trie_roots_after = TrieRoots { state_root: expected_state_trie_after.hash(), @@ -180,13 +171,9 @@ fn get_test_block_proof( global_exit_roots: vec![], withdrawals: vec![], tries: TrieInputs { - state_trie: HashedPartialTrie::from(Node::Hash(inputs.trie_roots_after.state_root)), - transactions_trie: HashedPartialTrie::from(Node::Hash( - inputs.trie_roots_after.transactions_root, - )), - receipts_trie: HashedPartialTrie::from(Node::Hash( - inputs.trie_roots_after.receipts_root, - )), + state_trie: Node::Hash(inputs.trie_roots_after.state_root).freeze(), + transactions_trie: Node::Hash(inputs.trie_roots_after.transactions_root).freeze(), + receipts_trie: Node::Hash(inputs.trie_roots_after.receipts_root).freeze(), storage_tries: vec![], }, trie_roots_after: inputs.trie_roots_after, diff --git a/evm_arithmetization/tests/withdrawals.rs b/evm_arithmetization/tests/withdrawals.rs index e17b775b1..22658f82b 100644 --- a/evm_arithmetization/tests/withdrawals.rs +++ b/evm_arithmetization/tests/withdrawals.rs @@ -15,7 +15,6 @@ use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; @@ -40,8 +39,8 @@ fn test_withdrawals() -> anyhow::Result<()> { let (state_trie_before, storage_tries) = preinitialized_state_and_storage_tries()?; let mut beacon_roots_account_storage = storage_tries[0].1.clone(); - let transactions_trie = HashedPartialTrie::from(Node::Empty); - let receipts_trie = HashedPartialTrie::from(Node::Empty); + let transactions_trie = Node::Empty.freeze(); + let receipts_trie = Node::Empty.freeze(); let mut contract_code = HashMap::new(); contract_code.insert(keccak(vec![]), vec![]); @@ -50,7 +49,7 @@ fn test_withdrawals() -> anyhow::Result<()> { let withdrawals = vec![(H160(random()), U256(random()))]; let state_trie_after = { - let mut trie = HashedPartialTrie::from(Node::Empty); + let mut trie = Node::Empty; update_beacon_roots_account_storage( &mut beacon_roots_account_storage, block_metadata.block_timestamp, @@ -89,14 +88,17 @@ fn test_withdrawals() -> anyhow::Result<()> { withdrawals, global_exit_roots: vec![], tries: TrieInputs { - state_trie: 
state_trie_before, + state_trie: state_trie_before.freeze(), transactions_trie, receipts_trie, - storage_tries, + storage_tries: storage_tries + .into_iter() + .map(|(k, v)| (k, v.freeze())) + .collect(), }, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: Node::Empty.hash(), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), diff --git a/mpt_trie/Cargo.toml b/mpt_trie/Cargo.toml index 0d64fafa6..01784b33f 100644 --- a/mpt_trie/Cargo.toml +++ b/mpt_trie/Cargo.toml @@ -39,13 +39,3 @@ serde_json = { workspace = true } [features] default = ["trie_debug"] trie_debug = [] - -[lib] -doc-scrape-examples = true - -[[example]] -name = "simple" -doc-scrape-examples = true - -[package.metadata.docs.rs] -cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"] diff --git a/mpt_trie/examples/ethereum_trie.rs b/mpt_trie/examples/ethereum_trie.rs deleted file mode 100644 index d1aa1f799..000000000 --- a/mpt_trie/examples/ethereum_trie.rs +++ /dev/null @@ -1,101 +0,0 @@ -//! Examples of constructing [`PartialTrie`]s for actual Ethereum tries. -//! -//! It's a bit difficult to ensure that nodes in Ethereum tries are RLP encoded -//! correctly. This library encodes keys being passed into the trie, but does -//! not apply any encoding to the values themselves. -//! -//! Also note that due to RLP encoding, the underlying integer types (with the -//! exception of hash types like `H256`) don't affect the hash generated due to -//! the RLP encoding truncating any leading zeros. - -use std::ops::RangeInclusive; - -use ethereum_types::{H160, H256, U256}; -use keccak_hash::keccak; -use mpt_trie::partial_trie::PartialTrie; -use mpt_trie::trie_ops::TrieOpResult; -use mpt_trie::utils::TryFromIterator; -use mpt_trie::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, StandardTrie}, -}; -use rand::{rngs::StdRng, Rng, SeedableRng}; -use rlp::Encodable; -use rlp_derive::RlpEncodable; - -const RANGE_OF_STORAGE_ENTRIES_AN_ACCOUNT_CAN_HAVE: RangeInclusive = 0..=10; -const NUM_ACCOUNTS_TO_GEN: usize = 100; - -type HashedAccountAddr = H256; -type AccountAddr = H160; - -/// Eth test account entry. As a separate struct to allow easy RLP encoding. -#[derive(Debug, RlpEncodable)] -struct StateTrieEntry { - nonce: U256, - balance: U256, - storage_root: H256, - code_hash: H256, -} - -fn main() -> TrieOpResult<()> { - let mut rng = StdRng::seed_from_u64(0); - - let (account_entries, account_storage_tries): (Vec<_>, Vec<_>) = (0..NUM_ACCOUNTS_TO_GEN) - .map(|_| generate_fake_account_and_storage_trie(&mut rng)) - .collect::, _>>()? - .into_iter() - .unzip(); - - let _state_trie = StandardTrie::try_from_iter( - account_entries - .into_iter() - .map(|(k, acc)| (Nibbles::from_h256_be(k), acc.rlp_bytes().to_vec())), - )?; - - let _account_storage_tries: Vec<(AccountAddr, HashedPartialTrie)> = account_storage_tries; - - Ok(()) - - // TODO: Generate remaining tries... 
-} - -fn generate_fake_account_and_storage_trie( - rng: &mut StdRng, -) -> TrieOpResult<( - (HashedAccountAddr, StateTrieEntry), - (AccountAddr, HashedPartialTrie), -)> { - let account_addr: H160 = rng.gen(); - let hashed_account_addr = keccak(account_addr.as_bytes()); - - let account_storage_trie = generate_fake_account_storage_trie(rng)?; - - let acc_entry = StateTrieEntry { - nonce: gen_u256(rng), - balance: gen_u256(rng), - storage_root: account_storage_trie.hash(), - code_hash: rng.gen(), /* For the test, the contract code does not exist, so we can just - * "fake" it here. */ - }; - - Ok(( - (hashed_account_addr, acc_entry), - (account_addr, account_storage_trie), - )) -} - -fn generate_fake_account_storage_trie(rng: &mut StdRng) -> TrieOpResult { - let num_storage_entries = rng.gen_range(RANGE_OF_STORAGE_ENTRIES_AN_ACCOUNT_CAN_HAVE); - - HashedPartialTrie::try_from_iter((0..num_storage_entries).map(|_| { - let hashed_storage_addr = Nibbles::from_h256_be(rng.gen::()); - let storage_data = gen_u256(rng).rlp_bytes().to_vec(); - - (hashed_storage_addr, storage_data) - })) -} - -fn gen_u256(rng: &mut StdRng) -> U256 { - U256(rng.gen::<[u64; 4]>()) -} diff --git a/mpt_trie/examples/hash_nodes.rs b/mpt_trie/examples/hash_nodes.rs index 2b18d33fc..379347b4a 100644 --- a/mpt_trie/examples/hash_nodes.rs +++ b/mpt_trie/examples/hash_nodes.rs @@ -50,12 +50,7 @@ use std::{ str::FromStr, }; -use mpt_trie::partial_trie::PartialTrie; -use mpt_trie::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, Node}, - trie_ops::TrieOpResult, -}; +use mpt_trie::{nibbles::Nibbles, trie_ops::TrieOpResult, Node}; fn main() -> TrieOpResult<()> { pretty_env_logger::try_init().unwrap(); @@ -63,7 +58,7 @@ fn main() -> TrieOpResult<()> { // Lets build the (binary) tries in the module-level docs. Since the example // uses binary nodes while branch nodes are really `16-ary`, we'll only use // branch slots `0` and `1`. - let mut full_trie = HashedPartialTrie::default(); + let mut full_trie = Node::default(); // Note the nibbles read the most significant nibble first (eg. `0x12` reads `1` // first). @@ -77,13 +72,13 @@ fn main() -> TrieOpResult<()> { // Slight hack. Normally this has would come from your own logic that is making // calls into this crate to construct the `PartialTrie`. May add API to // do this in the future if needed. - let left_side_hash = match &*full_trie { + let left_side_hash = match full_trie { Node::Branch { children, .. } => children[0].hash(), _ => unreachable!(), }; // Hash version. `0` branch is replaced with a `Hash` node. - let mut hash_trie = HashedPartialTrie::default(); + let mut hash_trie = Node::default(); hash_trie.insert(Nibbles::from_str("0x0").unwrap(), left_side_hash)?; // Hash node hash_trie.insert(0x10_u64, large_val(3))?; // 3rd from left. hash_trie.insert(0x11_u64, large_val(4))?; // 4th from left. diff --git a/mpt_trie/examples/simple.rs b/mpt_trie/examples/simple.rs deleted file mode 100644 index eb70a52b1..000000000 --- a/mpt_trie/examples/simple.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! Simple example showing off the basics of the library. 
- -use std::iter::once; - -use mpt_trie::partial_trie::PartialTrie; -use mpt_trie::utils::TryFromIterator; -use mpt_trie::{ - nibbles::{Nibbles, ToNibbles}, - partial_trie::{HashedPartialTrie, StandardTrie}, - trie_ops::{TrieOpResult, ValOrHash}, -}; - -fn main() -> TrieOpResult<()> { - // Construct an empty trie: - let mut trie = StandardTrie::default(); - - // Elements can be inserted into the trie by calling insert directly: - trie.insert( - Nibbles::from_bytes_be(b"hello").unwrap(), - b"world!".to_vec(), - )?; - - // Or by initializing the trie with an iterator of key value pairs: - let mut trie = StandardTrie::try_from_iter(vec![ - (0x1234_u32, b"some data".to_vec()), - (9001_u32, vec![1, 2, 3]), - ])?; - - // Tries can be queried: - assert_eq!(trie.get(0x1234_u32), Some(b"some data".as_slice())); - assert_eq!(trie.get(0x5678_u32), None); - - // Trie hashes can be calculated: - let _hash = trie.hash(); - - // `PartialTrie` can produce iterators which iterate over the values it - // contains: - assert_eq!( - trie.items().collect::>(), - vec![ - (0x1234_u32.into(), ValOrHash::Val(b"some data".to_vec())), - (9001_u32.into(), ValOrHash::Val(vec![1, 2, 3])) - ] - ); - - // Values can be deleted: - let del_val = trie.delete(0x1234_u32)?; - assert_eq!(del_val.unwrap(), b"some data".to_vec()); - assert_eq!(trie.get(0x1234_u32), None); - - // It's important to note how types are converted to `Nibbles`. This is - // especially important if you are trying to get hashes that are in agreement - // with a trie in an Ethereum EVM. - // - // By default, when converting to `Nibbles`, types are not padded to the nearest - // byte. For example, `Nibbles::From(0x123)` does not becomes `0x0123` - // internally. Many Ethereum trie libraries/EVM impls do this silently. If you - // want to have identical hashes to an Ethereum trie, you will want to create - // `Nibbles` like this instead: - - // Note that `From` just calls `to_nibbles` by default instead of - // `to_nibbles_byte_padded`. - let hash_1 = HashedPartialTrie::try_from_iter(once(( - 0x19002_u32.to_nibbles_byte_padded(), - vec![4, 5, 6], - )))? - .hash(); - let hash_2 = - HashedPartialTrie::try_from_iter(once((0x19002_u32.to_nibbles(), vec![4, 5, 6])))?.hash(); - assert_ne!(hash_1, hash_2); - - // Finally note that `Nibbles` which are constructed from bytes are always - // padded to the nearest byte: - assert_eq!( - format!("{:x}", Nibbles::from_bytes_be(&[1, 35, 69]).unwrap()), - "0x012345" - ); - assert_eq!( - format!("{:x}", Nibbles::from_bytes_le(&[69, 35, 1]).unwrap()), - "0x012345" - ); - - Ok(()) -} diff --git a/mpt_trie/src/builder.rs b/mpt_trie/src/builder.rs index 48475f4d2..f88935d0e 100644 --- a/mpt_trie/src/builder.rs +++ b/mpt_trie/src/builder.rs @@ -1,16 +1,12 @@ //! A builder for constructing a partial trie from a collection of proofs. -use std::collections::HashMap; -use std::sync::Arc; +use std::{collections::HashMap, sync::Arc}; use ethereum_types::H256; use keccak_hash::keccak; use rlp::{Prototype, Rlp}; -use super::{ - nibbles::Nibbles, - partial_trie::{Node, PartialTrie, WrappedNode}, -}; +use super::{nibbles::Nibbles, partial_trie::Node}; /// The hash of an empty trie. const EMPTY_TRIE_HASH: H256 = H256([ @@ -20,20 +16,15 @@ const EMPTY_TRIE_HASH: H256 = H256([ #[derive(Debug)] /// A builder for constructing a partial trie from a collection of nodes. 
-pub struct PartialTrieBuilder { +pub struct PartialTrieBuilder { root: H256, nodes: HashMap>, - _marker: std::marker::PhantomData, } -impl PartialTrieBuilder { +impl PartialTrieBuilder { /// Creates a new `PartialTrieBuilder` with the given root and nodes. pub const fn new(root: H256, nodes: HashMap>) -> Self { - PartialTrieBuilder { - root, - nodes, - _marker: std::marker::PhantomData, - } + PartialTrieBuilder { root, nodes } } /// Inserts a proof into the builder. @@ -71,7 +62,7 @@ impl PartialTrieBuilder { } /// Builds the partial trie from the nodes and root. - pub fn build(self) -> T { + pub fn build(self) -> Node { construct_partial_trie(self.root, &self.nodes) } @@ -90,25 +81,23 @@ impl PartialTrieBuilder { } /// Constructs a partial trie from a root hash and a collection of nodes. -fn construct_partial_trie(hash: H256, nodes: &HashMap>) -> T { +fn construct_partial_trie(hash: H256, nodes: &HashMap>) -> Node { let bytes = match nodes.get(&hash) { Some(value) => rlp::decode_list::>(value), - None if [H256::zero(), EMPTY_TRIE_HASH].contains(&hash) => return T::default(), - None => return T::new(Node::Hash(hash)), + None if [H256::zero(), EMPTY_TRIE_HASH].contains(&hash) => return Node::Empty, + None => return Node::Hash(hash), }; decode_node(bytes, nodes) } -fn decode_node(bytes: Vec>, nodes: &HashMap>) -> T { - let node = match bytes.len() { +fn decode_node(bytes: Vec>, nodes: &HashMap>) -> Node { + match bytes.len() { 17 => parse_branch_node(bytes, nodes), 2 if is_extension_node(&bytes) => parse_extension_node(bytes, nodes), 2 if is_leaf_node(&bytes) => parse_leaf_node(bytes), _ => unreachable!(), - }; - - T::new(node) + } } /// Returns true if the node is an extension node. @@ -122,31 +111,24 @@ fn is_leaf_node(bytes: &[Vec]) -> bool { } /// Parses a branch node from the given bytes. -fn parse_branch_node( - bytes: Vec>, - nodes: &HashMap>, -) -> Node { +fn parse_branch_node(bytes: Vec>, nodes: &HashMap>) -> Node { let children = (0..16) .map(|i| { - let child = match bytes[i].is_empty() { - true => T::default(), + Arc::new(match bytes[i].is_empty() { + true => Node::Empty, false => parse_child_node(&bytes[i], nodes), - }; - Arc::new(Box::new(child)) + }) }) - .collect::>>(); + .collect::>(); - Node::::Branch { + Node::Branch { children: children.try_into().unwrap(), value: bytes[16].clone(), } } /// Parses an extension node from the given bytes. -fn parse_extension_node( - bytes: Vec>, - nodes: &HashMap>, -) -> Node { +fn parse_extension_node(bytes: Vec>, nodes: &HashMap>) -> Node { let mut encoded_path = Nibbles::from_bytes_be(&bytes[0][..]).unwrap(); if encoded_path.pop_next_nibble_front() == 0 { @@ -155,12 +137,12 @@ fn parse_extension_node( Node::Extension { nibbles: encoded_path, - child: Arc::new(Box::new(parse_child_node(&bytes[1], nodes))), + child: (Arc::new(parse_child_node(&bytes[1], nodes))), } } /// Parses a leaf node from the given bytes. -fn parse_leaf_node(bytes: Vec>) -> Node { +fn parse_leaf_node(bytes: Vec>) -> Node { let mut encoded_path = Nibbles::from_bytes_be(&bytes[0][..]).unwrap(); if encoded_path.pop_next_nibble_front() == 2 { @@ -174,7 +156,7 @@ fn parse_leaf_node(bytes: Vec>) -> Node { } /// Parses a child node from the given bytes. 
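
With the generic parameter removed, `PartialTrieBuilder` is constructed from a root hash and a map of node hashes to RLP-encoded nodes, and `build()` now returns a `Node` directly (subtries whose nodes are missing from the map come back as `Node::Hash`). A minimal usage sketch based on the `new` and `build` signatures in this file; the helper name and the provenance of the node map are illustrative:

    use std::collections::HashMap;

    use ethereum_types::H256;
    use mpt_trie::{builder::PartialTrieBuilder, Node};

    /// Rebuild a partial trie from `root` and a map of node hash -> RLP encoding
    /// (for example, gathered from proof responses).
    fn reconstruct(root: H256, nodes: HashMap<H256, Vec<u8>>) -> Node {
        PartialTrieBuilder::new(root, nodes).build()
    }
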
-fn parse_child_node(bytes: &[u8], nodes: &HashMap>) -> T { +fn parse_child_node(bytes: &[u8], nodes: &HashMap>) -> Node { match bytes.len() { x if x < 32 => decode_node(rlp::decode_list::>(bytes), nodes), _ => construct_partial_trie(H256::from_slice(bytes), nodes), diff --git a/mpt_trie/src/debug_tools/diff.rs b/mpt_trie/src/debug_tools/diff.rs index c6a8458db..154142ed5 100644 --- a/mpt_trie/src/debug_tools/diff.rs +++ b/mpt_trie/src/debug_tools/diff.rs @@ -25,21 +25,17 @@ //! If there are multiple differences, then this will likely be what you want //! to use. +use std::fmt::Display; use std::fmt::{self, Debug}; -use std::{fmt::Display, ops::Deref}; use ethereum_types::H256; use crate::utils::{get_segment_from_node_and_key_piece, TriePath}; -use crate::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, Node, PartialTrie}, - utils::TrieNodeType, -}; +use crate::{nibbles::Nibbles, partial_trie::Node, utils::NodeKind}; /// Get the key piece from the given node if applicable. Note that /// [branch][`Node::Branch`]s have no [`Nibble`] directly associated with them. -fn get_key_piece_from_node(n: &Node) -> Nibbles { +fn get_key_piece_from_node(n: &Node) -> Nibbles { match n { Node::Empty | Node::Hash(_) | Node::Branch { .. } => Nibbles::default(), Node::Extension { nibbles, child: _ } | Node::Leaf { nibbles, value: _ } => *nibbles, @@ -102,12 +98,7 @@ pub struct DiffPoint { } impl DiffPoint { - fn new( - child_a: &HashedPartialTrie, - child_b: &HashedPartialTrie, - parent_k: Nibbles, - path: TriePath, - ) -> Self { + fn new(child_a: &Node, child_b: &Node, parent_k: Nibbles, path: TriePath) -> Self { let a_key = parent_k.merge_nibbles(&get_key_piece_from_node(child_a)); let b_key = parent_k.merge_nibbles(&get_key_piece_from_node(child_b)); @@ -141,7 +132,7 @@ pub struct NodeInfo { /// The direct value associated with the node (only applicable to `Leaf` & /// `Branch` nodes). value: Option>, - node_type: TrieNodeType, + node_type: NodeKind, hash: H256, } @@ -163,11 +154,11 @@ impl Display for NodeInfo { } impl NodeInfo { - fn new(n: &HashedPartialTrie, key: Nibbles, value: Option>) -> Self { + fn new(n: &Node, key: Nibbles, value: Option>) -> Self { Self { key, value, - node_type: n.deref().into(), + node_type: NodeKind::of(n), hash: n.hash(), } } @@ -175,7 +166,7 @@ impl NodeInfo { /// Create a diff between two tries. Will perform both types of diff searches /// (top-down & bottom-up). -pub fn create_diff_between_tries(a: &HashedPartialTrie, b: &HashedPartialTrie) -> TrieDiff { +pub fn create_diff_between_tries(a: &Node, b: &Node) -> TrieDiff { TrieDiff { latest_diff_res: find_latest_diff_point_between_tries(a, b), } @@ -183,10 +174,7 @@ pub fn create_diff_between_tries(a: &HashedPartialTrie, b: &HashedPartialTrie) - // Only support `HashedPartialTrie` due to it being significantly faster to // detect differences because of caching hashes. -fn find_latest_diff_point_between_tries( - a: &HashedPartialTrie, - b: &HashedPartialTrie, -) -> Option { +fn find_latest_diff_point_between_tries(a: &Node, b: &Node) -> Option { let state = DepthDiffPerCallState::new(a, b, Nibbles::default(), 0); let mut longest_state = DepthNodeDiffState::default(); @@ -232,8 +220,8 @@ impl DepthNodeDiffState { fn replace_longest_field_if_our_key_is_larger( field: &mut Option, parent_k: &Nibbles, - child_a: &HashedPartialTrie, - child_b: &HashedPartialTrie, + child_a: &Node, + child_b: &Node, path: TriePath, ) { if field @@ -248,8 +236,8 @@ impl DepthNodeDiffState { /// State that is copied per recursive call. 
#[derive(Clone, Debug)] struct DepthDiffPerCallState<'a> { - a: &'a HashedPartialTrie, - b: &'a HashedPartialTrie, + a: &'a Node, + b: &'a Node, curr_key: Nibbles, curr_depth: usize, @@ -260,12 +248,7 @@ struct DepthDiffPerCallState<'a> { impl<'a> DepthDiffPerCallState<'a> { /// Exists solely to prevent construction of this type from going over /// multiple lines. - fn new( - a: &'a HashedPartialTrie, - b: &'a HashedPartialTrie, - curr_key: Nibbles, - curr_depth: usize, - ) -> Self { + fn new(a: &'a Node, b: &'a Node, curr_key: Nibbles, curr_depth: usize) -> Self { Self { a, b, @@ -277,12 +260,7 @@ impl<'a> DepthDiffPerCallState<'a> { /// Note: The assumption here is that `a` and `b` are of the same node type /// and have the key. - fn new_from_parent( - &self, - a: &'a HashedPartialTrie, - b: &'a HashedPartialTrie, - key_piece: &Nibbles, - ) -> Self { + fn new_from_parent(&self, a: &'a Node, b: &'a Node, key_piece: &Nibbles) -> Self { let new_segment = get_segment_from_node_and_key_piece(self.a, key_piece); let new_path = self.curr_path.dup_and_append(new_segment); @@ -309,8 +287,8 @@ fn find_latest_diff_point_between_tries_rec( return DiffDetectionState::NoDiffDetected; } - let a_type: TrieNodeType = state.a.deref().into(); - let b_type: TrieNodeType = state.b.deref().into(); + let a_type: NodeKind = NodeKind::of(state.a); + let b_type: NodeKind = NodeKind::of(state.b); let a_key_piece = get_key_piece_from_node(state.a); let b_key_piece = get_key_piece_from_node(state.b); @@ -321,7 +299,7 @@ fn find_latest_diff_point_between_tries_rec( depth_state.try_update_longest_divergence_key_node(state); DiffDetectionState::NodeTypesDiffer } else { - match (&state.a.node, &state.b.node) { + match (&state.a, &state.b) { (Node::Empty, Node::Empty) => DiffDetectionState::NoDiffDetected, (Node::Hash(a_hash), Node::Hash(b_hash)) => { create_diff_detection_state_based_from_hashes( @@ -414,7 +392,7 @@ fn create_diff_detection_state_based_from_hashes( /// If the node type contains a value (without looking at the children), then /// return it. -const fn get_value_from_node(n: &Node) -> Option<&Vec> { +const fn get_value_from_node(n: &Node) -> Option<&Vec> { match n { Node::Empty | Node::Hash(_) | Node::Extension { .. } => None, Node::Branch { value, .. } | Node::Leaf { nibbles: _, value } => Some(value), @@ -424,17 +402,12 @@ const fn get_value_from_node(n: &Node) -> Option<&Vec> { #[cfg(test)] mod tests { use super::{create_diff_between_tries, DiffPoint, NodeInfo, TriePath}; - use crate::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, PartialTrie}, - trie_ops::TrieOpResult, - utils::TrieNodeType, - }; + use crate::{nibbles::Nibbles, partial_trie::Node, trie_ops::TrieOpResult, utils::NodeKind}; #[test] fn depth_single_node_hash_diffs_work() -> TrieOpResult<()> { // TODO: Reduce duplication once we identify common structures across tests... 
- let mut a = HashedPartialTrie::default(); + let mut a = Node::default(); a.insert(0x1234, vec![0])?; let a_hash = a.hash(); @@ -447,14 +420,14 @@ mod tests { let expected_a = NodeInfo { key: 0x1234.into(), value: Some(vec![0]), - node_type: TrieNodeType::Leaf, + node_type: NodeKind::Leaf, hash: a_hash, }; let expected_b = NodeInfo { key: 0x1234.into(), value: Some(vec![1]), - node_type: TrieNodeType::Leaf, + node_type: NodeKind::Leaf, hash: b_hash, }; diff --git a/mpt_trie/src/debug_tools/query.rs b/mpt_trie/src/debug_tools/query.rs index 753900262..d5ea3ad8c 100644 --- a/mpt_trie/src/debug_tools/query.rs +++ b/mpt_trie/src/debug_tools/query.rs @@ -1,13 +1,16 @@ //! Query tooling to report info on the path taken when searching down a trie //! with a given key. -use std::fmt::{self, Display}; +use std::{ + fmt::{self, Display}, + sync::Arc, +}; use ethereum_types::H256; use crate::{ nibbles::Nibbles, - partial_trie::{Node, PartialTrie, WrappedNode}, + partial_trie::Node, utils::{get_segment_from_node_and_key_piece, TriePath, TrieSegment}, }; @@ -18,10 +21,7 @@ use crate::{ /// and [Leaf][`Node::Leaf`] nodes, and the only way to get the `Nibble` /// "associated" with a branch is to look at the next `Nibble` in the current /// key as we traverse down it. -fn get_key_piece_from_node_pulling_from_key_for_branches( - n: &Node, - curr_key: &Nibbles, -) -> Nibbles { +fn get_key_piece_from_node_pulling_from_key_for_branches(n: &Node, curr_key: &Nibbles) -> Nibbles { match n { Node::Empty | Node::Hash(_) => Nibbles::default(), Node::Branch { .. } => curr_key.get_next_nibbles(1), @@ -133,7 +133,7 @@ impl Display for ExtraNodeSegmentInfo { } impl ExtraNodeSegmentInfo { - pub(super) fn from_node(n: &Node) -> Option { + pub(super) fn from_node(n: &Node) -> Option { match n { Node::Empty | Node::Extension { .. } => None, Node::Hash(h) => Some(ExtraNodeSegmentInfo::Hash(*h)), @@ -147,7 +147,7 @@ impl ExtraNodeSegmentInfo { } } -fn create_child_mask_from_children(children: &[WrappedNode; 16]) -> u16 { +fn create_child_mask_from_children(children: &[Arc; 16]) -> u16 { let mut mask: u16 = 0; for (i, child) in children.iter().enumerate().take(16) { @@ -262,10 +262,7 @@ impl DebugQueryOutput { } /// Get debug information on the path taken when querying a key in a given trie. -pub fn get_path_from_query>( - trie: &Node, - q: Q, -) -> DebugQueryOutput { +pub fn get_path_from_query>(trie: &Node, q: Q) -> DebugQueryOutput { let q = q.into(); let mut out = DebugQueryOutput::new(q.k, q.params); @@ -274,11 +271,7 @@ pub fn get_path_from_query>( out } -fn get_path_from_query_rec( - node: &Node, - curr_key: &mut Nibbles, - query_out: &mut DebugQueryOutput, -) { +fn get_path_from_query_rec(node: &Node, curr_key: &mut Nibbles, query_out: &mut DebugQueryOutput) { let key_piece = get_key_piece_from_node_pulling_from_key_for_branches(node, curr_key); let seg = get_segment_from_node_and_key_piece(node, &key_piece); diff --git a/mpt_trie/src/debug_tools/stats.rs b/mpt_trie/src/debug_tools/stats.rs index 1f2762936..ad2d6dc9b 100644 --- a/mpt_trie/src/debug_tools/stats.rs +++ b/mpt_trie/src/debug_tools/stats.rs @@ -7,7 +7,7 @@ use std::fmt::{self, Display}; use num_traits::ToPrimitive; -use crate::partial_trie::{Node, PartialTrie}; +use crate::partial_trie::Node; #[derive(Debug, Default)] /// Statistics for a given trie, consisting of node count aggregated @@ -245,16 +245,16 @@ impl DepthStats { /// Returns trie statistics consisting of node type counts as well as depth /// statistics. 
-pub fn get_trie_stats(trie: &T) -> TrieStats { +pub fn get_trie_stats(trie: &Node) -> TrieStats { get_trie_stats_common(trie, None) } /// Returns trie statistics with a given name. -pub fn get_trie_stats_with_name(trie: &T, name: String) -> TrieStats { +pub fn get_trie_stats_with_name(trie: &Node, name: String) -> TrieStats { get_trie_stats_common(trie, Some(name)) } -fn get_trie_stats_common(trie: &T, name: Option) -> TrieStats { +fn get_trie_stats_common(trie: &Node, name: Option) -> TrieStats { let mut state = CurrTrackingState::default(); get_trie_stats_rec(trie, &mut state, 0); @@ -272,11 +272,7 @@ fn get_trie_stats_common(trie: &T, name: Option) -> Trie } } -fn get_trie_stats_rec( - node: &Node, - state: &mut CurrTrackingState, - curr_depth: usize, -) { +fn get_trie_stats_rec(node: &Node, state: &mut CurrTrackingState, curr_depth: usize) { match node { Node::Empty => { state.counts.empty += 1; @@ -312,7 +308,7 @@ fn get_trie_stats_rec( mod tests { use super::get_trie_stats; use crate::{ - partial_trie::{HashedPartialTrie, PartialTrie}, + partial_trie::Node, testing_utils::{ generate_n_random_fixed_trie_hash_entries, generate_n_random_fixed_trie_value_entries, handmade_trie_1, @@ -372,7 +368,7 @@ mod tests { let val_entries = generate_n_random_fixed_trie_value_entries(n_leaf_nodes, seed); let hash_entries = generate_n_random_fixed_trie_hash_entries(n_hash_nodes, seed + 1); - let mut trie = HashedPartialTrie::default(); + let mut trie = Node::default(); trie.extend(val_entries)?; trie.extend(hash_entries)?; diff --git a/mpt_trie/src/lib.rs b/mpt_trie/src/lib.rs index c4dfabfa5..913b11f10 100644 --- a/mpt_trie/src/lib.rs +++ b/mpt_trie/src/lib.rs @@ -5,7 +5,7 @@ //! - You only need a portion of an existing larger trie. //! - You need this partial trie to produce the same hash as the full trie. //! -//! The core of this library is the [`PartialTrie`][partial_trie::PartialTrie] +//! The core of this library is the [`Node`] //! type, which represents a trie that is a subset of an existing larger one. //! Nodes that are not to be included in the `PartialTrie` are replaced with //! [`Hash`][partial_trie::Node::Hash] nodes, which contains the merkle @@ -14,11 +14,10 @@ #![allow(incomplete_features)] #![deny(rustdoc::broken_intra_doc_links)] #![deny(missing_debug_implementations)] -#![deny(missing_docs)] pub mod builder; pub mod nibbles; -pub mod partial_trie; +mod partial_trie; pub mod special_query; mod trie_hashing; pub mod trie_ops; @@ -30,3 +29,5 @@ pub mod debug_tools; #[cfg(test)] pub(crate) mod testing_utils; + +pub use partial_trie::{FrozenNode, Node}; diff --git a/mpt_trie/src/nibbles.rs b/mpt_trie/src/nibbles.rs index 1079aa6d2..5d5f2044a 100644 --- a/mpt_trie/src/nibbles.rs +++ b/mpt_trie/src/nibbles.rs @@ -255,8 +255,7 @@ impl From for NibblesIntern { } #[derive(Copy, Clone, Deserialize, Default, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)] -/// A sequence of nibbles which is used as the key type into -/// [`PartialTrie`][`crate::partial_trie::PartialTrie`]. +/// A sequence of nibbles which is used as the key type into a [`crate::Node`]. /// /// Generally, if you're constructing keys from actual trie data, you probably /// will be working with `U256`s and `H256`s both of which `Nibbles` has a diff --git a/mpt_trie/src/partial_trie.rs b/mpt_trie/src/partial_trie.rs index 27027eeae..9d91bbf4e 100644 --- a/mpt_trie/src/partial_trie.rs +++ b/mpt_trie/src/partial_trie.rs @@ -1,126 +1,37 @@ //! Definitions for the core types [`PartialTrie`] and [`Nibbles`]. 
-use std::{ - fmt::Debug, - ops::{Deref, DerefMut}, - sync::Arc, -}; +use std::{fmt::Debug, ops::Deref, sync::Arc}; use ethereum_types::H256; -use parking_lot::RwLock; use serde::{Deserialize, Serialize}; use crate::{ nibbles::Nibbles, trie_hashing::{hash_trie, rlp_encode_and_hash_node, EncodedNode}, trie_ops::{TrieOpResult, ValOrHash}, - utils::{bytes_to_h256, TryFromIterator}, + utils::TryFromIterator, }; -macro_rules! impl_from_for_trie_type { - ($type:ty) => { - impl From> for $type { - fn from(v: Node<$type>) -> Self { - Self::new(v) - } - } - }; -} - -/// Alias for a node that is a child of an extension or branch node. -pub type WrappedNode = Arc>; - -impl AsRef> for WrappedNode { - fn as_ref(&self) -> &Node { - self - } -} - -impl From> for WrappedNode { - fn from(v: Node) -> Self { - Arc::new(Box::new(N::new(v))) - } -} - -/// A trait for any types that are Tries. -pub trait PartialTrie: - Clone + Debug + Default + DerefMut> + Eq + TrieNodeIntern -{ - /// Creates a new partial trie from a node. - fn new(n: Node) -> Self; - - /// Inserts a node into the trie. - fn insert(&mut self, k: K, v: V) -> TrieOpResult<()> - where - K: Into, - V: Into; - - /// Add more nodes to the trie through an iterator - fn extend(&mut self, nodes: I) -> TrieOpResult<()> - where - K: Into, - V: Into, - I: IntoIterator; - - /// Get a node if it exists in the trie. - fn get(&self, k: K) -> Option<&[u8]> - where - K: Into; - - /// Deletes a `Leaf` node or `Branch` value field if it exists. - /// - /// To agree with Ethereum specs, deleting nodes does not result in the trie - /// removing nodes that are redundant after deletion. For example, a - /// `Branch` node that is completely empty after all of its children are - /// deleted is not pruned. Also note: - /// - Deleted leaves are replaced with `Empty` nodes. - /// - Deleted branch values are replaced with empty `Vec`s. - /// - /// # Panics - /// If a `Hash` node is traversed, a panic will occur. Since `Hash` nodes - /// are meant for parts of the trie that are not relevant, traversing one - /// means that a `Hash` node was created that potentially should not have - /// been. - fn delete(&mut self, k: K) -> TrieOpResult>> - where - K: Into; - - /// Get the hash for the node. - fn hash(&self) -> H256; - - /// Returns an iterator over the trie that returns all key/value pairs for - /// every `Leaf` and `Hash` node. - fn items(&self) -> impl Iterator; - - /// Returns an iterator over the trie that returns all keys for every `Leaf` - /// and `Hash` node. - fn keys(&self) -> impl Iterator; - - /// Returns an iterator over the trie that returns all values for every - /// `Leaf` and `Hash` node. - fn values(&self) -> impl Iterator; - - /// Returns `true` if the trie contains an element with the given key. - fn contains(&self, k: K) -> bool - where - K: Into; -} - -/// Part of the trait that is not really part of the public interface but -/// implementer of other node types still need to implement. -pub trait TrieNodeIntern { - /// Returns the hash of the rlp encoding of self. - fn hash_intern(&self) -> EncodedNode; -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -/// A partial trie, or a sub-trie thereof. This mimics the structure of an -/// Ethereum trie, except with an additional `Hash` node type, representing a -/// node whose data is not needed to process our transaction. 
-pub enum Node -where - T: Clone + Debug, -{ +/// A node (and equivalently, a tree) in a [Merkle Patricia Trie](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/). +/// +/// Nodes may be [hashed](Self::hash) recursively. +/// +/// Any node in the trie may be replaced by its [hash](Self::hash) in a +/// [Node::Hash], and the root hash of the trie will remain unchanged. +/// +/// ```text +/// R R' +/// / \ / \ +/// A B H B +/// / \ \ \ +/// C D E E +/// ``` +/// +/// That is, if `H` is `A`'s hash, then the roots of `R` and `R'` are the same. +/// +/// This is particularly useful for pruning unrequired data from tries. +#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)] +pub enum Node { /// An empty trie. #[default] Empty, @@ -134,7 +45,7 @@ where /// A branch node, which consists of 16 children and an optional value. Branch { /// A slice containing the 16 children of this branch node. - children: [WrappedNode; 16], + children: [Arc; 16], /// The payload of this node. value: Vec, }, @@ -144,7 +55,7 @@ where /// The path of this extension. nibbles: Nibbles, /// The child of this extension node. - child: WrappedNode, + child: Arc, }, /// A leaf node, which consists of a list of nibbles and a value. Leaf { @@ -155,301 +66,155 @@ where }, } -impl Eq for Node {} - -/// `PartialTrie` equality means all nodes through the trie are equivalent. -impl PartialEq for Node { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Node::Empty, Node::Empty) => true, - (Node::Hash(h1), Node::Hash(h2)) => h1 == h2, - ( - Node::Branch { - children: c1, - value: v1, - }, - Node::Branch { - children: c2, - value: v2, - }, - ) => v1 == v2 && (0..16).all(|i| c1[i] == c2[i]), - ( - Node::Extension { - nibbles: n1, - child: c1, - }, - Node::Extension { - nibbles: n2, - child: c2, - }, - ) => n1 == n2 && c1 == c2, - ( - Node::Leaf { - nibbles: n1, - value: v1, - }, - Node::Leaf { - nibbles: n2, - value: v2, - }, - ) => n1 == n2 && v1 == v2, - (_, _) => false, - } - } +/// An immutable [`Node`] which caches the [`Node::hash`]. +/// +/// Freely convertible with [`Node`]. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(from = "Node", into = "Node")] +pub struct FrozenNode { + node: Node, + hash: H256, } -/// A simple PartialTrie with no hash caching. -/// Note that while you can *still* calculate the hashes for any given node, the -/// hashes are not cached and are recalculated each time. 
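// Sketch of the hashing property described above: swapping a subtree for
// `Node::Hash(subtree.hash())` leaves the root hash unchanged (this mirrors
// the `replacing_branch_of_leaves_with_hash_nodes_produced_same_hash` test).
// The two demo keys are chosen so the root becomes a `Branch`, and the values
// are over 32 bytes so the children are referenced by hash rather than inlined.
use std::sync::Arc;
use mpt_trie::Node;

fn hash_replacement_keeps_root() -> mpt_trie::trie_ops::TrieOpResult<()> {
    let mut trie = Node::default();
    trie.insert(0x1234, vec![1u8; 40])?;
    trie.insert(0x5678, vec![2u8; 40])?;
    let root_before = trie.hash();

    if let Node::Branch { children, .. } = &mut trie {
        let pruned = children[0x1].hash();
        children[0x1] = Arc::new(Node::Hash(pruned));
    }
    assert_eq!(trie.hash(), root_before);
    Ok(())
}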
-#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] -pub struct StandardTrie(pub Node); - -impl_from_for_trie_type!(StandardTrie); - -impl PartialTrie for StandardTrie { - fn new(n: Node) -> Self { - Self(n) - } - - fn insert(&mut self, k: K, v: V) -> TrieOpResult<()> - where - K: Into, - V: Into, - { - self.0.trie_insert(k, v)?; - Ok(()) - } - - fn extend(&mut self, nodes: I) -> TrieOpResult<()> - where - K: Into, - V: Into, - I: IntoIterator, - { - self.0.trie_extend(nodes) - } - - fn get(&self, k: K) -> Option<&[u8]> - where - K: Into, - { - self.0.trie_get(k) - } - - fn delete(&mut self, k: K) -> TrieOpResult>> - where - K: Into, - { - self.0.trie_delete(k) - } - - fn hash(&self) -> H256 { - hash_trie(self) - } - - fn items(&self) -> impl Iterator { - self.0.trie_items() - } - - fn keys(&self) -> impl Iterator { - self.0.trie_keys() - } - - fn values(&self) -> impl Iterator { - self.0.trie_values() - } - - fn contains(&self, k: K) -> bool - where - K: Into, - { - self.0.trie_has_item_by_key(k) +impl From for FrozenNode { + fn from(value: Node) -> Self { + value.freeze() } } -impl TrieNodeIntern for StandardTrie { - fn hash_intern(&self) -> EncodedNode { - rlp_encode_and_hash_node(self) +impl From for Node { + fn from(value: FrozenNode) -> Self { + value.thaw() } } -impl Deref for StandardTrie { - type Target = Node; - - fn deref(&self) -> &Self::Target { - &self.0 +impl Default for FrozenNode { + fn default() -> Self { + Node::default().freeze() } } -impl DerefMut for StandardTrie { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 +impl FrozenNode { + /// A cached version of [`Node::hash`]. + pub fn hash(&self) -> H256 { + self.hash } } -impl TryFromIterator<(K, V)> for StandardTrie -where - K: Into, - V: Into, -{ - fn try_from_iter>(nodes: T) -> TrieOpResult { - from_iter_common(nodes) +impl FrozenNode { + /// Unfreeze this object, making it mutable again. + pub fn thaw(self) -> Node { + self.node } } +impl Deref for FrozenNode { + type Target = Node; -/// A partial trie that lazily caches hashes for each node as needed. -/// If you are doing frequent hashing of node, you probably want to use this -/// `Trie` variant. -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct HashedPartialTrie { - pub(crate) node: Node, - pub(crate) hash: Arc>>, -} - -impl_from_for_trie_type!(HashedPartialTrie); - -impl HashedPartialTrie { - /// Lazily get calculates the hash for the node, - pub(crate) fn get_hash(&self) -> H256 { - let hash = *self.hash.read(); - - match hash { - Some(h) => h, - None => hash_trie(self), - } - } - - pub(crate) fn set_hash(&self, v: Option) { - *self.hash.write() = v; + fn deref(&self) -> &Self::Target { + &self.node } } -impl PartialTrie for HashedPartialTrie { - fn new(node: Node) -> Self { - Self { - node, - hash: Arc::new(RwLock::new(None)), +impl Node { + /// Make this node immutable, caching it's [`Node::hash`]. + pub fn freeze(self) -> FrozenNode { + FrozenNode { + hash: self.hash(), + node: self, } } - - fn insert(&mut self, k: K, v: V) -> TrieOpResult<()> + /// Inserts a node into the trie. 
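// Freeze/thaw round trip for the new `FrozenNode`: `freeze()` computes the hash
// once, `hash()` then returns the cached value, read-only `Node` methods stay
// available through `Deref`, and `thaw()` hands the mutable trie back.
use mpt_trie::Node;

fn freeze_thaw_round_trip() -> mpt_trie::trie_ops::TrieOpResult<()> {
    let mut trie = Node::default();
    trie.insert(0x1234, vec![91u8])?;

    let frozen = trie.freeze();          // hash computed here, once
    let cached = frozen.hash();          // cheap: returns the cached `H256`
    assert_eq!(frozen.get(0x1234), Some([91u8].as_slice())); // via `Deref<Target = Node>`

    let mut thawed = frozen.thaw();      // mutable again; the cache is discarded
    thawed.insert(0x5678, vec![92u8])?;
    assert_ne!(thawed.hash(), cached);
    Ok(())
}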
+ pub fn insert(&mut self, k: K, v: V) -> TrieOpResult<()> where K: Into, V: Into, { - self.node.trie_insert(k, v)?; - self.set_hash(None); + self.trie_insert(k, v)?; Ok(()) } - - fn extend(&mut self, nodes: I) -> TrieOpResult<()> + /// Add more nodes to the trie through an iterator + pub fn extend(&mut self, nodes: I) -> TrieOpResult<()> where K: Into, V: Into, I: IntoIterator, { - self.node.trie_extend(nodes)?; - self.set_hash(None); + self.trie_extend(nodes)?; Ok(()) } - - fn get(&self, k: K) -> Option<&[u8]> + /// Get a node if it exists in the trie. + pub fn get(&self, k: K) -> Option<&[u8]> where K: Into, { - self.node.trie_get(k) + self.trie_get(k) } - fn delete(&mut self, k: K) -> TrieOpResult>> + /// Deletes a `Leaf` node or `Branch` value field if it exists. + /// + /// To agree with Ethereum specs, deleting nodes does not result in the trie + /// removing nodes that are redundant after deletion. For example, a + /// `Branch` node that is completely empty after all of its children are + /// deleted is not pruned. Also note: + /// - Deleted leaves are replaced with `Empty` nodes. + /// - Deleted branch values are replaced with empty `Vec`s. + /// + /// # Panics + /// If a `Hash` node is traversed, a panic will occur. Since `Hash` nodes + /// are meant for parts of the trie that are not relevant, traversing one + /// means that a `Hash` node was created that potentially should not have + /// been. + pub fn delete(&mut self, k: K) -> TrieOpResult>> where K: Into, { - let res = self.node.trie_delete(k); - self.set_hash(None); - - res + self.trie_delete(k) } - - fn hash(&self) -> H256 { + /// Get the hash for the node. + pub fn hash(&self) -> H256 { self.get_hash() } - - fn items(&self) -> impl Iterator { - self.node.trie_items() + /// Returns an iterator over the trie that returns all key/value pairs for + /// every `Leaf` and `Hash` node. + pub fn items(&self) -> impl Iterator { + self.trie_items() } - - fn keys(&self) -> impl Iterator { - self.node.trie_keys() + /// Returns an iterator over the trie that returns all keys for every `Leaf` + /// and `Hash` node. + pub fn keys(&self) -> impl Iterator { + self.trie_keys() } - - fn values(&self) -> impl Iterator { - self.node.trie_values() + /// Returns an iterator over the trie that returns all values for every + /// `Leaf` and `Hash` node. + pub fn values(&self) -> impl Iterator { + self.trie_values() } - - fn contains(&self, k: K) -> bool + /// Returns `true` if the trie contains an element with the given key. + pub fn contains(&self, k: K) -> bool where K: Into, { - self.node.trie_has_item_by_key(k) - } -} - -impl TrieNodeIntern for HashedPartialTrie { - fn hash_intern(&self) -> EncodedNode { - if let Some(h) = *self.hash.read() { - return EncodedNode::Hashed(h.0); - } - - let res = rlp_encode_and_hash_node(&self.node); - // We can't hash anything smaller than 32 bytes (which is the case if it's a - // `Raw` variant), so only cache if this isn't the case. 
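// The former trait methods are now inherent on `Node`, so a plain value offers
// the full read/write API with no extra trait imports. A short round trip over
// arbitrary demo keys:
use mpt_trie::Node;

fn round_trip() -> mpt_trie::trie_ops::TrieOpResult<()> {
    let mut trie = Node::default();
    trie.insert(0x1234, vec![1u8])?;
    trie.insert(0x5678, vec![2u8])?;

    assert!(trie.contains(0x1234));
    assert_eq!(trie.get(0x5678), Some([2u8].as_slice()));

    // `delete` returns the old value; see the doc comment above for the exact
    // (non-)pruning rules that apply afterwards.
    assert_eq!(trie.delete(0x1234)?, Some(vec![1u8]));
    assert_eq!(trie.delete(0x1234)?, None);
    Ok(())
}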
- if let EncodedNode::Hashed(h) = res { - self.set_hash(Some(bytes_to_h256(&h))); - } - - res - } -} - -impl Deref for HashedPartialTrie { - type Target = Node; - - fn deref(&self) -> &Self::Target { - &self.node + self.trie_has_item_by_key(k) } } -impl DerefMut for HashedPartialTrie { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.node +impl Node { + pub(crate) fn hash_intern(&self) -> EncodedNode { + rlp_encode_and_hash_node(self) } -} - -impl Eq for HashedPartialTrie {} -impl PartialEq for HashedPartialTrie { - fn eq(&self, other: &Self) -> bool { - self.node == other.node + pub(crate) fn get_hash(&self) -> H256 { + hash_trie(self) } } -impl TryFromIterator<(K, V)> for HashedPartialTrie +impl TryFromIterator<(K, V)> for Node where K: Into, V: Into, { fn try_from_iter>(nodes: T) -> TrieOpResult { - from_iter_common(nodes) + let mut root = Node::Empty; + root.extend(nodes)?; + Ok(root) } } - -fn from_iter_common, K, V>( - nodes: T, -) -> TrieOpResult -where - K: Into, - V: Into, -{ - let mut root = N::new(Node::Empty); - root.extend(nodes)?; - Ok(root) -} diff --git a/mpt_trie/src/special_query.rs b/mpt_trie/src/special_query.rs index c133a6091..a185654d1 100644 --- a/mpt_trie/src/special_query.rs +++ b/mpt_trie/src/special_query.rs @@ -1,17 +1,15 @@ //! Specialized queries that users of the library may need that require //! knowledge of the private internal trie state. -use crate::{ - nibbles::Nibbles, - partial_trie::{Node, PartialTrie, WrappedNode}, - utils::TrieSegment, -}; +use std::sync::Arc; + +use crate::{nibbles::Nibbles, partial_trie::Node, utils::TrieSegment}; /// An iterator for a trie query. Note that this iterator is lazy. #[derive(Debug)] -pub struct TriePathIter { +pub struct TriePathIter { /// The next node in the trie to query with the remaining key. - curr_node: WrappedNode, + curr_node: Arc, /// The remaining part of the key as we traverse down the trie. curr_key: Nibbles, @@ -25,7 +23,7 @@ pub struct TriePathIter { always_include_final_node_if_possible: bool, } -impl Iterator for TriePathIter { +impl Iterator for TriePathIter { type Item = TrieSegment; fn next(&mut self) -> Option { @@ -33,7 +31,7 @@ impl Iterator for TriePathIter { return None; } - match self.curr_node.as_ref() { + match &*self.curr_node { Node::Empty => { self.terminated = true; Some(TrieSegment::Empty) @@ -101,16 +99,16 @@ fn pop_nibbles_clamped(nibbles: &mut Nibbles, n: usize) -> Nibbles { /// Note that if the key does not match the entire key of a node (eg. the /// remaining key is `0x34` but the next key is a leaf with the key `0x3456`), /// then the leaf will not appear in the query output. 
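// Bulk construction goes through the inherent `extend`, or through
// `Node::try_from_iter` as the tests below do (the latter assumes the
// `TryFromIterator` trait is still reachable, e.g. via `mpt_trie::utils`).
// Demo entries only:
use mpt_trie::Node;

fn build_from_pairs() -> mpt_trie::trie_ops::TrieOpResult<Node> {
    let mut trie = Node::default();
    trie.extend((0u64..8).map(|i| (0x1000 + i, vec![i as u8; 33])))?;
    Ok(trie)
}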
-pub fn path_for_query( - trie: &Node, +pub fn path_for_query( + trie: &Node, k: K, always_include_final_node_if_possible: bool, -) -> TriePathIter +) -> TriePathIter where K: Into, { TriePathIter { - curr_node: trie.clone().into(), + curr_node: Arc::new(trie.clone()), curr_key: k.into(), terminated: false, always_include_final_node_if_possible, @@ -168,7 +166,7 @@ mod test { ]; for (q, expected) in ks.into_iter().zip(res.into_iter()) { - let res: Vec<_> = path_for_query(&trie.node, q, false).collect(); + let res: Vec<_> = path_for_query(&trie, q, false).collect(); assert_eq!(res, expected) } diff --git a/mpt_trie/src/testing_utils.rs b/mpt_trie/src/testing_utils.rs index cc8a0daef..05673623f 100644 --- a/mpt_trie/src/testing_utils.rs +++ b/mpt_trie/src/testing_utils.rs @@ -9,7 +9,7 @@ use rand::{rngs::StdRng, seq::IteratorRandom, Rng, RngCore, SeedableRng}; use crate::{ nibbles::{Nibbles, NibblesIntern}, - partial_trie::{HashedPartialTrie, Node, PartialTrie}, + partial_trie::Node, trie_ops::{TrieOpResult, ValOrHash}, utils::is_even, }; @@ -20,7 +20,7 @@ use crate::{ /// chances of these collisions occurring. const MIN_BYTES_FOR_VAR_KEY: usize = 5; -pub(crate) type TrieType = HashedPartialTrie; +pub(crate) type TrieType = Node; pub(crate) type TestInsertValEntry = (Nibbles, Vec); pub(crate) type TestInsertHashEntry = (Nibbles, H256); @@ -120,8 +120,8 @@ fn gen_n_random_trie_value_entries_common< (0..n).map(move |_| (key_gen_f(&mut rng), val_gen_f(&mut rng))) } -pub(crate) fn generate_n_hash_nodes_entries_for_empty_slots_in_trie( - trie: &Node, +pub(crate) fn generate_n_hash_nodes_entries_for_empty_slots_in_trie( + trie: &Node, n: usize, seed: u64, ) -> Vec { @@ -165,9 +165,7 @@ fn gen_variable_nibbles(rng: &mut StdRng) -> Nibbles { U256::from_little_endian(&bytes).into() } // TODO: Replace with `PartialTrie` `iter` methods once done... -pub(crate) fn get_non_hash_values_in_trie( - trie: &Node, -) -> HashSet { +pub(crate) fn get_non_hash_values_in_trie(trie: &Node) -> HashSet { info!("Collecting all entries inserted into trie..."); trie.trie_items() .map(|(k, v)| (k, v.expect_val())) diff --git a/mpt_trie/src/trie_hashing.rs b/mpt_trie/src/trie_hashing.rs index 60890aae2..65fe1903b 100644 --- a/mpt_trie/src/trie_hashing.rs +++ b/mpt_trie/src/trie_hashing.rs @@ -3,10 +3,7 @@ use ethereum_types::H256; use keccak_hash::keccak; use rlp::RlpStream; -use crate::{ - partial_trie::{Node, PartialTrie, TrieNodeIntern}, - utils::bytes_to_h256, -}; +use crate::{partial_trie::Node, utils::bytes_to_h256}; /// The node type used for calculating the hash of a trie. #[derive(Debug)] @@ -28,14 +25,12 @@ impl From<&EncodedNode> for H256 { /// Calculates the hash of a node. /// Assumes that all leaf values are already rlp encoded. 
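// `path_for_query` now takes the trie root as a plain `&Node` (the test above
// passes `&trie` instead of `&trie.node`). The lazy iterator yields one
// `TrieSegment` per node visited along the key; the key below is arbitrary.
use mpt_trie::{special_query::path_for_query, Node};

fn trace_a_key(trie: &Node) {
    for segment in path_for_query(trie, 0x1234, false) {
        // `TrieSegment` implements `Display` and exposes its kind via `node_type()`.
        println!("visited {} ({:?})", segment, segment.node_type());
    }
}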
-pub(crate) fn hash_trie(node: &Node) -> H256 { +pub(crate) fn hash_trie(node: &Node) -> H256 { let trie_hash_bytes = rlp_encode_and_hash_node(node); (&trie_hash_bytes).into() } -pub(crate) fn rlp_encode_and_hash_node( - node: &Node, -) -> EncodedNode { +pub(crate) fn rlp_encode_and_hash_node(node: &Node) -> EncodedNode { let res = match node { Node::Empty => EncodedNode::Raw(Bytes::from_static(&rlp::NULL_RLP)), Node::Hash(h) => EncodedNode::Hashed(h.0), @@ -106,7 +101,7 @@ mod tests { use crate::{ nibbles::{Nibble, Nibbles}, - partial_trie::{HashedPartialTrie, Node, PartialTrie, WrappedNode}, + partial_trie::Node, testing_utils::{ common_setup, entry, generate_n_random_fixed_even_nibble_padded_trie_value_entries, generate_n_random_fixed_trie_value_entries, @@ -232,7 +227,7 @@ mod tests { fn get_root_hashes_for_our_trie_after_each_insert( entries: impl Iterator, ) -> impl Iterator { - let mut trie = HashedPartialTrie::new(Node::Empty); + let mut trie = Node::Empty; entries.map(move |(k, v)| { trie.insert(k, v).unwrap(); @@ -255,7 +250,7 @@ mod tests { fn empty_hash_is_correct() { common_setup(); - let trie = HashedPartialTrie::new(Node::Empty); + let trie = Node::Empty; assert_eq!(keccak_hash::KECCAK_NULL_RLP, trie.get_hash()); } @@ -277,7 +272,7 @@ mod tests { get_lib_trie_root_hashes_after_each_insert(once(ins_entry.clone())) .next() .unwrap(); - let our_hash = HashedPartialTrie::try_from_iter(once(ins_entry))?.get_hash(); + let our_hash = Node::try_from_iter(once(ins_entry))?.get_hash(); assert_eq!(py_evm_truth_val, our_hash); assert_eq!(eth_trie_lib_truth_val, our_hash); @@ -371,7 +366,7 @@ mod tests { ) .collect(); - let mut our_trie = HashedPartialTrie::try_from_iter(entries.iter().cloned())?; + let mut our_trie = Node::try_from_iter(entries.iter().cloned())?; let mut truth_trie = create_truth_trie(); for (k, v) in entries.iter() { @@ -393,7 +388,7 @@ mod tests { #[test] fn replacing_branch_of_leaves_with_hash_nodes_produced_same_hash() -> TrieOpResult<()> { - let mut trie = HashedPartialTrie::try_from_iter([ + let mut trie = Node::try_from_iter([ large_entry(0x1), large_entry(0x2), large_entry(0x3), @@ -412,9 +407,7 @@ mod tests { Ok(()) } - fn get_branch_children_expected( - node: &mut Node, - ) -> &mut [WrappedNode; 16] { + fn get_branch_children_expected(node: &mut Node) -> &mut [Arc; 16] { match node { Node::Branch { children, .. } => children, _ => unreachable!(), @@ -437,10 +430,10 @@ mod tests { }) }); - let mut trie = HashedPartialTrie::try_from_iter(entries)?; + let mut trie = Node::try_from_iter(entries)?; let orig_hash = trie.get_hash(); - let root_branch_children = match &mut *trie { + let root_branch_children = match &mut trie { Node::Branch { children, .. } => children, _ => unreachable!(), }; diff --git a/mpt_trie/src/trie_ops.rs b/mpt_trie/src/trie_ops.rs index 5f94fc942..373882a3d 100644 --- a/mpt_trie/src/trie_ops.rs +++ b/mpt_trie/src/trie_ops.rs @@ -1,7 +1,6 @@ -//! Defines various operations for -//! [`PartialTrie`]. +//! Defines various operations for [`Node`]. -use std::{fmt::Display, mem::size_of}; +use std::{array, fmt::Display, mem::size_of, sync::Arc}; use enum_as_inner::EnumAsInner; use ethereum_types::{H256, U128, U256, U512}; @@ -10,8 +9,8 @@ use thiserror::Error; use crate::{ nibbles::{Nibble, Nibbles}, - partial_trie::{Node, PartialTrie, WrappedNode}, - utils::TrieNodeType, + partial_trie::Node, + utils::NodeKind, }; /// Stores the result of trie operations. 
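// Hashing note: with `HashedPartialTrie` gone, `Node::hash()` recomputes the
// root hash on every call and caching is opt-in via `Node::freeze()`. The
// empty-trie hash still equals the RLP null hash, as the test above checks:
use keccak_hash::KECCAK_NULL_RLP;
use mpt_trie::Node;

fn empty_root_hash() {
    assert_eq!(Node::Empty.hash(), KECCAK_NULL_RLP);
}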
Returns a [TrieOpError] upon @@ -34,7 +33,7 @@ pub enum TrieOpError { /// An error that occurs when encontered an unexisting type of node during /// an extension node collapse. #[error("Extension managed to get an unexisting child node type! (child: {0})")] - HashNodeExtError(TrieNodeType), + HashNodeExtError(NodeKind), /// Failed to insert a hash node into the trie. #[error("Attempted to place a hash node on an existing node! (hash: {0})")] @@ -66,7 +65,7 @@ impl InsertEntry { } } -/// An "entry" in a [`PartialTrie`]. +/// An "entry" in a [`Node`]. /// /// Entries in the trie may either be actual values or /// [`Hash`](crate::partial_trie::Node::Hash) nodes. @@ -160,21 +159,21 @@ struct ExistingAndNewNodePreAndPost { /// we may place one of the nodes right into the value node of the branch. This /// enum just indicates whether or not a value needs to go into the branch node. #[derive(Debug)] -enum ExistingOrNewBranchValuePlacement { - BranchValue(Vec, (Nibble, WrappedNode)), - BothBranchChildren((Nibble, WrappedNode), (Nibble, WrappedNode)), +enum ExistingOrNewBranchValuePlacement { + BranchValue(Vec, (Nibble, Arc)), + BothBranchChildren((Nibble, Arc), (Nibble, Arc)), } #[derive(Debug)] -enum IterStackEntry { - Root(WrappedNode), +enum IterStackEntry { + Root(Node), Extension(usize), - Branch(BranchStackEntry), + Branch(BranchStackEntry), } #[derive(Debug)] -struct BranchStackEntry { - children: [WrappedNode; 16], +struct BranchStackEntry { + children: [Arc; 16], value: Vec, curr_nib: Nibble, } @@ -182,18 +181,18 @@ struct BranchStackEntry { #[derive(Debug)] /// An iterator that ranges over all the leafs and hash nodes /// of the trie, in lexicographic order. -pub struct PartialTrieIter { +pub struct PartialTrieIter { curr_key_after_last_branch: Nibbles, - trie_stack: Vec>, + trie_stack: Vec, } -impl PartialTrieIter { +impl PartialTrieIter { fn advance_iter_to_next_empty_leaf_or_hash_node( &mut self, - node: &WrappedNode, + node: &Node, mut curr_key: Nibbles, ) -> Option<(Nibbles, ValOrHash)> { - match node.as_ref() { + match node { Node::Empty => None, Node::Hash(h) => Some((curr_key, ValOrHash::Hash(*h))), Node::Branch { children, value } => { @@ -209,7 +208,7 @@ impl PartialTrieIter { self.advance_iter_to_next_empty_leaf_or_hash_node(&children[0], curr_key) } Node::Extension { nibbles, child } => { - if TrieNodeType::from(child) != TrieNodeType::Hash { + if NodeKind::of(child) != NodeKind::Hash { self.trie_stack .push(IterStackEntry::Extension(nibbles.count)); } @@ -226,7 +225,7 @@ impl PartialTrieIter { } } -impl Iterator for PartialTrieIter { +impl Iterator for PartialTrieIter { type Item = (Nibbles, ValOrHash); fn next(&mut self) -> Option<(Nibbles, ValOrHash)> { @@ -290,7 +289,7 @@ impl Iterator for PartialTrieIter { } } -impl Node { +impl Node { pub(crate) fn trie_insert(&mut self, k: K, v: V) -> TrieOpResult<()> where K: Into, @@ -300,7 +299,7 @@ impl Node { trace!("Inserting new node {:?}...", ins_entry); // Inserts are guaranteed to update the root node. 
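// `items()` still yields `ValOrHash` entries, now from a plain `&Node`. A small
// sketch that separates real values from hashed-out subtrees (printing only):
use mpt_trie::{trie_ops::ValOrHash, Node};

fn summarize(trie: &Node) {
    for (key, entry) in trie.items() {
        match entry {
            ValOrHash::Val(bytes) => println!("{key:x}: {} byte value", bytes.len()),
            ValOrHash::Hash(h) => println!("{key:x}: hashed-out subtree {h:x}"),
        }
    }
}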
- let node_ref: &Node = &insert_into_trie_rec(self, ins_entry)?.unwrap(); + let node_ref: &Node = &insert_into_trie_rec(self, ins_entry)?.unwrap(); *self = node_ref.clone(); Ok(()) } @@ -370,7 +369,7 @@ impl Node { delete_intern(&self.clone(), k)?.map_or(Ok(None), |(updated_root, deleted_val)| { // Final check at the root if we have an extension node let wrapped_node = try_collapse_if_extension(updated_root)?; - let node_ref: &Node = &wrapped_node; + let node_ref: &Node = &wrapped_node; *self = node_ref.clone(); Ok(Some(deleted_val)) @@ -380,7 +379,7 @@ impl Node { pub(crate) fn trie_items(&self) -> impl Iterator { PartialTrieIter { curr_key_after_last_branch: Nibbles::default(), - trie_stack: vec![IterStackEntry::Root(self.clone().into())], + trie_stack: vec![IterStackEntry::Root(self.clone())], } } @@ -401,10 +400,7 @@ impl Node { } } -fn insert_into_trie_rec( - node: &Node, - mut new_node: InsertEntry, -) -> TrieOpResult>> { +fn insert_into_trie_rec(node: &Node, mut new_node: InsertEntry) -> TrieOpResult> { match node { Node::Empty => { trace!("Insert traversed Empty"); @@ -429,7 +425,7 @@ fn insert_into_trie_rec( Ok( insert_into_trie_rec(&children[nibble as usize], new_node)?.map(|updated_child| { let mut updated_children = children.clone(); - updated_children[nibble as usize] = updated_child; + updated_children[nibble as usize] = Arc::new(updated_child); branch(updated_children, value.clone()) }), ) @@ -445,7 +441,7 @@ fn insert_into_trie_rec( new_node.truncate_n_nibbles(nibbles.count); return insert_into_trie_rec(child, new_node)?.map_or(Ok(None), |updated_child| { - Ok(Some(extension(*nibbles, updated_child))) + Ok(Some(extension(*nibbles, Box::new(updated_child)))) }); } @@ -458,7 +454,10 @@ fn insert_into_trie_rec( // the branch. let updated_existing_node = match existing_postfix_adjusted_for_branch.count { 0 => child.clone(), - _ => extension(existing_postfix_adjusted_for_branch, child.clone()), + _ => Arc::new(extension( + existing_postfix_adjusted_for_branch, + child.clone(), + )), }; Ok(Some(place_branch_and_potentially_ext_prefix( @@ -486,17 +485,14 @@ fn insert_into_trie_rec( Ok(Some(place_branch_and_potentially_ext_prefix( &info, - existing_node_truncated, + Arc::new(existing_node_truncated), new_node, ))) } } } -fn delete_intern( - node: &Node, - mut curr_k: Nibbles, -) -> TrieOpResult, Vec)>> { +fn delete_intern(node: &Node, mut curr_k: Nibbles) -> TrieOpResult)>> { match node { Node::Empty => { trace!("Delete traversed Empty"); @@ -523,7 +519,7 @@ fn delete_intern( // Branch stays. let mut updated_children = children.clone(); updated_children[nibble as usize] = - try_collapse_if_extension(updated_child)?; + Arc::new(try_collapse_if_extension(updated_child)?); branch(updated_children, value.clone()) } true => { @@ -534,7 +530,7 @@ fn delete_intern( trace!("Branch {:x} became an extension when collapsing a branch (may be collapsed further still). Single remaining child in slot {:x} ({}) will be pointed at with an extension node.", - nibble, child_nibble, TrieNodeType::from(non_empty_node.deref())); + nibble, child_nibble, NodeKind::of(non_empty_node)); // Extension may be collapsed one level above. 
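// Branch and extension children are now plain `Arc<Node>`, so `clone()` shares
// subtrees while inserts rebuild only the touched path (the
// `cloning_a_trie_creates_two_separate_tries` test below relies on the clones
// staying independent). Demo data only:
use mpt_trie::Node;

fn clones_diverge() -> mpt_trie::trie_ops::TrieOpResult<()> {
    let mut original = Node::default();
    original.insert(0x1234, vec![1u8])?;

    let mut copy = original.clone();
    copy.insert(0x5678, vec![2u8])?;   // only `copy` sees the new key

    assert!(copy.contains(0x5678));
    assert!(!original.contains(0x5678));
    assert_ne!(original.hash(), copy.hash());
    Ok(())
}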
extension(Nibbles::from_nibble(child_nibble), non_empty_node.clone()) @@ -570,30 +566,27 @@ fn delete_intern( trace!("Delete traversed Leaf (nibbles: {:?})", nibbles); Ok((*nibbles == curr_k).then(|| { trace!("Deleting leaf ({:x})", nibbles); - (Node::Empty.into(), value.clone()) + (Node::Empty, value.clone()) })) } } } -fn try_collapse_if_extension(node: WrappedNode) -> TrieOpResult> { - match node.as_ref() { +fn try_collapse_if_extension(node: Node) -> TrieOpResult { + match &node { Node::Extension { nibbles, child } => collapse_ext_node_if_needed(nibbles, child), _ => Ok(node), } } -fn collapse_ext_node_if_needed( - ext_nibbles: &Nibbles, - child: &WrappedNode, -) -> TrieOpResult> { +fn collapse_ext_node_if_needed(ext_nibbles: &Nibbles, child: &Node) -> TrieOpResult { trace!( "Collapsing extension node ({:x}) with child {}...", ext_nibbles, - TrieNodeType::from(child.deref()) + NodeKind::of(child) ); - match child.as_ref() { + match child { Node::Branch { .. } => Ok(extension(*ext_nibbles, child.clone())), Node::Extension { nibbles: other_ext_nibbles, @@ -607,7 +600,7 @@ fn collapse_ext_node_if_needed( value, } => Ok(leaf(ext_nibbles.merge_nibbles(leaf_nibbles), value.clone())), Node::Hash(_) => Ok(extension(*ext_nibbles, child.clone())), - _ => Err(TrieOpError::HashNodeExtError(TrieNodeType::from(child))), + _ => Err(TrieOpError::HashNodeExtError(NodeKind::of(child))), } } @@ -632,11 +625,11 @@ fn get_pre_and_postfixes_for_existing_and_new_nodes( } } -fn place_branch_and_potentially_ext_prefix( +fn place_branch_and_potentially_ext_prefix( info: &ExistingAndNewNodePreAndPost, - existing_node: WrappedNode, + existing_node: Arc, new_node: InsertEntry, -) -> WrappedNode { +) -> Node { let mut children = new_branch_child_arr(); let mut value = vec![]; @@ -665,16 +658,16 @@ fn place_branch_and_potentially_ext_prefix( /// Check if the new leaf or existing node (either leaf/extension) should go /// into the value field of the new branch. -fn check_if_existing_or_new_node_should_go_in_branch_value_field( +fn check_if_existing_or_new_node_should_go_in_branch_value_field( info: &ExistingAndNewNodePreAndPost, - existing_node: WrappedNode, + existing_node: Arc, new_node_entry: InsertEntry, -) -> ExistingOrNewBranchValuePlacement { +) -> ExistingOrNewBranchValuePlacement { // Guaranteed that both postfixes are not equal at this point. match ( info.existing_postfix.count, info.new_postfix.count, - existing_node.as_ref(), + &*existing_node, ) { (0, _, Node::Leaf { value, .. }) => ExistingOrNewBranchValuePlacement::BranchValue( value.clone(), @@ -692,10 +685,10 @@ fn check_if_existing_or_new_node_should_go_in_branch_value_field } } -fn ins_entry_into_leaf_and_nibble( +fn ins_entry_into_leaf_and_nibble( info: &ExistingAndNewNodePreAndPost, entry: InsertEntry, -) -> (Nibble, WrappedNode) { +) -> (Nibble, Arc) { let new_first_nibble = info.new_postfix.get_nibble(0); let new_node = create_node_from_insert_val( entry @@ -704,85 +697,57 @@ fn ins_entry_into_leaf_and_nibble( entry.v, ); - (new_first_nibble, new_node) + (new_first_nibble, Arc::new(new_node)) } -fn new_branch_child_arr() -> [WrappedNode; 16] { - // Hahaha ok there actually is no better way to init this array unless I want to - // use iterators and take a runtime hit... 
- [ - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - Node::Empty.into(), - ] +fn new_branch_child_arr() -> [Arc; 16] { + array::from_fn(|_ix| Arc::new(Node::Empty)) } -fn get_num_non_empty_children(children: &[WrappedNode; 16]) -> usize { +fn get_num_non_empty_children(children: &[Arc; 16]) -> usize { children.iter().filter(|c| !node_is_empty(c)).count() } -fn get_other_non_empty_child_and_nibble_in_two_elem_branch( - children: &[WrappedNode; 16], +fn get_other_non_empty_child_and_nibble_in_two_elem_branch( + children: &[Arc; 16], our_nib: Nibble, -) -> (Nibble, &WrappedNode) { +) -> (Nibble, &Node) { children .iter() .enumerate() .find(|(i, c)| *i != our_nib as usize && !node_is_empty(c)) - .map(|(n, c)| (n as Nibble, c)) + .map(|(n, c)| (n as Nibble, &**c)) .expect("Expected to find a non-empty node in the branch's children") } -fn node_is_empty(node: &WrappedNode) -> bool { - matches!(node.as_ref(), Node::Empty) +fn node_is_empty(node: &Node) -> bool { + matches!(node, Node::Empty) } -pub(crate) fn branch( - children: [WrappedNode; 16], - value: Vec, -) -> WrappedNode { - Node::Branch { children, value }.into() +pub(crate) fn branch(children: [Arc; 16], value: Vec) -> Node { + Node::Branch { children, value } } -fn branch_from_insert_val( - children: [WrappedNode; 16], - value: ValOrHash, -) -> TrieOpResult> { - create_node_if_ins_val_not_hash(value, |value| Node::Branch { children, value }.into()) +fn branch_from_insert_val(children: [Arc; 16], value: ValOrHash) -> TrieOpResult { + create_node_if_ins_val_not_hash(value, |value| Node::Branch { children, value }) } -fn extension(nibbles: Nibbles, child: WrappedNode) -> WrappedNode { - Node::Extension { nibbles, child }.into() +fn extension(nibbles: Nibbles, child: impl Into>) -> Node { + Node::Extension { + nibbles, + child: child.into(), + } } -fn leaf(nibbles: Nibbles, value: Vec) -> WrappedNode { - Node::Leaf { nibbles, value }.into() +fn leaf(nibbles: Nibbles, value: Vec) -> Node { + Node::Leaf { nibbles, value } } -fn leaf_from_insert_val( - nibbles: Nibbles, - value: ValOrHash, -) -> TrieOpResult> { - create_node_if_ins_val_not_hash(value, |value| Node::Leaf { nibbles, value }.into()) +fn leaf_from_insert_val(nibbles: Nibbles, value: ValOrHash) -> TrieOpResult { + create_node_if_ins_val_not_hash(value, |value| Node::Leaf { nibbles, value }) } -fn create_node_from_insert_val( - nibbles: Nibbles, - value: ValOrHash, -) -> WrappedNode { +fn create_node_from_insert_val(nibbles: Nibbles, value: ValOrHash) -> Node { match value { ValOrHash::Val(value) => Node::Leaf { nibbles, value }, ValOrHash::Hash(h) => { @@ -799,13 +764,12 @@ fn create_node_from_insert_val( } } } - .into() } -fn create_node_if_ins_val_not_hash) -> WrappedNode>( +fn create_node_if_ins_val_not_hash) -> Node>( value: ValOrHash, create_node_f: F, -) -> TrieOpResult> { +) -> TrieOpResult { match value { ValOrHash::Val(leaf_v) => Ok(create_node_f(leaf_v)), ValOrHash::Hash(h) => Err(TrieOpError::ExistingHashNodeError(h)), @@ -820,8 +784,7 @@ mod tests { use super::ValOrHash; use crate::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, Node, PartialTrie, StandardTrie}, + partial_trie::Node, testing_utils::{ common_setup, entry, entry_with_value, 
generate_n_hash_nodes_entries_for_empty_slots_in_trie, @@ -839,13 +802,13 @@ mod tests { fn insert_entries_and_assert_all_exist_in_trie_with_no_extra( entries: &[TestInsertValEntry], ) -> TrieOpResult<()> { - let trie = StandardTrie::try_from_iter(entries.iter().cloned())?; + let trie = Node::try_from_iter(entries.iter().cloned())?; assert_all_entries_in_trie(entries, &trie); Ok(()) } - fn assert_all_entries_in_trie(entries: &[TestInsertValEntry], trie: &Node) { + fn assert_all_entries_in_trie(entries: &[TestInsertValEntry], trie: &Node) { let entries_in_trie = get_non_hash_values_in_trie(trie); let all_entries_retrieved: Vec<_> = entries @@ -923,7 +886,7 @@ mod tests { let mut entries = [entry(0x1234), entry(0x1234)]; entries[1].1 = vec![100]; - let trie = StandardTrie::try_from_iter(entries)?; + let trie = Node::try_from_iter(entries)?; assert_eq!(trie.get(0x1234), Some([100].as_slice())); Ok(()) @@ -933,17 +896,7 @@ mod tests { fn cloning_a_trie_creates_two_separate_tries() -> TrieOpResult<()> { common_setup(); - assert_cloning_works_for_tries::()?; - assert_cloning_works_for_tries::()?; - - Ok(()) - } - - fn assert_cloning_works_for_tries() -> TrieOpResult<()> - where - T: TryFromIterator<(Nibbles, Vec)> + PartialTrie, - { - let trie = T::try_from_iter(once(entry(0x1234)))?; + let trie = Node::try_from_iter(once(entry(0x1234)))?; let mut cloned_trie = trie.clone(); cloned_trie.extend(once(entry(0x5678)))?; @@ -978,7 +931,7 @@ mod tests { common_setup(); let non_hash_entries: Vec<_> = generate_n_random_variable_trie_value_entries(MASSIVE_TRIE_SIZE, 0).collect(); - let mut trie = StandardTrie::try_from_iter(non_hash_entries.iter().cloned())?; + let mut trie = Node::try_from_iter(non_hash_entries.iter().cloned())?; let extra_hash_entries = generate_n_hash_nodes_entries_for_empty_slots_in_trie( &trie, @@ -1005,17 +958,12 @@ mod tests { fn equivalency_check_works() -> TrieOpResult<()> { common_setup(); - assert_eq!( - StandardTrie::new(Node::Empty), - StandardTrie::new(Node::Empty) - ); - let entries = generate_n_random_fixed_trie_value_entries(MASSIVE_TRIE_SIZE, 0); - let big_trie_1 = StandardTrie::try_from_iter(entries)?; + let big_trie_1 = Node::try_from_iter(entries)?; assert_eq!(big_trie_1, big_trie_1); let entries = generate_n_random_fixed_trie_value_entries(MASSIVE_TRIE_SIZE, 1); - let big_trie_2 = StandardTrie::try_from_iter(entries)?; + let big_trie_2 = Node::try_from_iter(entries)?; assert_ne!(big_trie_1, big_trie_2); @@ -1027,7 +975,7 @@ mod tests { common_setup(); let entries = [entry_with_value(0x1234, 1), entry_with_value(0x12345678, 2)]; - let trie = StandardTrie::try_from_iter(entries.iter().cloned())?; + let trie = Node::try_from_iter(entries.iter().cloned())?; assert_eq!(trie.get(0x1234), Some([1].as_slice())); assert_eq!(trie.get(0x12345678), Some([2].as_slice())); @@ -1041,7 +989,7 @@ mod tests { let random_entries: Vec<_> = generate_n_random_fixed_trie_value_entries(MASSIVE_TRIE_SIZE, 9001).collect(); - let trie = StandardTrie::try_from_iter(random_entries.iter().cloned())?; + let trie = Node::try_from_iter(random_entries.iter().cloned())?; for (k, v) in random_entries.into_iter() { debug!("Attempting to retrieve {:?}...", (k, &v)); @@ -1062,7 +1010,7 @@ mod tests { let mut all_nodes_in_trie_after_each_insert = Vec::new(); let mut root_node_after_each_insert = Vec::new(); - let mut trie = StandardTrie::default(); + let mut trie = Node::default(); for (k, v) in entries { trie.insert(k, v)?; @@ -1087,7 +1035,7 @@ mod tests { let entries: HashSet<_> = 
generate_n_random_variable_trie_value_entries(MASSIVE_TRIE_SIZE, 9003).collect(); - let trie = StandardTrie::try_from_iter(entries.iter().cloned())?; + let trie = Node::try_from_iter(entries.iter().cloned())?; let trie_items: HashSet<_> = trie .items() @@ -1104,7 +1052,7 @@ mod tests { fn deleting_a_non_existent_node_returns_none() -> TrieOpResult<()> { common_setup(); - let mut trie = StandardTrie::default(); + let mut trie = Node::default(); trie.insert(0x1234, vec![91])?; let res = trie.delete(0x5678)?; @@ -1117,7 +1065,7 @@ mod tests { fn existent_node_key_contains_returns_true() -> TrieOpResult<()> { common_setup(); - let mut trie = StandardTrie::default(); + let mut trie = Node::default(); trie.insert(0x1234, vec![91])?; assert!(trie.contains(0x1234)); @@ -1128,7 +1076,7 @@ mod tests { fn non_existent_node_key_contains_returns_false() -> TrieOpResult<()> { common_setup(); - let mut trie = StandardTrie::default(); + let mut trie = Node::default(); trie.insert(0x1234, vec![91])?; assert!(!trie.contains(0x5678)); @@ -1139,7 +1087,7 @@ mod tests { fn deleting_from_an_empty_trie_returns_none() -> TrieOpResult<()> { common_setup(); - let mut trie = StandardTrie::default(); + let mut trie = Node::default(); let res = trie.delete(0x1234)?; assert!(res.is_none()); @@ -1152,7 +1100,7 @@ mod tests { let entries: Vec<_> = generate_n_random_variable_trie_value_entries(MASSIVE_TRIE_SIZE, 7).collect(); - let mut trie = StandardTrie::try_from_iter(entries.iter().cloned())?; + let mut trie = Node::try_from_iter(entries.iter().cloned())?; // Delete half of the elements let half_entries = entries.len() / 2; diff --git a/mpt_trie/src/trie_subsets.rs b/mpt_trie/src/trie_subsets.rs index 74f50d769..34d24f6bc 100644 --- a/mpt_trie/src/trie_subsets.rs +++ b/mpt_trie/src/trie_subsets.rs @@ -1,11 +1,11 @@ -//! Logic for calculating a subset of a [`PartialTrie`] from an existing -//! [`PartialTrie`]. +//! Logic for calculating a subset of a [`Node`] from an existing +//! [`Node`]. //! -//! Given a `PartialTrie`, you can pass in keys of leaf nodes that should be +//! Given an MPT, you can pass in keys of leaf nodes that should be //! included in the produced subset. Any nodes that are not needed in the subset //! are replaced with [`Hash`] nodes are far up the trie as possible. -use std::sync::Arc; +use std::{array, sync::Arc}; use ethereum_types::H256; use log::trace; @@ -13,15 +13,15 @@ use thiserror::Error; use crate::{ nibbles::Nibbles, - partial_trie::{Node, PartialTrie, WrappedNode}, + partial_trie::Node, trie_hashing::EncodedNode, - utils::{bytes_to_h256, TrieNodeType}, + utils::{bytes_to_h256, NodeKind}, }; /// The output type of trie_subset operations. pub type SubsetTrieResult = Result; -/// Errors that may occur when creating a subset [`PartialTrie`]. +/// Errors that may occur when creating a subset [`Node`]. #[derive(Debug, Error)] pub enum SubsetTrieError { #[error("Tried to mark nodes in a tracked trie for a key that does not exist! 
(Key: {0}, trie: {1})")] @@ -30,24 +30,24 @@ pub enum SubsetTrieError { } #[derive(Debug)] -enum TrackedNodeIntern { +enum TrackedNodeIntern { Empty, Hash, - Branch(Box<[TrackedNode; 16]>), - Extension(Box>), + Branch(Box<[TrackedNode; 16]>), + Extension(Box), Leaf, } #[derive(Debug)] -struct TrackedNode { - node: TrackedNodeIntern, - info: TrackedNodeInfo, +struct TrackedNode { + node: TrackedNodeIntern, + info: TrackedNodeInfo, } -impl TrackedNode { - fn new(underlying_node: &N) -> Self { +impl TrackedNode { + fn new(underlying_node: &Node) -> Self { Self { - node: match &**underlying_node { + node: match underlying_node { Node::Empty => TrackedNodeIntern::Empty, Node::Hash(_) => TrackedNodeIntern::Hash, Node::Branch { ref children, .. } => { @@ -63,107 +63,36 @@ impl TrackedNode { } } -fn tracked_branch( - underlying_children: &[WrappedNode; 16], -) -> [TrackedNode; 16] { - [ - TrackedNode::new(&underlying_children[0]), - TrackedNode::new(&underlying_children[1]), - TrackedNode::new(&underlying_children[2]), - TrackedNode::new(&underlying_children[3]), - TrackedNode::new(&underlying_children[4]), - TrackedNode::new(&underlying_children[5]), - TrackedNode::new(&underlying_children[6]), - TrackedNode::new(&underlying_children[7]), - TrackedNode::new(&underlying_children[8]), - TrackedNode::new(&underlying_children[9]), - TrackedNode::new(&underlying_children[10]), - TrackedNode::new(&underlying_children[11]), - TrackedNode::new(&underlying_children[12]), - TrackedNode::new(&underlying_children[13]), - TrackedNode::new(&underlying_children[14]), - TrackedNode::new(&underlying_children[15]), - ] +fn tracked_branch(underlying_children: &[Arc; 16]) -> [TrackedNode; 16] { + array::from_fn(|ix| TrackedNode::new(&underlying_children[ix])) } -fn partial_trie_extension(nibbles: Nibbles, child: &TrackedNode) -> N { - N::new(Node::Extension { +fn partial_trie_extension(nibbles: Nibbles, child: &TrackedNode) -> Node { + Node::Extension { nibbles, - child: Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - child, - ))), - }) + child: (Arc::new(create_partial_trie_subset_from_tracked_trie(child))), + } } -fn partial_trie_branch( - underlying_children: &[TrackedNode; 16], - value: &[u8], -) -> N { - let children = [ - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[0], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[1], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[2], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[3], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[4], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[5], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[6], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[7], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[8], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[9], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[10], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[11], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[12], - 
))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[13], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[14], - ))), - Arc::new(Box::new(create_partial_trie_subset_from_tracked_trie( - &underlying_children[15], - ))), - ]; - - N::new(Node::Branch { - children, +fn partial_trie_branch(underlying_children: &[TrackedNode; 16], value: &[u8]) -> Node { + Node::Branch { + children: array::from_fn(|ix| { + Arc::new(create_partial_trie_subset_from_tracked_trie( + &underlying_children[ix], + )) + }), value: value.to_owned(), - }) + } } #[derive(Debug)] -struct TrackedNodeInfo { - underlying_node: N, +struct TrackedNodeInfo { + underlying_node: Node, touched: bool, } -impl TrackedNodeInfo { - const fn new(underlying_node: N) -> Self { +impl TrackedNodeInfo { + const fn new(underlying_node: Node) -> Self { Self { underlying_node, touched: false, @@ -175,64 +104,62 @@ impl TrackedNodeInfo { } fn get_nibbles_expected(&self) -> &Nibbles { - match &*self.underlying_node { + match &self.underlying_node { Node::Extension { nibbles, .. } => nibbles, Node::Leaf { nibbles, .. } => nibbles, _ => unreachable!( "Tried getting the nibbles field from a {} node!", - TrieNodeType::from(&*self.underlying_node) + NodeKind::of(&self.underlying_node) ), } } fn get_hash_node_hash_expected(&self) -> H256 { - match *self.underlying_node { - Node::Hash(h) => h, + match &self.underlying_node { + Node::Hash(h) => *h, _ => unreachable!("Expected an underlying hash node!"), } } fn get_branch_value_expected(&self) -> &Vec { - match &*self.underlying_node { + match &self.underlying_node { Node::Branch { value, .. } => value, _ => unreachable!("Expected an underlying branch node!"), } } fn get_leaf_nibbles_and_value_expected(&self) -> (&Nibbles, &Vec) { - match &*self.underlying_node { + match &self.underlying_node { Node::Leaf { nibbles, value } => (nibbles, value), _ => unreachable!("Expected an underlying leaf node!"), } } } -/// Create a [`PartialTrie`] subset from a base trie given an iterator of keys +/// Create a [`Node`] subset from a base trie given an iterator of keys /// of nodes that may or may not exist in the trie. All nodes traversed by the /// keys will not be hashed out in the trie subset. If the key does not exist in /// the trie at all, this is not considered an error and will still record which /// nodes were visited. -pub fn create_trie_subset( - trie: &N, +pub fn create_trie_subset( + trie: &Node, keys_involved: impl IntoIterator, -) -> SubsetTrieResult +) -> SubsetTrieResult where - N: PartialTrie, K: Into, { let mut tracked_trie = TrackedNode::new(trie); create_trie_subset_intern(&mut tracked_trie, keys_involved.into_iter()) } -/// Create [`PartialTrie`] subsets from a given base `PartialTrie` given a +/// Create [`Node`] subsets from a given base Node given a /// iterator of keys per subset needed. See [`create_trie_subset`] for more /// info. 
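// Usage sketch for the non-generic `create_trie_subset`: nodes on the queried
// paths are kept, everything else is replaced by `Hash` nodes, and the subset
// keeps the original root hash. Demo keys and values only.
use mpt_trie::{trie_subsets::create_trie_subset, Node};

fn prune_to_one_key() -> Result<(), Box<dyn std::error::Error>> {
    let mut full = Node::default();
    full.insert(0x1234, vec![1u8; 40])?;
    full.insert(0x5678, vec![2u8; 40])?;

    let subset = create_trie_subset(&full, [0x1234])?;
    assert_eq!(subset.hash(), full.hash());   // hashing-out preserves the root
    assert_eq!(subset.get(0x1234), Some([1u8; 40].as_slice()));
    Ok(())
}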
-pub fn create_trie_subsets( - base_trie: &N, +pub fn create_trie_subsets( + base_trie: &Node, keys_involved: impl IntoIterator>, -) -> SubsetTrieResult> +) -> SubsetTrieResult> where - N: PartialTrie, K: Into, { let mut tracked_trie = TrackedNode::new(base_trie); @@ -248,12 +175,11 @@ where .collect::>() } -fn create_trie_subset_intern( - tracked_trie: &mut TrackedNode, +fn create_trie_subset_intern( + tracked_trie: &mut TrackedNode, keys_involved: impl Iterator, -) -> SubsetTrieResult +) -> SubsetTrieResult where - N: PartialTrie, K: Into, { for k in keys_involved { @@ -270,14 +196,14 @@ where /// - For the key `0x1`, the marked nodes would be [B(0x), B(0x1)]. /// - For the key `0x12`, the marked nodes still would be [B(0x), B(0x1)]. /// - For the key `0x123`, the marked nodes would be [B(0x), B(0x1), L(0x123)]. -fn mark_nodes_that_are_needed( - trie: &mut TrackedNode, +fn mark_nodes_that_are_needed( + trie: &mut TrackedNode, curr_nibbles: &mut Nibbles, ) -> SubsetTrieResult<()> { trace!( "Sub-trie marking at {:x}, (type: {})", curr_nibbles, - TrieNodeType::from(trie.info.underlying_node.deref()) + NodeKind::of(&trie.info.underlying_node) ); match &mut trie.node { @@ -330,24 +256,20 @@ fn mark_nodes_that_are_needed( Ok(()) } -fn create_partial_trie_subset_from_tracked_trie( - tracked_node: &TrackedNode, -) -> N { +fn create_partial_trie_subset_from_tracked_trie(tracked_node: &TrackedNode) -> Node { // If we don't use the node in the trace, then we can potentially hash it away. if !tracked_node.info.touched { let e_node = tracked_node.info.underlying_node.hash_intern(); // Don't hash if it's too small, even if we don't need it. if let EncodedNode::Hashed(h) = e_node { - return N::new(Node::Hash(bytes_to_h256(&h))); + return Node::Hash(bytes_to_h256(&h)); } } match &tracked_node.node { - TrackedNodeIntern::Empty => N::new(Node::Empty), - TrackedNodeIntern::Hash => { - N::new(Node::Hash(tracked_node.info.get_hash_node_hash_expected())) - } + TrackedNodeIntern::Empty => Node::Empty, + TrackedNodeIntern::Hash => Node::Hash(tracked_node.info.get_hash_node_hash_expected()), TrackedNodeIntern::Branch(children) => { partial_trie_branch(children, tracked_node.info.get_branch_value_expected()) } @@ -356,15 +278,15 @@ fn create_partial_trie_subset_from_tracked_trie( } TrackedNodeIntern::Leaf => { let (nibbles, value) = tracked_node.info.get_leaf_nibbles_and_value_expected(); - N::new(Node::Leaf { + Node::Leaf { nibbles: *nibbles, value: value.clone(), - }) + } } } } -fn reset_tracked_trie_state(tracked_node: &mut TrackedNode) { +fn reset_tracked_trie_state(tracked_node: &mut TrackedNode) { match tracked_node.node { TrackedNodeIntern::Branch(ref mut children) => children.iter_mut().for_each(|c| { c.info.reset(); @@ -392,13 +314,13 @@ mod tests { use super::{create_trie_subset, create_trie_subsets}; use crate::{ nibbles::Nibbles, - partial_trie::{Node, PartialTrie}, + partial_trie::Node, testing_utils::{ common_setup, create_trie_with_large_entry_nodes, generate_n_random_fixed_trie_value_entries, handmade_trie_1, TrieType, }, trie_ops::{TrieOpResult, ValOrHash}, - utils::{TrieNodeType, TryFromIterator}, + utils::{NodeKind, TryFromIterator}, }; const MASSIVE_TEST_NUM_SUB_TRIES: usize = 10; @@ -406,19 +328,19 @@ mod tests { #[derive(Debug, Eq, PartialEq)] struct NodeFullNibbles { - n_type: TrieNodeType, + n_type: NodeKind, nibbles: Nibbles, } impl NodeFullNibbles { - fn new_from_node(node: &Node, nibbles: Nibbles) -> Self { + fn new_from_node(node: &Node, nibbles: Nibbles) -> Self { Self { - n_type: 
node.into(), + n_type: NodeKind::of(node), nibbles, } } - fn new_from_node_type>(n_type: TrieNodeType, nibbles: K) -> Self { + fn new_from_node_type>(n_type: NodeKind, nibbles: K) -> Self { Self { n_type, nibbles: nibbles.into(), @@ -450,12 +372,12 @@ mod tests { } fn get_nodes_in_trie_intern_rec( - trie: &TrieType, + trie: &Node, mut curr_nibbles: Nibbles, nodes: &mut Vec, return_on_empty_or_hash: bool, ) { - match &trie.node { + match &trie { Node::Empty | Node::Hash(_) => match return_on_empty_or_hash { false => (), true => return, @@ -514,7 +436,7 @@ mod tests { fn encountering_a_hash_node_returns_err() { common_setup(); - let trie = TrieType::new(Node::Hash(H256::zero())); + let trie = Node::Hash(H256::zero()); let res = create_trie_subset(&trie, once(0x1234)); assert!(res.is_err()) @@ -565,24 +487,24 @@ mod tests { assert_node_exists( &all_non_empty_and_hash_nodes, - TrieNodeType::Branch, + NodeKind::Branch, Nibbles::default(), ); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Branch, 0x1); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Leaf, 0x1234); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Branch, 0x1); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Leaf, 0x1234); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Extension, 0x13); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Branch, 0x1324); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Extension, 0x13); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Branch, 0x1324); assert_node_exists( &all_non_empty_and_hash_nodes, - TrieNodeType::Leaf, + NodeKind::Leaf, 0x132400005_u64, ); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Extension, 0x2); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Branch, 0x200); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Leaf, 0x2001); - assert_node_exists(&all_non_empty_and_hash_nodes, TrieNodeType::Leaf, 0x2002); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Extension, 0x2); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Branch, 0x200); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Leaf, 0x2001); + assert_node_exists(&all_non_empty_and_hash_nodes, NodeKind::Leaf, 0x2002); assert_eq!(all_non_empty_and_hash_nodes.len(), 10); @@ -592,22 +514,22 @@ mod tests { ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Branch, + NodeKind::Branch, Nibbles::default(), ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Extension, + NodeKind::Extension, 0x2, ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Branch, + NodeKind::Branch, 0x200, ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Leaf, + NodeKind::Leaf, 0x2001, ); assert_eq!(all_non_empty_and_hash_nodes_partial.len(), 4); @@ -617,22 +539,18 @@ mod tests { ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Branch, + NodeKind::Branch, Nibbles::default(), ); + assert_node_exists(&all_non_empty_and_hash_nodes_partial, NodeKind::Branch, 0x1); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Branch, - 0x1, - ); - assert_node_exists( - &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Extension, + NodeKind::Extension, 0x13, ); assert_node_exists( &all_non_empty_and_hash_nodes_partial, - TrieNodeType::Branch, + NodeKind::Branch, 0x1324, ); 
assert_eq!(all_non_empty_and_hash_nodes_partial.len(), 4); @@ -640,7 +558,7 @@ mod tests { fn assert_node_exists>( nodes: &[NodeFullNibbles], - n_type: TrieNodeType, + n_type: NodeKind, nibbles: K, ) { assert!(nodes.contains(&NodeFullNibbles::new_from_node_type( @@ -655,7 +573,7 @@ mod tests { ) { assert_keys_point_to_nodes_of_type( trie, - keys.into_iter().map(|k| (k.into(), TrieNodeType::Leaf)), + keys.into_iter().map(|k| (k.into(), NodeKind::Leaf)), ) } @@ -665,13 +583,13 @@ mod tests { ) { assert_keys_point_to_nodes_of_type( trie, - keys.into_iter().map(|k| (k.into(), TrieNodeType::Hash)), + keys.into_iter().map(|k| (k.into(), NodeKind::Hash)), ) } fn assert_keys_point_to_nodes_of_type( trie: &TrieType, - keys: impl Iterator, + keys: impl Iterator, ) { let nodes = get_all_nodes_in_trie(trie); let keys_to_node_types: HashMap<_, _> = diff --git a/mpt_trie/src/utils.rs b/mpt_trie/src/utils.rs index 5d530878e..9c07bf4cb 100644 --- a/mpt_trie/src/utils.rs +++ b/mpt_trie/src/utils.rs @@ -4,7 +4,6 @@ use std::{ borrow::Borrow, fmt::{self, Display}, ops::BitAnd, - sync::Arc, }; use ethereum_types::H256; @@ -12,37 +11,22 @@ use num_traits::PrimInt; use crate::{ nibbles::{Nibble, Nibbles, NibblesIntern}, - partial_trie::{Node, PartialTrie}, + partial_trie::Node, trie_ops::TrieOpResult, }; +/// Represents the different [`Node`] discriminants. #[derive(Clone, Debug, Eq, Hash, PartialEq)] -/// Simplified trie node type to make logging cleaner. -pub enum TrieNodeType { - /// Empty node. +pub enum NodeKind { Empty, - - /// Hash node. Hash, - - /// Branch node. Branch, - - /// Extension node. Extension, - - /// Leaf node. Leaf, } -impl From<&Arc>> for TrieNodeType { - fn from(value: &Arc>) -> Self { - (&****value).into() - } -} - -impl From<&Node> for TrieNodeType { - fn from(node: &Node) -> Self { +impl NodeKind { + pub fn of(node: &Node) -> Self { match node { Node::Empty => Self::Empty, Node::Hash(_) => Self::Hash, @@ -53,17 +37,9 @@ impl From<&Node> for TrieNodeType { } } -impl Display for TrieNodeType { +impl Display for NodeKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let s = match self { - TrieNodeType::Empty => "Empty", - TrieNodeType::Hash => "Hash", - TrieNodeType::Branch => "Branch", - TrieNodeType::Extension => "Extension", - TrieNodeType::Leaf => "Leaf", - }; - - write!(f, "{}", s) + write!(f, "{:?}", self) } } @@ -80,7 +56,7 @@ pub(crate) fn bytes_to_h256(b: &[u8; 32]) -> H256 { } /// Minimal key information of "segments" (nodes) used to construct trie -/// "traces" of a trie query. Unlike [`TrieNodeType`], this type also contains +/// "traces" of a trie query. Unlike [`NodeKind`], this type also contains /// the key piece of the node if applicable (eg. [`Node::Empty`] & /// [`Node::Hash`] do not have associated key pieces). #[derive(Clone, Debug, Eq, Hash, PartialEq)] @@ -206,13 +182,13 @@ impl Display for TrieSegment { impl TrieSegment { /// Get the node type of the [`TrieSegment`]. 
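// `NodeKind` replaces `TrieNodeType` as the logging-friendly discriminant: it
// is obtained with `NodeKind::of` rather than `From`, and its `Display` output
// reuses the `Debug` variant names. Minimal sketch:
use mpt_trie::{utils::NodeKind, Node};

fn log_root_kind(trie: &Node) {
    // Prints the bare variant name, e.g. "Branch" or "Leaf".
    println!("root node is a {}", NodeKind::of(trie));
}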
- pub const fn node_type(&self) -> TrieNodeType { + pub const fn node_type(&self) -> NodeKind { match self { - TrieSegment::Empty => TrieNodeType::Empty, - TrieSegment::Hash => TrieNodeType::Hash, - TrieSegment::Branch(_) => TrieNodeType::Branch, - TrieSegment::Extension(_) => TrieNodeType::Extension, - TrieSegment::Leaf(_) => TrieNodeType::Leaf, + TrieSegment::Empty => NodeKind::Empty, + TrieSegment::Hash => NodeKind::Hash, + TrieSegment::Branch(_) => NodeKind::Branch, + TrieSegment::Extension(_) => NodeKind::Extension, + TrieSegment::Leaf(_) => NodeKind::Leaf, } } @@ -231,16 +207,13 @@ impl TrieSegment { /// This function is intended to be used during a trie query as we are /// traversing down a trie. Depending on the current node, we pop off nibbles /// and use these to create `TrieSegment`s. -pub(crate) fn get_segment_from_node_and_key_piece( - n: &Node, - k_piece: &Nibbles, -) -> TrieSegment { - match TrieNodeType::from(n) { - TrieNodeType::Empty => TrieSegment::Empty, - TrieNodeType::Hash => TrieSegment::Hash, - TrieNodeType::Branch => TrieSegment::Branch(k_piece.get_nibble(0)), - TrieNodeType::Extension => TrieSegment::Extension(*k_piece), - TrieNodeType::Leaf => TrieSegment::Leaf(*k_piece), +pub(crate) fn get_segment_from_node_and_key_piece(n: &Node, k_piece: &Nibbles) -> TrieSegment { + match NodeKind::of(n) { + NodeKind::Empty => TrieSegment::Empty, + NodeKind::Hash => TrieSegment::Hash, + NodeKind::Branch => TrieSegment::Branch(k_piece.get_nibble(0)), + NodeKind::Extension => TrieSegment::Extension(*k_piece), + NodeKind::Leaf => TrieSegment::Leaf(*k_piece), } } diff --git a/trace_decoder/src/decoding.rs b/trace_decoder/src/decoding.rs index 98a0d8857..7430c5138 100644 --- a/trace_decoder/src/decoding.rs +++ b/trace_decoder/src/decoding.rs @@ -13,11 +13,11 @@ use evm_arithmetization::{ use log::trace; use mpt_trie::{ nibbles::Nibbles, - partial_trie::{HashedPartialTrie, Node, PartialTrie}, special_query::path_for_query, trie_ops::{TrieOpError, TrieOpResult}, trie_subsets::{create_trie_subset, SubsetTrieError}, utils::{IntoTrieKey, TriePath}, + FrozenNode, Node, }; use thiserror::Error; @@ -204,10 +204,10 @@ impl Display for TrieType { /// after every txn we process in the trace. #[derive(Clone, Debug, Default)] struct PartialTrieState { - state: HashedPartialTrie, - storage: HashMap, - txn: HashedPartialTrie, - receipt: HashedPartialTrie, + state: Node, + storage: HashMap, + txn: Node, + receipt: Node, } /// Additional information discovered during delta application. @@ -409,7 +409,7 @@ impl ProcessedBlockTrace { /// accessed by any txns, then we still need to manually create an entry for /// them. fn init_any_needed_empty_storage_tries<'a>( - storage_tries: &mut HashMap, + storage_tries: &mut HashMap, accounts_with_storage: impl Iterator, state_accounts_with_no_accesses_but_storage_tries: &'a HashMap, ) { @@ -417,7 +417,7 @@ impl ProcessedBlockTrace { if !storage_tries.contains_key(h_addr) { let trie = state_accounts_with_no_accesses_but_storage_tries .get(h_addr) - .map(|s_root| HashedPartialTrie::new(Node::Hash(*s_root))) + .map(|s_root| Node::Hash(*s_root)) .unwrap_or_default(); storage_tries.insert(*h_addr, trie); @@ -438,15 +438,17 @@ impl ProcessedBlockTrace { delta_application_out .additional_state_trie_paths_to_not_hash .into_iter(), - )?; + )? 
+        .freeze();
 
         let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap();
 
         let transactions_trie =
-            create_trie_subset_wrapped(&curr_block_tries.txn, once(txn_k), TrieType::Txn)?;
+            create_trie_subset_wrapped(&curr_block_tries.txn, once(txn_k), TrieType::Txn)?.freeze();
 
         let receipts_trie =
-            create_trie_subset_wrapped(&curr_block_tries.receipt, once(txn_k), TrieType::Receipt)?;
+            create_trie_subset_wrapped(&curr_block_tries.receipt, once(txn_k), TrieType::Receipt)?
+                .freeze();
 
         let storage_tries = create_minimal_storage_partial_tries(
             &curr_block_tries.storage,
@@ -564,7 +566,7 @@ impl ProcessedBlockTrace {
         Ok(out)
     }
 
-    fn get_trie_trace(trie: &HashedPartialTrie, k: &Nibbles) -> TriePath {
+    fn get_trie_trace(trie: &Node, k: &Nibbles) -> TriePath {
         path_for_query(trie, *k, true).collect()
     }
 
@@ -572,7 +574,7 @@ impl ProcessedBlockTrace {
     /// the other single child that remains also is not hashed when passed into
     /// plonky2. Returns the key to the remaining child if a collapse occurred.
     fn delete_node_and_report_remaining_key_if_branch_collapsed(
-        trie: &mut HashedPartialTrie,
+        trie: &mut Node,
         delete_k: &Nibbles,
     ) -> TrieOpResult<Option<Nibbles>> {
         let old_trace = Self::get_trie_trace(trie, delete_k);
@@ -650,7 +652,8 @@ impl ProcessedBlockTrace {
                 &final_trie_state.state,
                 withdrawal_addrs,
                 additional_paths.into_iter(),
-            )?;
+            )?
+            .freeze();
         }
 
         Self::update_trie_state_from_withdrawals(
@@ -668,7 +671,7 @@ impl ProcessedBlockTrace {
     /// our local trie state.
     fn update_trie_state_from_withdrawals<'a>(
        withdrawals: impl IntoIterator<Item = (Address, H256, U256)> + 'a,
-        state: &mut HashedPartialTrie,
+        state: &mut Node,
     ) -> TraceParsingResult<()> {
         for (addr, h_addr, amt) in withdrawals {
             let h_addr_nibs = Nibbles::from_h256_be(h_addr);
@@ -785,7 +788,7 @@ impl StateTrieWrites {
         &self,
         state_node: &mut AccountRlp,
         h_addr: &H256,
-        acc_storage_tries: &HashMap<H256, HashedPartialTrie>,
+        acc_storage_tries: &HashMap<H256, Node>,
     ) -> TraceParsingResult<()> {
         let storage_root_hash_change = match self.storage_trie_change {
             false => None,
@@ -821,10 +824,10 @@ fn calculate_trie_input_hashes(t_inputs: &PartialTrieState) -> TrieRoots {
 }
 
 fn create_minimal_state_partial_trie(
-    state_trie: &HashedPartialTrie,
+    state_trie: &Node,
     state_accesses: impl Iterator<Item = H256>,
     additional_state_trie_paths_to_not_hash: impl Iterator<Item = Nibbles>,
-) -> TraceParsingResult<HashedPartialTrie> {
+) -> TraceParsingResult<Node> {
     create_trie_subset_wrapped(
         state_trie,
         state_accesses
@@ -838,10 +841,10 @@ fn create_minimal_state_partial_trie(
 // TODO!!!: We really need to be appending the empty storage tries to the base
 // trie somewhere else! This is a big hack!
 fn create_minimal_storage_partial_tries<'a>(
-    storage_tries: &HashMap<H256, HashedPartialTrie>,
+    storage_tries: &HashMap<H256, Node>,
     accesses_per_account: impl Iterator<Item = (&'a H256, &'a Vec<Nibbles>)>,
     additional_storage_trie_paths_to_not_hash: &HashMap<H256, Vec<Nibbles>>,
-) -> TraceParsingResult<Vec<(H256, HashedPartialTrie)>> {
+) -> TraceParsingResult<Vec<(H256, FrozenNode)>> {
     accesses_per_account
         .map(|(h_addr, mem_accesses)| {
             // Guaranteed to exist due to calling `init_any_needed_empty_storage_tries`
@@ -859,7 +862,8 @@ fn create_minimal_storage_partial_tries<'a>(
                 base_storage_trie,
                 storage_slots_to_not_hash,
                 TrieType::Storage,
-            )?;
+            )?
+            .freeze();
 
             Ok((*h_addr, partial_storage_trie))
         })
@@ -867,10 +871,10 @@ fn create_minimal_storage_partial_tries<'a>(
 }
 
 fn create_trie_subset_wrapped(
-    trie: &HashedPartialTrie,
+    trie: &Node,
     accesses: impl Iterator<Item = Nibbles>,
     trie_type: TrieType,
-) -> TraceParsingResult<HashedPartialTrie> {
+) -> TraceParsingResult<Node> {
     create_trie_subset(trie, accesses).map_err(|trie_err| {
         let key = match trie_err {
             SubsetTrieError::UnexpectedKey(key, _) => key,
diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs
index 796fa1008..6c6faae3f 100644
--- a/trace_decoder/src/lib.rs
+++ b/trace_decoder/src/lib.rs
@@ -99,7 +99,7 @@ use ethereum_types::{Address, U256};
 use evm_arithmetization::proof::{BlockHashes, BlockMetadata};
 use evm_arithmetization::GenerationInputs;
 use keccak_hash::H256;
-use mpt_trie::partial_trie::HashedPartialTrie;
+use mpt_trie::Node;
 use processed_block_trace::ProcessedTxnInfo;
 use serde::{Deserialize, Serialize};
 
@@ -150,7 +150,7 @@ pub struct SeparateTriePreImages {
 pub enum SeparateTriePreImage {
     /// Storage or state trie format that can be processed as is, as it
     /// corresponds to the internal format.
-    Direct(HashedPartialTrie),
+    Direct(Node),
 }
 
 /// A trie pre-image where both state & storage are combined into one payload.
@@ -297,7 +297,6 @@ pub fn entrypoint(
 ) -> anyhow::Result<Vec<GenerationInputs>> {
     use anyhow::Context as _;
     use evm_arithmetization::generation::mpt::AccountRlp;
-    use mpt_trie::partial_trie::PartialTrie as _;
 
     use crate::processed_block_trace::{
         CodeHashResolving, ProcessedBlockTrace, ProcessedBlockTracePreImages,
@@ -421,8 +420,8 @@ fn hash(bytes: &[u8]) -> ethereum_types::H256 {
 
 #[derive(Debug, Default)]
 struct PartialTriePreImages {
-    pub state: HashedPartialTrie,
-    pub storage: HashMap<H256, HashedPartialTrie>,
+    pub state: Node,
+    pub storage: HashMap<H256, Node>,
 }
 
 /// Like `#[serde(with = "hex")`, but tolerates and emits leading `0x` prefixes
diff --git a/trace_decoder/src/type1.rs b/trace_decoder/src/type1.rs
index 5d93f6b4e..ca7f6edab 100644
--- a/trace_decoder/src/type1.rs
+++ b/trace_decoder/src/type1.rs
@@ -9,19 +9,16 @@ use std::{
 
 use anyhow::{bail, ensure, Context as _};
 use either::Either;
-use mpt_trie::{
-    partial_trie::{HashedPartialTrie, PartialTrie as _},
-    trie_ops::ValOrHash,
-};
+use mpt_trie::{trie_ops::ValOrHash, Node as TrieNode};
 use nunny::NonEmpty;
 use u4::U4;
 
 use crate::wire::{Instruction, SmtLeaf};
 
 pub struct Frontend {
-    pub state: HashedPartialTrie,
+    pub state: TrieNode,
     pub code: HashSet<NonEmpty<Vec<u8>>>,
-    pub storage: HashMap<H256, HashedPartialTrie>,
+    pub storage: HashMap<H256, TrieNode>,
 }
 
 pub fn frontend(instructions: impl IntoIterator<Item = Instruction>) -> anyhow::Result<Frontend> {
@@ -35,7 +32,7 @@
     let mut visitor = Visitor {
         path: Vec::new(),
         frontend: Frontend {
-            state: HashedPartialTrie::default(),
+            state: Default::default(),
             code: HashSet::new(),
             storage: HashMap::new(),
         },
@@ -354,8 +351,8 @@ impl Visitor {
 
 /// # Panics
 /// - internally in [`mpt_trie`].
-fn node2trie(node: Node) -> anyhow::Result<HashedPartialTrie> {
-    let mut trie = HashedPartialTrie::default();
+fn node2trie(node: Node) -> anyhow::Result<TrieNode> {
+    let mut trie = TrieNode::default();
     for (k, v) in iter_leaves(node) {
         trie.insert(
             nibbles2nibbles(k),
diff --git a/zero_bin/rpc/src/native/state.rs b/zero_bin/rpc/src/native/state.rs
index 5fd9b539c..3171512bf 100644
--- a/zero_bin/rpc/src/native/state.rs
+++ b/zero_bin/rpc/src/native/state.rs
@@ -9,7 +9,7 @@ use alloy::{
 use anyhow::Context as _;
 use evm_arithmetization::testing_utils::{BEACON_ROOTS_CONTRACT_STATE_KEY, HISTORY_BUFFER_LENGTH};
 use futures::future::{try_join, try_join_all};
-use mpt_trie::{builder::PartialTrieBuilder, partial_trie::HashedPartialTrie};
+use mpt_trie::builder::PartialTrieBuilder;
 use trace_decoder::{
     BlockTraceTriePreImages, SeparateStorageTriesPreImage, SeparateTriePreImage,
     SeparateTriePreImages, TxnInfo,
@@ -117,16 +117,13 @@ async fn generate_state_witness(
     accounts_state: HashMap<Address, HashSet<H256>>,
     cached_provider: &CachedProvider<ProviderT, TransportT>,
     block_number: u64,
-) -> anyhow::Result<(
-    PartialTrieBuilder<HashedPartialTrie>,
-    HashMap<H256, PartialTrieBuilder<HashedPartialTrie>>,
-)>
+) -> anyhow::Result<(PartialTrieBuilder, HashMap<H256, PartialTrieBuilder>)>
 where
     ProviderT: Provider<TransportT>,
     TransportT: Transport + Clone,
 {
     let mut state = PartialTrieBuilder::new(prev_state_root.compat(), Default::default());
-    let mut storage_proofs = HashMap::<H256, PartialTrieBuilder<HashedPartialTrie>>::new();
+    let mut storage_proofs = HashMap::<H256, PartialTrieBuilder>::new();
 
     let (account_proofs, next_account_proofs) =
         fetch_proof_data(accounts_state, cached_provider, block_number).await?;
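
For orientation, here is a minimal usage sketch (not part of the diff) of the API that the `+` lines above converge on. It assumes `mpt_trie` exposes `Node` at the crate root and `NodeKind` under `utils` exactly as these hunks suggest, that `insert`, `hash`, and `freeze` behave as they do at their call sites in this patch, and that `anyhow` and `rlp` are available as in the touched benches; the key and value literals are made up.

use std::str::FromStr;

use mpt_trie::{nibbles::Nibbles, utils::NodeKind, Node};

fn main() -> anyhow::Result<()> {
    // Tries are now built directly on `Node`; there is no longer a
    // `HashedPartialTrie` wrapper to convert through.
    let mut trie = Node::Empty;
    trie.insert(Nibbles::from_str("0x80").unwrap(), vec![1, 2, 3])?;

    // `NodeKind::of` replaces `TrieNodeType::from`, and its `Display` impl
    // now forwards to `Debug`, so this prints e.g. "Leaf".
    println!("root is a {} node, hash = {:?}", NodeKind::of(&trie), trie.hash());

    // `freeze()` yields the immutable `FrozenNode` that `TrieInputs` and the
    // trace decoder expect after this change.
    let _frozen = trie.freeze();

    Ok(())
}

Keeping the mutable `Node` used while building partial tries separate from the `FrozenNode` handed to the prover makes the freeze points in `decoding.rs` explicit, which appears to be the motivation for the extra `.freeze()` calls above.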