Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: compute Storage Tries and Contracts Trie together #330

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions crates/committer/src/patricia_merkle_tree/external_test_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ pub fn get_random_u256<R: Rng>(rng: &mut R, low: U256, high: U256) -> U256 {
}

pub async fn tree_computation_flow(
leaf_modifications: Arc<LeafModifications<StarknetStorageValue>>,
leaf_modifications: LeafModifications<StarknetStorageValue>,
storage: &MapStorage,
root_hash: HashOutput,
) -> StorageTrie {
Expand Down Expand Up @@ -96,9 +96,12 @@ pub async fn tree_computation_flow(
)
.expect("Failed to create the updated skeleton tree");

StorageTrie::create::<TreeHashFunctionImpl>(updated_skeleton.into(), leaf_modifications)
.await
.expect("Failed to create the filled tree")
StorageTrie::create_no_additional_output::<TreeHashFunctionImpl>(
updated_skeleton.into(),
Arc::new(leaf_modifications),
)
.await
.expect("Failed to create the filled tree")
}

pub async fn single_tree_flow_test(
Expand All @@ -112,8 +115,7 @@ pub async fn single_tree_flow_test(
.map(|(k, v)| (NodeIndex::FIRST_LEAF + k, v))
.collect::<LeafModifications<StarknetStorageValue>>();

let filled_tree =
tree_computation_flow(Arc::new(leaf_modifications), &storage, root_hash).await;
let filled_tree = tree_computation_flow(leaf_modifications, &storage, root_hash).await;

let hash_result = filled_tree.get_root_hash();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,13 @@ pub enum FilledTreeError<L: Leaf> {
#[error("Deleted leaf at index {0:?} appears in the updated skeleton tree.")]
DeletedLeafInSkeleton(NodeIndex),
#[error("Double update at node {index:?}. Existing value: {existing_value:?}.")]
DoubleUpdate {
DoubleOutputUpdate {
index: NodeIndex,
existing_value: Box<FilledNode<L>>,
},
#[error("Double update at node {index:?}.")]
//TODO(Amos): Add the existing value to the error message.
DoubleAdditionalOutputUpdate { index: NodeIndex },
#[error(transparent)]
Leaf(#[from] LeafError),
#[error("Missing node at index {0:?}.")]
Expand Down
136 changes: 68 additions & 68 deletions crates/committer/src/patricia_merkle_tree/filled_tree/forest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,8 @@ use crate::hash::hash_trait::HashOutput;
use crate::patricia_merkle_tree::filled_tree::node::CompiledClassHash;
use crate::patricia_merkle_tree::filled_tree::node::{ClassHash, Nonce};
use crate::patricia_merkle_tree::filled_tree::tree::FilledTree;
use crate::patricia_merkle_tree::filled_tree::tree::{
ClassesTrie, ContractsTrie, StorageTrie, StorageTrieMap,
};
use crate::patricia_merkle_tree::node_data::leaf::{ContractState, LeafModifications};
use crate::patricia_merkle_tree::filled_tree::tree::{ClassesTrie, ContractsTrie, StorageTrieMap};
use crate::patricia_merkle_tree::node_data::leaf::{ContractState, Leaf, LeafModifications};
use crate::patricia_merkle_tree::types::NodeIndex;
use crate::patricia_merkle_tree::updated_skeleton_tree::hash_function::ForestHashFunction;
use crate::patricia_merkle_tree::updated_skeleton_tree::skeleton_forest::UpdatedSkeletonForest;
Expand All @@ -16,7 +14,6 @@ use crate::storage::storage_trait::Storage;

use std::collections::HashMap;
use std::sync::Arc;
use tokio::task::JoinSet;

pub struct FilledForest {
pub storage_tries: StorageTrieMap,
Expand Down Expand Up @@ -48,83 +45,86 @@ impl FilledForest {
}

pub(crate) async fn create<TH: ForestHashFunction + 'static>(
mut updated_forest: UpdatedSkeletonForest,
updated_forest: UpdatedSkeletonForest,
storage_updates: HashMap<ContractAddress, LeafModifications<StarknetStorageValue>>,
classes_updates: LeafModifications<CompiledClassHash>,
original_contracts_trie_leaves: &HashMap<NodeIndex, ContractState>,
address_to_class_hash: &HashMap<ContractAddress, ClassHash>,
address_to_nonce: &HashMap<ContractAddress, Nonce>,
) -> ForestResult<Self> {
let classes_trie_task = tokio::spawn(ClassesTrie::create::<TH>(
let classes_trie = ClassesTrie::create_no_additional_output::<TH>(
Arc::new(updated_forest.classes_trie),
Arc::new(classes_updates),
));
let mut contracts_trie_modifications = HashMap::new();
let mut filled_storage_tries = HashMap::new();
let mut contracts_state_tasks = JoinSet::new();
)
.await?;

for (address, inner_updates) in storage_updates {
let updated_storage_trie = updated_forest
.storage_tries
.remove(&address)
.ok_or(ForestError::MissingUpdatedSkeleton(address))?;

let original_contract_state = original_contracts_trie_leaves
.get(&NodeIndex::from_contract_address(&address))
.ok_or(ForestError::MissingContractCurrentState(address))?;
contracts_state_tasks.spawn(Self::new_contract_state::<TH>(
address,
*(address_to_nonce
.get(&address)
.unwrap_or(&original_contract_state.nonce)),
*(address_to_class_hash
.get(&address)
.unwrap_or(&original_contract_state.class_hash)),
updated_storage_trie,
inner_updates,
));
}

while let Some(result) = contracts_state_tasks.join_next().await {
let (address, new_contract_state, filled_storage_trie) = result??;
contracts_trie_modifications.insert(
NodeIndex::from_contract_address(&address),
new_contract_state,
);
filled_storage_tries.insert(address, filled_storage_trie);
}

let contracts_trie_task = tokio::spawn(ContractsTrie::create::<TH>(
let (contracts_trie, storage_tries) = ContractsTrie::create::<TH>(
Arc::new(updated_forest.contracts_trie),
Arc::new(contracts_trie_modifications),
));
Arc::new(FilledForest::get_contracts_trie_leaf_input(
original_contracts_trie_leaves,
storage_updates,
updated_forest.storage_tries,
address_to_class_hash,
address_to_nonce,
)?),
)
.await?;

Ok(Self {
storage_tries: filled_storage_tries,
contracts_trie: contracts_trie_task.await??,
classes_trie: classes_trie_task.await??,
storage_tries: storage_tries
.unwrap_or_else(|| panic!("Missing storage tries."))
.into_iter()
.map(|(node_index, storage_trie)| (node_index.to_contract_address(), storage_trie))
.collect(),
contracts_trie,
classes_trie,
})
}

async fn new_contract_state<TH: ForestHashFunction + 'static>(
contract_address: ContractAddress,
new_nonce: Nonce,
new_class_hash: ClassHash,
updated_storage_trie: UpdatedSkeletonTreeImpl,
inner_updates: LeafModifications<StarknetStorageValue>,
) -> ForestResult<(ContractAddress, ContractState, StorageTrie)> {
let filled_storage_trie =
StorageTrie::create::<TH>(Arc::new(updated_storage_trie), Arc::new(inner_updates))
.await?;
let new_root_hash = filled_storage_trie.get_root_hash();
Ok((
contract_address,
ContractState {
nonce: new_nonce,
storage_root_hash: new_root_hash,
class_hash: new_class_hash,
},
filled_storage_trie,
))
// TODO(Amos, 1/8/2024): Can this be done more efficiently?
// should error be returned if keys are missing?
/// Builds the per-leaf input map for computing the contracts trie together with
/// the storage tries: for every modified contract, pairs its updated skeleton
/// storage trie with its storage-leaf modifications and its new nonce / class
/// hash (falling back to the original contract state when no update exists).
///
/// # Arguments
/// * `original_contracts_trie_leaves` - current contract states, keyed by the
///   contract's node index in the contracts trie.
/// * `storage_updates` - storage-leaf modifications per modified contract;
///   consumed (moved into the returned inputs).
/// * `storage_tries` - updated skeleton storage tries per modified contract;
///   consumed (moved into the returned inputs).
/// * `address_to_class_hash` / `address_to_nonce` - new values for contracts
///   whose class hash / nonce changed; contracts absent from these maps keep
///   their original values.
///
/// # Returns
/// A map from each modified contract's leaf index to the leaf-input tuple
/// (`<ContractState as Leaf>::I`) consumed by the contracts-trie computation.
///
/// # Errors
/// Returns `ForestError::MissingContractCurrentState` if a modified contract
/// has no entry in `original_contracts_trie_leaves`.
///
/// # Panics
/// Panics if a contract present in `storage_updates` is missing from
/// `storage_tries` (and the length assertion below catches the converse).
fn get_contracts_trie_leaf_input(
original_contracts_trie_leaves: &HashMap<NodeIndex, ContractState>,
mut storage_updates: HashMap<ContractAddress, LeafModifications<StarknetStorageValue>>,
mut storage_tries: HashMap<ContractAddress, UpdatedSkeletonTreeImpl>,
address_to_class_hash: &HashMap<ContractAddress, ClassHash>,
address_to_nonce: &HashMap<ContractAddress, Nonce>,
) -> ForestResult<HashMap<NodeIndex, <ContractState as Leaf>::I>> {
let mut leaf_index_to_leaf_input = HashMap::new();
// Both maps should describe exactly the same set of modified contracts.
assert_eq!(storage_updates.len(), storage_tries.len());
// `storage_updates` includes all modified contracts, see
// StateDiff::actual_storage_updates().
// Keys are collected into a Vec first so the map itself can be mutated
// (entries are `remove`d) inside the loop.
for contract_address in storage_updates.keys().cloned().collect::<Vec<_>>() {
let original_contract_state = original_contracts_trie_leaves
.get(&NodeIndex::from_contract_address(&contract_address))
.ok_or(ForestError::MissingContractCurrentState(contract_address))?;
leaf_index_to_leaf_input.insert(
NodeIndex::from_contract_address(&contract_address),
(
NodeIndex::from_contract_address(&contract_address),
// Keep the original nonce / class hash when no update was supplied.
*(address_to_nonce
.get(&contract_address)
.unwrap_or(&original_contract_state.nonce)),
*(address_to_class_hash
.get(&contract_address)
.unwrap_or(&original_contract_state.class_hash)),
// Move the skeleton trie out of the map; its absence for a
// contract that has storage updates is a programming error.
storage_tries.remove(&contract_address).unwrap_or_else(|| {
panic!(
"Missing update skeleton tree for contract {:?}",
contract_address
)
}),
// Cannot fail: `contract_address` was taken from this map's
// own keys above.
storage_updates
.remove(&contract_address)
.unwrap_or_else(|| {
panic!(
"Missing storage updates for contract {:?}",
contract_address
)
}),
),
);
}
Ok(leaf_index_to_leaf_input)
}
}
Loading
Loading