Merge branch 'Davidson-Souza:master' into master
lla-dane authored Jun 19, 2024
2 parents 8f21d84 + de32ff6 commit dcc7f6f
Showing 15 changed files with 239 additions and 89 deletions.
4 changes: 2 additions & 2 deletions Cargo.lock


2 changes: 1 addition & 1 deletion crates/floresta-chain/Cargo.toml
@@ -18,7 +18,7 @@ categories = ["bitcoin", "blockchain", "node"]
 crate-type = ["cdylib", "rlib"]
 
 [dependencies]
-rustreexo = "0.1.0"
+rustreexo = "0.2.0"
 sha2 = "^0.10.6"
 log = "0.4"
 kv = "0.24.0"
4 changes: 3 additions & 1 deletion crates/floresta-chain/src/pruned_utreexo/chain_state.rs
@@ -969,7 +969,9 @@ impl<PersistedState: ChainStore> BlockchainInterface for ChainState<PersistedSta
         if let DiskBlockHeader::FullyValid(_, height) = header {
             Ok(height)
         } else {
-            unreachable!("Validation index is in an invalid state, you should re-index your node")
+            unreachable!(
+                "Validation index is in an invalid state, you should re-index your node {header:?}"
+            )
         }
     }

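Note on the chain_state.rs hunk above: the richer panic message relies on Rust's inline format-argument capture (stable since 1.58), so {header:?} pulls header from the surrounding scope and prints its Debug representation, which is one reason DiskBlockHeader derives Debug. A minimal sketch of the same pattern, using a hypothetical stand-in enum rather than Floresta's actual types:

    // Hypothetical stand-in for DiskBlockHeader, only to illustrate the panic formatting.
    #[derive(Debug, Clone, Copy)]
    enum Header {
        FullyValid(u32),
        InFork(u32),
    }

    fn validated_height(header: Header) -> u32 {
        if let Header::FullyValid(height) = header {
            height
        } else {
            // {header:?} captures `header` from the enclosing scope and formats it
            // with Debug, so the panic shows exactly which header state was hit.
            unreachable!("Validation index is in an invalid state, you should re-index your node {header:?}")
        }
    }

    fn main() {
        println!("height = {}", validated_height(Header::FullyValid(840_000)));
    }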
119 changes: 77 additions & 42 deletions crates/floresta-chain/src/pruned_utreexo/chainstore.rs
@@ -8,9 +8,12 @@ use bitcoin::consensus::serialize;
 use bitcoin::consensus::Decodable;
 use bitcoin::consensus::Encodable;
 use bitcoin::BlockHash;
+use kv::Batch;
+use kv::Bucket;
+use spin::RwLock;
 
 use crate::prelude::*;
-#[derive(Debug)]
+#[derive(Debug, Clone, Copy)]
 pub enum DiskBlockHeader {
     FullyValid(BlockHeader, u32),
     AssumedValid(BlockHeader, u32),
@@ -19,6 +22,7 @@ pub enum DiskBlockHeader {
     InFork(BlockHeader, u32),
     InvalidChain(BlockHeader),
 }
+
 impl DiskBlockHeader {
     pub fn block_hash(&self) -> BlockHash {
         self.deref().block_hash()
@@ -127,91 +131,122 @@ use kv::Store;
 
 use super::chain_state::BestChain;
 use super::ChainStore;
-pub struct KvChainStore(Store);
-impl KvChainStore {
-    pub fn new(datadir: String) -> Result<KvChainStore, kv::Error> {
+
+pub struct KvChainStore<'a> {
+    _store: Store,
+    headers: Bucket<'a, Vec<u8>, Vec<u8>>,
+    index: Bucket<'a, Integer, Vec<u8>>,
+    meta: Bucket<'a, &'a str, Vec<u8>>,
+    headers_cache: RwLock<HashMap<BlockHash, DiskBlockHeader>>,
+    index_cache: RwLock<HashMap<u32, BlockHash>>,
+}
+
+impl<'a> KvChainStore<'a> {
+    pub fn new(datadir: String) -> Result<KvChainStore<'a>, kv::Error> {
         // Configure the database
         let cfg = Config::new(datadir + "/chain_data").cache_capacity(100_000_000);
 
         // Open the key/value store
         let store = Store::new(cfg)?;
 
-        Ok(KvChainStore(store))
+        Ok(KvChainStore {
+            headers: store.bucket(Some("headers"))?,
+            index: store.bucket(Some("index"))?,
+            meta: store.bucket(None)?,
+            _store: store,
+            headers_cache: RwLock::new(HashMap::new()),
+            index_cache: RwLock::new(HashMap::new()),
+        })
     }
 }
-impl ChainStore for KvChainStore {
+
+impl<'a> ChainStore for KvChainStore<'a> {
     type Error = kv::Error;
     fn load_roots(&self) -> Result<Option<Vec<u8>>, Self::Error> {
-        let bucket = self.0.bucket::<&str, Vec<u8>>(None)?;
-        bucket.get(&"roots")
+        self.meta.get(&"roots")
     }
-    fn save_roots(&self, roots: Vec<u8>) -> Result<(), Self::Error> {
-        let bucket = self.0.bucket::<&str, Vec<u8>>(None)?;
 
-        bucket.set(&"roots", &roots)?;
+    fn save_roots(&self, roots: Vec<u8>) -> Result<(), Self::Error> {
+        self.meta.set(&"roots", &roots)?;
         Ok(())
     }
+
     fn load_height(&self) -> Result<Option<BestChain>, Self::Error> {
-        let bucket = self.0.bucket::<&str, Vec<u8>>(None)?;
-        let height = bucket.get(&"height")?;
-
+        let height = self.meta.get(&"height")?;
         if let Some(height) = height {
             return Ok(Some(deserialize(&height).unwrap()));
         }
 
         Ok(None)
     }
+
     fn save_height(&self, height: &BestChain) -> Result<(), Self::Error> {
-        let bucket = self.0.bucket::<&str, Vec<u8>>(None)?;
         let height = serialize(height);
-        bucket.set(&"height", &height)?;
+        self.meta.set(&"height", &height)?;
         Ok(())
     }
-    fn get_header(&self, block_hash: &BlockHash) -> Result<Option<DiskBlockHeader>, Self::Error> {
-        let bucket = self.0.bucket::<&[u8], Vec<u8>>(Some("header"))?;
-        let block_hash = serialize(&block_hash);
 
-        let header = bucket.get(&&*block_hash)?;
-        if let Some(header) = header {
-            return Ok(Some(deserialize(&header).unwrap()));
+    fn get_header(&self, block_hash: &BlockHash) -> Result<Option<DiskBlockHeader>, Self::Error> {
+        match self.headers_cache.read().get(block_hash) {
+            Some(header) => Ok(Some(*header)),
+            None => {
+                let block_hash = serialize(&block_hash);
+                Ok(self
+                    .headers
+                    .get(&block_hash)?
+                    .and_then(|b| deserialize(&b).ok()))
+            }
         }
-        Ok(None)
     }
+
     fn flush(&self) -> Result<(), Self::Error> {
+        // save all headers in batch
+        let mut batch = Batch::new();
+        for header in self.headers_cache.read().iter() {
+            let ser_header = serialize(header.1);
+            let block_hash = serialize(&header.1.block_hash());
+            batch.set(&block_hash, &ser_header)?;
+        }
+        self.headers.batch(batch)?;
+        self.headers_cache.write().clear();
+
+        // save all index in batch
+        let mut batch = Batch::new();
+        for (height, hash) in self.index_cache.read().iter() {
+            let ser_hash = serialize(hash);
+            batch.set(&Integer::from(*height), &ser_hash)?;
+        }
+        self.index.batch(batch)?;
+        self.index_cache.write().clear();
+
         // Flush the header bucket
-        let bucket = self.0.bucket::<&[u8], Vec<u8>>(Some("header"))?;
-        bucket.flush()?;
+        self.headers.flush()?;
         // Flush the block index
-        let bucket = self.0.bucket::<&[u8], Vec<u8>>(Some("index"))?;
-        bucket.flush()?;
+        self.index.flush()?;
         // Flush the default bucket with meta-info
-        let bucket = self.0.bucket::<&[u8], Vec<u8>>(None)?;
-        bucket.flush()?;
+        self.meta.flush()?;
         Ok(())
     }
+
     fn save_header(&self, header: &DiskBlockHeader) -> Result<(), Self::Error> {
-        let ser_header = serialize(header);
-        let block_hash = serialize(&header.block_hash());
-        let bucket = self.0.bucket::<&[u8], Vec<u8>>(Some("header"))?;
-        bucket.set(&&*block_hash, &ser_header)?;
+        self.headers_cache
+            .write()
+            .insert(header.block_hash(), *header);
         Ok(())
     }
+
     fn get_block_hash(&self, height: u32) -> Result<Option<BlockHash>, Self::Error> {
-        let bucket = self.0.bucket::<Integer, Vec<u8>>(Some("index"))?;
-        let block = bucket.get(&Integer::from(height))?;
-        if let Some(block) = block {
-            return Ok(Some(deserialize(&block).unwrap()));
+        match self.index_cache.read().get(&height).cloned() {
+            Some(hash) => Ok(Some(hash)),
+            None => Ok(self
+                .index
+                .get(&Integer::from(height))?
+                .and_then(|b| deserialize(&b).ok())),
         }
-        Ok(None)
     }
+
     fn update_block_index(&self, height: u32, hash: BlockHash) -> Result<(), Self::Error> {
-        let bucket = self.0.bucket::<Integer, Vec<u8>>(Some("index"))?;
-        let block_hash = serialize(&hash);
-
-        bucket.set(&Integer::from(height), &block_hash)?;
+        self.index_cache.write().insert(height, hash);
         Ok(())
     }
 }
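Taken together, the chainstore.rs changes turn KvChainStore into a write-back cached store: the kv buckets are opened once in new(), save_header and update_block_index only touch in-memory HashMaps behind spin::RwLock, and flush() drains those maps into the database through kv::Batch so many records are persisted in one shot. This is also why DiskBlockHeader now derives Clone and Copy: cache hits are returned by value. Below is a minimal, self-contained sketch of the same write-back pattern; it uses std's RwLock and a plain HashMap as a stand-in for the kv bucket, and all names are illustrative rather than Floresta's API:

    use std::collections::HashMap;
    use std::sync::RwLock;

    // Stand-in for the on-disk bucket; in Floresta this is a `kv::Bucket`.
    #[derive(Default)]
    struct FakeBucket {
        data: RwLock<HashMap<Vec<u8>, Vec<u8>>>,
    }

    impl FakeBucket {
        // Analogue of applying a `kv::Batch`: one call persists many entries.
        fn apply_batch(&self, batch: Vec<(Vec<u8>, Vec<u8>)>) {
            let mut data = self.data.write().unwrap();
            for (k, v) in batch {
                data.insert(k, v);
            }
        }

        fn get(&self, key: &[u8]) -> Option<Vec<u8>> {
            self.data.read().unwrap().get(key).cloned()
        }
    }

    #[derive(Default)]
    struct WriteBackStore {
        headers: FakeBucket,
        // Write-back cache: `save_header` only touches this map; `flush` persists it.
        headers_cache: RwLock<HashMap<Vec<u8>, Vec<u8>>>,
    }

    impl WriteBackStore {
        fn save_header(&self, hash: Vec<u8>, header: Vec<u8>) {
            self.headers_cache.write().unwrap().insert(hash, header);
        }

        fn get_header(&self, hash: &[u8]) -> Option<Vec<u8>> {
            // Serve from the cache first, fall back to the bucket.
            if let Some(h) = self.headers_cache.read().unwrap().get(hash) {
                return Some(h.clone());
            }
            self.headers.get(hash)
        }

        fn flush(&self) {
            // Drain the cache into a single batch, then clear it.
            let batch: Vec<_> = self
                .headers_cache
                .read()
                .unwrap()
                .iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect();
            self.headers.apply_batch(batch);
            self.headers_cache.write().unwrap().clear();
        }
    }

    fn main() {
        let store = WriteBackStore::default();
        store.save_header(b"hash".to_vec(), b"header-bytes".to_vec());
        assert_eq!(store.get_header(b"hash"), Some(b"header-bytes".to_vec()));
        store.flush(); // now the header also lives in the (fake) bucket
        assert_eq!(store.get_header(b"hash"), Some(b"header-bytes".to_vec()));
    }

The payoff is the same as in the real store: header and index lookups during validation mostly hit memory, and disk writes happen in a few batched transactions instead of one transaction per record.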
16 changes: 15 additions & 1 deletion crates/floresta-chain/src/pruned_utreexo/consensus.rs
@@ -16,6 +16,7 @@ use bitcoin::pow::U256;
 use bitcoin::Block;
 use bitcoin::BlockHash;
 use bitcoin::OutPoint;
+use bitcoin::ScriptBuf;
 use bitcoin::Target;
 use bitcoin::Transaction;
 use bitcoin::TxOut;
@@ -228,10 +229,11 @@ impl Consensus {
                     block_inputs.insert((input.previous_output.txid, input.previous_output.vout));
                 }
             }
+
         // Get all leaf hashes that will be added to the accumulator
         for transaction in block.txdata.iter() {
             for (i, output) in transaction.output.iter().enumerate() {
-                if !output.script_pubkey.is_provably_unspendable()
+                if !Self::is_unspendable(&output.script_pubkey)
                     && !block_inputs.contains(&(transaction.txid(), i as u32))
                 {
                     leaf_hashes.push(Self::get_leaf_hashes(
@@ -252,4 +254,16 @@
         let acc = acc.modify(&hashes, &del_hashes, &proof)?.0;
         Ok(acc)
     }
+
+    fn is_unspendable(script: &ScriptBuf) -> bool {
+        if script.len() > 10_000 {
+            return true;
+        }
+
+        if !script.is_empty() && script.as_bytes()[0] == 0x6a {
+            return true;
+        }
+
+        false
+    }
 }
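The new is_unspendable helper is a local replacement for the previous output.script_pubkey.is_provably_unspendable() call: it treats an output as unspendable when its script exceeds the 10,000-byte consensus script-size limit or begins with OP_RETURN (0x6a), so such outputs are never committed to the utreexo accumulator. A small sketch of the same predicate written over a raw byte slice (a free function, not Floresta's exact signature):

    /// Treats a script as unspendable when it is larger than the 10,000-byte
    /// consensus script-size limit or when it starts with OP_RETURN (0x6a).
    fn is_unspendable(script: &[u8]) -> bool {
        if script.len() > 10_000 {
            return true;
        }
        if !script.is_empty() && script[0] == 0x6a {
            return true;
        }
        false
    }

    fn main() {
        assert!(is_unspendable(&[0x6a, 0x04, 0xde, 0xad, 0xbe, 0xef])); // OP_RETURN with a data push
        assert!(!is_unspendable(&[0x51])); // OP_TRUE: trivially spendable
        assert!(is_unspendable(&vec![0x00; 10_001])); // over the size limit
        println!("all checks passed");
    }

Like the helper in the diff, this looks only at the total length and the first byte, which is enough to keep OP_RETURN outputs and oversized scripts out of the accumulator.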
2 changes: 1 addition & 1 deletion crates/floresta-electrum/Cargo.toml
@@ -21,7 +21,7 @@ floresta-watch-only = { path = "../floresta-watch-only" }
 floresta-compact-filters = { path = "../floresta-compact-filters" }
 floresta-wire = { path = "../floresta-wire" }
 
-rustreexo = "0.1.0"
+rustreexo = "0.2.0"
 sha2 = "^0.10.6"
 async-std = { version = "1.12.0", features = ["attributes"] }
 log = "0.4"
(Diffs for the remaining 9 changed files are not shown.)
