
chore: improve comments + clean codebase #291

Merged · 11 commits · Nov 13, 2024
2 changes: 1 addition & 1 deletion Scarb.toml
@@ -10,7 +10,7 @@ repository = "https://github.com/keep-starknet-strange/raito"
license-file = "LICENSE"

[workspace.dependencies]
cairo_test = "2.8.0"
cairo_test = "2.8.4"
shinigami_engine = { git = "https://github.com/keep-starknet-strange/shinigami.git", rev = "3415ed6" }

[profile.cairo1-run.cairo]
20 changes: 10 additions & 10 deletions packages/client/src/test.cairo
@@ -1,37 +1,37 @@
use core::serde::Serde;
use core::testing::get_available_gas;
use consensus::types::block::Block;
use consensus::types::chain_state::{ChainState, BlockValidatorImpl};
use consensus::types::utxo_set::{UtxoSet, UtxoSetTrait};
use utreexo::stump::accumulator::StumpUtreexoAccumulator;
use utreexo::stump::state::UtreexoStumpState;
use utreexo::stump::proof::UtreexoBatchProof;
use core::testing::get_available_gas;
use core::serde::Serde;

/// Integration testing program arguments.
#[derive(Drop)]
struct Args {
/// Current (initial) chain state
/// Current (initial) chain state.
chain_state: ChainState,
/// Batch of blocks that have to be applied to the current chain state
/// Batch of blocks that have to be applied to the current chain state.
blocks: Array<Block>,
/// Expected chain state (that we want to compare the result with)
/// Expected chain state (that we want to compare the result with).
expected_chain_state: ChainState,
/// Optional Utreexo arguments
/// Optional Utreexo arguments.
utreexo_args: Option<UtreexoArgs>,
/// If this flag is set, locking scripts will be executed
execute_script: bool,
}

/// Utreexo arguments necessary for constraining the UTXO set
/// Utreexo arguments necessary for constraining the UTXO set.
#[derive(Drop, Serde)]
struct UtreexoArgs {
/// Current (initial) accumulator state
/// Current (initial) accumulator state.
state: UtreexoStumpState,
/// Batch inclusion proof for TXOs spent during the current block.
/// Note that it doesn't support flow with multiple blocks applied
/// in a single program run.
proof: UtreexoBatchProof,
/// Expected accumulator state at the end of the execution
/// Expected accumulator state at the end of the execution.
expected_state: UtreexoStumpState,
}

@@ -105,7 +105,7 @@ fn main(arguments: Array<felt252>) -> Array<felt252> {
}

/// Workaround for handling missing `utreexo_args` field.
/// Rough analogue of `#[serde(default)]`
/// Rough analogue of `#[serde(default)]`.
impl ArgsSerde of Serde<Args> {
fn serialize(self: @Args, ref output: Array<felt252>) {
panic!("not implemented");
46 changes: 28 additions & 18 deletions packages/consensus/src/codec.cairo
@@ -4,10 +4,10 @@ use super::types::transaction::{Transaction, TxIn, TxOut, OutPoint};
use utils::hash::Digest;

pub trait Encode<T> {
/// Encode using Bitcoin codec and append to the buffer.
/// Encodes using Bitcoin codec and appends to the buffer.
fn encode_to(self: @T, ref dest: ByteArray);

/// Encode using Bitcoin codec and return byte array
/// Encodes using Bitcoin codec and returns a `ByteArray`.
fn encode(
self: @T
) -> ByteArray {
@@ -17,6 +17,7 @@ pub trait Encode<T> {
}
}
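A minimal usage sketch of this trait (not part of the diff; the `consensus::codec` import path, the `demo` function, and having the `EncodeU32` impl in scope are assumptions based on the file shown here):

```cairo
use consensus::codec::{Encode, EncodeU32};

fn demo() -> ByteArray {
    let version: u32 = 2;
    let mut buf: ByteArray = Default::default();
    // Append the 4-byte little-endian encoding to an existing buffer...
    version.encode_to(ref buf);
    // ...or produce a standalone byte array via the default `encode`.
    let standalone: ByteArray = version.encode();
    assert!(buf == standalone, "encodings should match");
    buf
}
```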

/// `Encode` trait implementation for `Span<T>`.
pub impl EncodeSpan<T, +Encode<T>> of Encode<Span<T>> {
fn encode_to(self: @Span<T>, ref dest: ByteArray) {
let items = *self;
@@ -27,31 +28,36 @@ pub impl EncodeSpan<T, +Encode<T>> of Encode<Span<T>> {
}
}

/// `Encode` trait implementation for `ByteArray`.
pub impl EncodeByteArray of Encode<ByteArray> {
fn encode_to(self: @ByteArray, ref dest: ByteArray) {
encode_compact_size(self.len(), ref dest);
dest.append(self);
}
}

/// `Encode` trait implementation for `u32`.
pub impl EncodeU32 of Encode<u32> {
fn encode_to(self: @u32, ref dest: ByteArray) {
dest.append_word_rev((*self).into(), 4);
}
}

/// `Encode` trait implementation for `u64`.
pub impl EncodeU64 of Encode<u64> {
fn encode_to(self: @u64, ref dest: ByteArray) {
dest.append_word_rev((*self).into(), 8);
}
}

/// `Encode` trait implementation for `Digest`.
pub impl EncodeHash of Encode<Digest> {
fn encode_to(self: @Digest, ref dest: ByteArray) {
dest.append(@(*self).into());
}
}

/// `Encode` trait implementation for `TxIn`.
pub impl EncodeTxIn of Encode<TxIn> {
fn encode_to(self: @TxIn, ref dest: ByteArray) {
self.previous_output.encode_to(ref dest);
@@ -60,20 +66,23 @@ pub impl EncodeTxIn of Encode<TxIn> {
}
}

/// `Encode` trait implementation for `TxOut`.
pub impl EncodeTxOut of Encode<TxOut> {
fn encode_to(self: @TxOut, ref dest: ByteArray) {
self.value.encode_to(ref dest);
(*self.pk_script).encode_to(ref dest);
}
}

/// `Encode` trait implementation for `OutPoint`.
pub impl EncodeOutpoint of Encode<OutPoint> {
fn encode_to(self: @OutPoint, ref dest: ByteArray) {
self.txid.encode_to(ref dest);
self.vout.encode_to(ref dest);
}
}

/// `Encode` trait implementation for `Transaction`.
pub impl EncodeTransaction of Encode<Transaction> {
fn encode_to(self: @Transaction, ref dest: ByteArray) {
self.version.encode_to(ref dest);
@@ -85,7 +94,7 @@ pub impl EncodeTransaction of Encode<Transaction> {

#[generate_trait]
pub impl TransactionCodecImpl of TransactionCodec {
/// Reencode transaction with witness fields (for computing wtxid) given the legacy encoded
/// Reencodes transaction with witness fields (for computing wtxid) given the legacy encoded
/// bytes.
/// We use this method to avoid double serialization.
fn encode_with_witness(self: @Transaction, legacy_bytes: @ByteArray) -> ByteArray {
@@ -117,9 +126,9 @@ pub impl TransactionCodecImpl of TransactionCodec {
///
/// https://learnmeabitcoin.com/technical/general/compact-size/
pub fn encode_compact_size(len: usize, ref dest: ByteArray) {
// first covert the len into the felt252
// First convert the len into a `felt252`
let val: felt252 = len.try_into().unwrap();
// then append as the reverse word is this correct i think

if (len < 253) {
dest.append_word_rev(val, 1);
} else if (len < 65536) {
@@ -131,11 +140,12 @@ pub fn encode_compact_size(len: usize, ref dest: ByteArray) {
}
// Note: `usize` is a `u32` alias, so lens >= 4,294,967,296 are not handled.
}
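For reference, a worked example of the compact-size rule above (a sketch, not part of this diff; `from_hex` and the import paths are assumed from the tests in this file):

```cairo
use utils::hex::from_hex;
use consensus::codec::encode_compact_size;

#[test]
fn compact_size_examples() {
    let mut buf: ByteArray = Default::default();
    encode_compact_size(252, ref buf);   // still fits in a single byte: fc
    encode_compact_size(253, ref buf);   // 0xfd marker + u16 little-endian: fd fd 00
    encode_compact_size(65536, ref buf); // 0xfe marker + u32 little-endian: fe 00 00 01 00
    assert_eq!(buf, from_hex("fcfdfd00fe00000100"));
}
```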

#[cfg(test)]
mod tests {
use utils::hex::{from_hex, hex_to_hash_rev};
use crate::types::transaction::{Transaction, TxIn, TxOut, OutPoint};
use super::{Encode, TransactionCodec, encode_compact_size};
use utils::hex::{from_hex, hex_to_hash_rev};

#[test]
fn test_encode_compact_size1() {
@@ -182,7 +192,7 @@ mod tests {

#[test]
fn test_encode_txout() {
// block 170 coinbase tx
// Block 170 coinbase tx
let txout = @TxOut {
value: 5000000000_u64,
pk_script: @from_hex(
@@ -201,7 +211,7 @@

#[test]
fn test_encode_outpoint() {
// block 170 coinbase tx b1fea52486ce0c62bb442b530a3f0132b826c74e473d1f2c220bfa78111c5082
// Block 170 coinbase tx b1fea52486ce0c62bb442b530a3f0132b826c74e473d1f2c220bfa78111c5082
let outpoint = OutPoint {
txid: hex_to_hash_rev(
"0000000000000000000000000000000000000000000000000000000000000000"
@@ -222,7 +232,7 @@

#[test]
fn test_encode_outpoint2() {
//block 170 tx f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16
//Block 170 tx f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16
let outpoint = OutPoint {
txid: hex_to_hash_rev(
"0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9"
@@ -243,7 +253,7 @@

#[test]
fn test_encode_txin1() {
// tx b1fea52486ce0c62bb442b530a3f0132b826c74e473d1f2c220bfa78111c5082
// Tx b1fea52486ce0c62bb442b530a3f0132b826c74e473d1f2c220bfa78111c5082
let txin = @TxIn {
script: @from_hex("04ffff001d0102"),
sequence: 0xffffffff,
@@ -269,7 +279,7 @@

#[test]
fn test_encode_txin2() {
// tx 4ff32a7e58200897220ce4615e30e3e414991222d7eda27e693116abea8b8f33,
// Tx 4ff32a7e58200897220ce4615e30e3e414991222d7eda27e693116abea8b8f33,
// input 2
let txin = @TxIn {
script: @from_hex(
@@ -298,7 +308,7 @@

#[test]
fn test_encode_tx1() {
// tx 4ff32a7e58200897220ce4615e30e3e414991222d7eda27e693116abea8b8f33
// Tx 4ff32a7e58200897220ce4615e30e3e414991222d7eda27e693116abea8b8f33
let tx = @Transaction {
version: 1_u32,
is_segwit: false,
@@ -383,7 +393,7 @@

#[test]
fn test_encode_tx_many_inputs() {
// tx 23d5c86600b72cd512aecebd68a7274f611cd96eb9106125f4ef2502f54effa5
// Tx 23d5c86600b72cd512aecebd68a7274f611cd96eb9106125f4ef2502f54effa5
let tx = @Transaction {
version: 1,
is_segwit: false,
@@ -603,7 +613,7 @@

#[test]
fn test_encode_tx_many_outputs() {
// tx 3e6cc776f588a464c98e8f701cdcde651c7b3620c44c65099fb3d2f4d8ea260e
// Tx 3e6cc776f588a464c98e8f701cdcde651c7b3620c44c65099fb3d2f4d8ea260e
let tx = @Transaction {
version: 1,
is_segwit: false,
@@ -711,7 +721,7 @@

#[test]
fn test_encode_tx_witness1() {
// tx 65d8bd45f01bd6209d8695d126ba6bb4f2936501c12b9a1ddc9e38600d35aaa2
// Tx 65d8bd45f01bd6209d8695d126ba6bb4f2936501c12b9a1ddc9e38600d35aaa2
let tx = @Transaction {
version: 2,
is_segwit: true,
@@ -775,7 +785,7 @@

#[test]
fn test_encode_tx_witness2() {
// tx 7ee8997b455d8231c162277943a9a2d2d98800faa51da79c17eeb5156739a628,
// Tx 7ee8997b455d8231c162277943a9a2d2d98800faa51da79c17eeb5156739a628,
let tx = @Transaction {
version: 2,
is_segwit: true,
@@ -861,8 +871,8 @@
}
#[test]
fn test_encode_tx_witness3() {
/// tx c06aaaa2753dc4e74dd4fe817522dc3c126fd71792dd9acfefdaff11f8ff954d
/// data from example https://learnmeabitcoin.com/technical/transaction/wtxid/
/// Tx c06aaaa2753dc4e74dd4fe817522dc3c126fd71792dd9acfefdaff11f8ff954d
/// Data from example https://learnmeabitcoin.com/technical/transaction/wtxid/
let tx = @Transaction {
version: 1,
is_segwit: true,
18 changes: 9 additions & 9 deletions packages/consensus/src/lib.cairo
@@ -1,17 +1,17 @@
pub mod codec;
pub mod types {
pub mod block;
pub mod chain_state;
pub mod transaction;
pub mod utxo_set;
}
pub mod validation {
pub mod difficulty;
pub mod block;
pub mod coinbase;
pub mod difficulty;
pub mod locktime;
pub mod script;
pub mod timestamp;
pub mod transaction;
pub mod work;
pub mod block;
}
pub mod codec;
pub mod types {
pub mod chain_state;
pub mod block;
pub mod transaction;
pub mod utxo_set;
}
17 changes: 10 additions & 7 deletions packages/consensus/src/types/block.cairo
@@ -2,11 +2,11 @@
//!
//! The data is expected to be prepared in advance and passed as program arguments.

use core::fmt::{Display, Formatter, Error};
use super::transaction::Transaction;
use utils::hash::Digest;
use utils::double_sha256::double_sha256_u32_array;
use utils::numeric::u32_byte_reverse;
use super::transaction::Transaction;
use core::fmt::{Display, Formatter, Error};

/// Represents a block in the blockchain.
#[derive(Drop, Copy, Debug, PartialEq, Default, Serde)]
@@ -54,7 +54,7 @@ pub struct Header {

#[generate_trait]
pub impl BlockHashImpl of BlockHash {
/// Compute hash of the block header given the missing fields.
/// Computes the hash of the block header given the missing fields.
fn hash(self: @Header, prev_block_hash: Digest, merkle_root: Digest) -> Digest {
let mut header_data_u32: Array<u32> = array![];

@@ -70,13 +70,14 @@ pub impl BlockHashImpl of BlockHash {
}
}

/// Empty transaction data
/// `Default` trait implementation of `TransactionData`, i.e., empty transaction data.
pub impl TransactionDataDefault of Default<TransactionData> {
fn default() -> TransactionData {
TransactionData::Transactions(array![].span())
}
}

/// `Display` trait implementation for `Block`.
impl BlockDisplay of Display<Block> {
fn fmt(self: @Block, ref f: Formatter) -> Result<(), Error> {
let data = match *self.data {
@@ -89,6 +90,7 @@ impl BlockDisplay of Display<Block> {
}
}

/// `Display` trait implementation for `Header`.
impl HeaderDisplay of Display<Header> {
fn fmt(self: @Header, ref f: Formatter) -> Result<(), Error> {
let str: ByteArray = format!(
@@ -103,6 +105,7 @@ impl HeaderDisplay of Display<Header> {
}
}

/// `Display` trait implementation for `TransactionData`.
impl TransactionDataDisplay of Display<TransactionData> {
fn fmt(self: @TransactionData, ref f: Formatter) -> Result<(), Error> {
match *self {
@@ -117,8 +120,8 @@ impl TransactionDataDisplay of Display<TransactionData> {

#[cfg(test)]
mod tests {
use super::{Header, BlockHash};
use crate::types::chain_state::ChainState;
use super::{Header, BlockHash};
use utils::hash::Digest;

#[test]
@@ -128,7 +131,7 @@ mod tests {
.best_block_hash =
0x000000002a22cfee1f2c846adbd12b3e183d4f97683f85dad08a79780a84bd55_u256
.into();
// block 170
// Block 170
let header = Header {
version: 1_u32, time: 1231731025_u32, bits: 0x1d00ffff_u32, nonce: 1889418792_u32
};
@@ -153,7 +156,7 @@
.best_block_hash =
0x000000002a22cfee1f2c846adbd12b3e183d4f97683f85dad08a79780a84bd55_u256
.into();
// block 170
// Block 170
let header = Header {
version: 1_u32, time: 1231731025_u32, bits: 0x1d00ffff_u32, nonce: 1889418792_u32
};