
Commit

chore(benches): this is a set of RAM intensive benchmarks to understand memory consumption patterns in AquaVM
raftedproc authored Jan 15, 2024
1 parent d7d33dd commit 1888de1
Showing 9 changed files with 1,578 additions and 733 deletions.
1,223 changes: 808 additions & 415 deletions benches/PERFORMANCE.json

Large diffs are not rendered by default.

769 changes: 451 additions & 318 deletions benches/PERFORMANCE.txt

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions junk/gen-bench-data/Cargo.toml
@@ -21,3 +21,5 @@ itertools = "0.10.5"
maplit = "1.0.2"
serde_json = "1.0.97"
serde = "1.0.165"
hex = "0.4.3"
rand = "0.8.5"
5 changes: 5 additions & 0 deletions junk/gen-bench-data/gen_benchmark_data.sh
@@ -27,6 +27,11 @@ for bench in multiple-cids10 \
parser-10000-100 \
parser-calls-10000-100 \
null \
lense-100mb \
map-100mb \
canon-map-100mb \
call-result-100mb \
parser-air-100mb \
;
do
echo "Generating ${bench} ..." >&2
261 changes: 261 additions & 0 deletions junk/gen-bench-data/src/main.rs
@@ -20,6 +20,8 @@ use std::path::PathBuf;

const PARTICLE_ID: &str = "0123456789ABCDEF";
const MAX_STREAM_SIZE: usize = 1023;
const MB: usize = 1024 * 1024;
const SEED: u64 = 123456789;

mod calls;
mod cid_benchmarking;
@@ -65,6 +67,16 @@ enum Bench {
#[command(name = "parser-calls-10000-100")]
ParserCalls10000_100,
Null,
#[command(name = "lense-100mb")]
Lense100MB,
#[command(name = "map-100mb")]
Map100MB,
#[command(name = "canon-map-100mb")]
CanonMap100MB,
#[command(name = "call-result-100mb")]
CallResult100MB,
#[command(name = "parser-air-100mb")]
ParserAir100MB,
}

fn main() {
@@ -96,6 +108,11 @@ fn main() {
Bench::Parser10000_100 => parser_10000_100(),
Bench::ParserCalls10000_100 => parser_calls(10000, 100),
Bench::Null => null(),
Bench::CallResult100MB => mem_consumption_with_size_in_mb(100),
Bench::Lense100MB => mem_consumption_w_lense_with_size_in_mb(100),
Bench::Map100MB => mem_consumption_w_map_2_scalar_with_size_in_mb(100),
Bench::CanonMap100MB => mem_consumption_w_canon_map_with_size_in_mb(100),
Bench::ParserAir100MB => mem_consumption_air_100mb(280000, 10),
};

save_data(&args.dest_dir, data).unwrap();
@@ -735,6 +752,250 @@ fn big_values_data() -> Data {
}
}

fn generate_random_data(random_data_size: usize) -> Vec<u8> {
use rand::rngs::StdRng;
use rand::Rng;
use rand::SeedableRng;

// hex::encode later expands every raw byte into two hex characters, so only half the requested size is generated here.
let random_data_size = random_data_size / 2 * MB;

let mut rng = StdRng::seed_from_u64(SEED);
let mut random_data = vec![0u8; random_data_size];
rng.fill(random_data.as_mut_slice());

random_data
}
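
The division by two above exists because hex encoding doubles the payload. A quick standalone sketch (not part of the commit; it only assumes the hex crate this change adds to Cargo.toml) checks that arithmetic:

// Standalone sketch: hex encoding doubles the length, which is why
// generate_random_data halves the requested size before filling the buffer.
fn main() {
    const MB: usize = 1024 * 1024;
    let raw = vec![0xABu8; 50 * MB];      // 50 MiB of raw bytes
    let encoded = hex::encode(&raw);      // two hex characters per byte
    assert_eq!(encoded.len(), 100 * MB);  // ~100 MiB of text ends up in the script
}

So a 100 MB benchmark embeds roughly 100 MB of hex text into the generated AIR script.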

fn mem_consumption_with_size_in_mb(data_size: usize) -> Data {
let random_data = generate_random_data(data_size);

let air_script = format!(
include_str!("mem_consumption_generic.air.tmpl"),
data = format_args!("\"{}\"", hex::encode(random_data))
);

let exec = AirScriptExecutor::<NativeAirRunner>::new(
TestRunParameters::from_init_peer_id("init_peer_id").with_particle_id(PARTICLE_ID),
vec![],
vec![],
&air_script,
)
.unwrap();

let keypair = exec
.get_network()
.get_named_peer_env("other_peer_id")
.expect("main peer")
.borrow()
.get_peer()
.get_keypair()
.clone();

let prev_res = exec.execute_one("init_peer_id").unwrap();

let peer_id: String = exec.resolve_name("other_peer_id").to_string();
let init_peer_id: String = exec.resolve_name("init_peer_id").to_string();

Data {
air: exec.get_transformed_air_script().to_string(),
prev_data: vec![],
cur_data: prev_res.data,
params_json: hashmap! {
"comment".to_owned() => "benchmarking".to_owned(),
"particle-id".to_owned() => PARTICLE_ID.to_owned(),
"current-peer-id".to_owned() => peer_id.clone(),
"init-peer-id".to_owned() => init_peer_id,
},
call_results: None,
keypair: bs58::encode(keypair.to_vec()).into_string(),
}
}
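
For readers unfamiliar with the template mechanism: include_str! pulls mem_consumption_generic.air.tmpl in at compile time and format! fills its {data} placeholder through a named argument. A minimal sketch of the same mechanism, using an inline literal that mirrors the first line of the templates below instead of the real file:

// Sketch of the placeholder substitution used by the mem_consumption_* generators:
// the named `data` argument fills the {data} slot; an inline literal stands in
// for the include_str!-ed template file here.
fn main() {
    let air = format!(
        r#"(call "init_peer_id" ("" "init") [] data) ; ok = [{data}]"#,
        data = format_args!("\"{}\"", "deadbeef")
    );
    assert_eq!(
        air,
        r#"(call "init_peer_id" ("" "init") [] data) ; ok = ["deadbeef"]"#
    );
}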

fn mem_consumption_w_lense_with_size_in_mb(data_size: usize) -> Data {
let random_data = generate_random_data(data_size);

let air_script = format!(
include_str!("mem_consumption_lense.air.tmpl"),
data = format_args!("{{\"attrib\": \"{}\"}}", hex::encode(random_data))
);

let exec = AirScriptExecutor::<NativeAirRunner>::new(
TestRunParameters::from_init_peer_id("init_peer_id").with_particle_id(PARTICLE_ID),
vec![],
vec![],
&air_script,
)
.unwrap();

let keypair = exec
.get_network()
.get_named_peer_env("other_peer_id")
.expect("main peer")
.borrow()
.get_peer()
.get_keypair()
.clone();

let prev_res = exec.execute_one("init_peer_id").unwrap();

let peer_id: String = exec.resolve_name("other_peer_id").to_string();
let init_peer_id: String = exec.resolve_name("init_peer_id").to_string();

Data {
air: exec.get_transformed_air_script().to_string(),
prev_data: vec![],
cur_data: prev_res.data,
params_json: hashmap! {
"comment".to_owned() => "benchmarking".to_owned(),
"particle-id".to_owned() => PARTICLE_ID.to_owned(),
"current-peer-id".to_owned() => peer_id.clone(),
"init-peer-id".to_owned() => init_peer_id,
},
call_results: None,
keypair: bs58::encode(keypair.to_vec()).into_string(),
}
}

fn mem_consumption_w_map_2_scalar_with_size_in_mb(data_size: usize) -> Data {
let random_data = generate_random_data(data_size);

let air_script = format!(
include_str!("mem_consumption_canon_map.air.tmpl"),
data = format_args!("\"{}\"", hex::encode(random_data))
);

let exec = AirScriptExecutor::<NativeAirRunner>::new(
TestRunParameters::from_init_peer_id("init_peer_id").with_particle_id(PARTICLE_ID),
vec![],
vec![],
&air_script,
)
.unwrap();

let keypair = exec
.get_network()
.get_named_peer_env("other_peer_id")
.expect("main peer")
.borrow()
.get_peer()
.get_keypair()
.clone();

let prev_res = exec.execute_one("init_peer_id").unwrap();

let peer_id: String = exec.resolve_name("other_peer_id").to_string();
let init_peer_id: String = exec.resolve_name("init_peer_id").to_string();

Data {
air: exec.get_transformed_air_script().to_string(),
prev_data: vec![],
cur_data: prev_res.data,
params_json: hashmap! {
"comment".to_owned() => "benchmarking".to_owned(),
"particle-id".to_owned() => PARTICLE_ID.to_owned(),
"current-peer-id".to_owned() => peer_id.clone(),
"init-peer-id".to_owned() => init_peer_id,
},
call_results: None,
keypair: bs58::encode(keypair.to_vec()).into_string(),
}
}

fn mem_consumption_w_canon_map_with_size_in_mb(data_size: usize) -> Data {
let random_data = generate_random_data(data_size);

let air_script = format!(
include_str!("mem_consumption_canon_map_2.air.tmpl"),
data = format_args!("\"{}\"", hex::encode(random_data))
);

let exec = AirScriptExecutor::<NativeAirRunner>::new(
TestRunParameters::from_init_peer_id("init_peer_id").with_particle_id(PARTICLE_ID),
vec![],
vec![],
&air_script,
)
.unwrap();

let keypair = exec
.get_network()
.get_named_peer_env("other_peer_id")
.expect("main peer")
.borrow()
.get_peer()
.get_keypair()
.clone();

let prev_res = exec.execute_one("init_peer_id").unwrap();

let peer_id: String = exec.resolve_name("other_peer_id").to_string();
let init_peer_id: String = exec.resolve_name("init_peer_id").to_string();

Data {
air: exec.get_transformed_air_script().to_string(),
prev_data: vec![],
cur_data: prev_res.data,
params_json: hashmap! {
"comment".to_owned() => "benchmarking".to_owned(),
"particle-id".to_owned() => PARTICLE_ID.to_owned(),
"current-peer-id".to_owned() => peer_id.clone(),
"init-peer-id".to_owned() => init_peer_id,
},
call_results: None,
keypair: bs58::encode(keypair.to_vec()).into_string(),
}
}

fn mem_consumption_air_100mb(calls: usize, vars: usize) -> Data {
let (keypair, peer_id) = derive_dummy_keypair("init_peer_id");
let particle_id = "particle_id";

let vars = (0..vars).map(|n| format!("var{}", n)).collect_vec();
let init_var = vars[0].clone();
let statements = vars
.iter()
.cycle()
.take(calls)
.tuple_windows()
.map(|(a, b)| format!(r#"(call {a} ("serv" "func") [] {b})"#))
.collect_vec();

fn build_tree(statements: &[String]) -> String {
assert!(!statements.is_empty());
if statements.len() == 1 {
statements[0].clone()
} else {
let mid = statements.len() / 2;
format!(
"(seq {} {})",
build_tree(&statements[..mid]),
build_tree(&statements[mid..])
)
}
}

let tree = build_tree(&statements);
let air = format!(
r#"(seq (call "peer" ("serv" "func") [] {}) {})"#,
init_var, tree
);

Data {
air,
prev_data: vec![],
cur_data: vec![],
call_results: None,
keypair: bs58::encode(keypair.as_inner().to_vec()).into_string(),
params_json: hashmap! {
"comment".to_owned() => "multiple calls parser benchmark".to_owned(),
"particle-id".to_owned() => particle_id.to_owned(),
"current-peer-id".to_owned() => peer_id.clone(),
"init-peer-id".to_owned() => peer_id,
},
}
}
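
Note the shape build_tree gives the generated script: it splits the statement list in half recursively, so the roughly 280,000 generated calls nest into a balanced (seq ...) tree whose depth grows logarithmically rather than one long right-leaning chain. A standalone sketch of that nesting (the helper body is repeated here only so the snippet runs on its own):

// Standalone sketch of the balanced nesting produced by build_tree:
// four statements become (seq (seq a b) (seq c d)), i.e. depth grows
// logarithmically with the number of calls.
fn build_tree(statements: &[String]) -> String {
    assert!(!statements.is_empty());
    if statements.len() == 1 {
        statements[0].clone()
    } else {
        let mid = statements.len() / 2;
        format!(
            "(seq {} {})",
            build_tree(&statements[..mid]),
            build_tree(&statements[mid..])
        )
    }
}

fn main() {
    let stmts: Vec<String> = ["a", "b", "c", "d"].iter().map(|s| s.to_string()).collect();
    assert_eq!(build_tree(&stmts), "(seq (seq a b) (seq c d))");
}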

fn parser_10000_100() -> Data {
let air_script = include_str!("parser_10000_100.air");

15 changes: 15 additions & 0 deletions junk/gen-bench-data/src/mem_consumption_canon_map.air.tmpl
@@ -0,0 +1,15 @@
(seq
(call "init_peer_id" ("" "init") [] data) ; ok = [{data}]
(seq
(seq
(fold data unused
(seq
(ap ("key" unused) %map)
(next unused)
)
)
(canon "other_peer_id" %map new_data)
)
(canon "init_peer_id" $data #$data)
)
)
15 changes: 15 additions & 0 deletions junk/gen-bench-data/src/mem_consumption_canon_map_2.air.tmpl
@@ -0,0 +1,15 @@
(seq
(call "init_peer_id" ("" "init") [] data) ; ok = [{data}]
(seq
(seq
(fold data unused
(seq
(ap ("key" unused) %map)
(next unused)
)
)
(canon "other_peer_id" %map #%new_data)
)
(canon "init_peer_id" $data #$new_data)
)
)
9 changes: 9 additions & 0 deletions junk/gen-bench-data/src/mem_consumption_generic.air.tmpl
@@ -0,0 +1,9 @@
(seq
(call "init_peer_id" ("" "init") [] data) ; ok = [{data}]
(seq
(fold data unused
(next unused)
)
(canon "other_peer_id" %map #%canon)
)
)
12 changes: 12 additions & 0 deletions junk/gen-bench-data/src/mem_consumption_lense.air.tmpl
@@ -0,0 +1,12 @@
(seq
(call "init_peer_id" ("" "init") [] data) ; ok = [{data}]
(seq
(fold data unused
(seq
(ap unused.$.attrib $data)
(next unused)
)
)
(canon "other_peer_id" $data #$data)
)
)
