update hvm-core #216

Merged · 2 commits · Feb 29, 2024
3 changes: 2 additions & 1 deletion Cargo.lock

(Generated file; diff not rendered by default.)

1 change: 0 additions & 1 deletion cspell.json
@@ -48,7 +48,6 @@
"oprune",
"oref",
"postcondition",
"rdex",
"redex",
"redexes",
"readback",
8 changes: 4 additions & 4 deletions src/hvmc_net/pre_reduce.rs
@@ -41,24 +41,24 @@ pub fn pre_reduce_book(book: &mut Book, entrypoint: &str) -> Result<(), String>

for (nam, net) in book.iter_mut() {
// Skip unnecessary work
- if net.rdex.is_empty() || *nam == entrypoint {
+ if net.redexes.is_empty() || *nam == entrypoint {
continue;
}

- let area = hvmc::run::Net::<hvmc::run::Lazy>::init_heap(1 << 18);
+ let area = hvmc::run::Heap::new_words(1 << 18);
let mut rt = hvmc::run::DynNet::new(&area, false);
dispatch_dyn_net!(&mut rt => {
rt.boot(host.defs.get(nam).expect("No function."));
rt.expand();
rt.reduce(MAX_RWTS);
});

- // Move interactions with inert defs back into the net rdex array
+ // Move interactions with inert defs back into the net redexes array
for def in host.defs.values() {
if let Some(def) = def.downcast_ref::<InertDef>() {
let mut stored_redexes = def.data.0.lock().unwrap();
dispatch_dyn_net!(&mut rt => {
- rt.rdex.extend(core::mem::take(&mut *stored_redexes));
+ rt.redexes.extend(core::mem::take(&mut *stored_redexes));
})
}
}
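Note: for readers following the API migration, here is a minimal sketch of the pre-reduce call sequence after this change, using only the calls visible in the hunk above (`Heap::new_words`, `DynNet::new`, `dispatch_dyn_net!`, `boot`, `expand`, `reduce`). The helper name `pre_reduce_def` and the `MAX_RWTS` value are placeholders, not part of this PR.

```rust
use hvmc::{dispatch_dyn_net, host::Host, run::{DynNet, Heap}};

// Placeholder rewrite budget; the real constant lives in pre_reduce.rs.
const MAX_RWTS: usize = 100_000;

// Hypothetical helper mirroring the loop body above: allocate a scratch heap
// (sized in words), boot the named definition, then reduce it within the budget.
fn pre_reduce_def(host: &Host, nam: &str) {
  let area = Heap::new_words(1 << 18);
  let mut rt = DynNet::new(&area, false); // `false` selects the strict (non-lazy) runtime
  dispatch_dyn_net!(&mut rt => {
    rt.boot(host.defs.get(nam).expect("No function."));
    rt.expand();
    rt.reduce(MAX_RWTS);
  });
}
```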
2 changes: 1 addition & 1 deletion src/hvmc_net/prune.rs
@@ -11,7 +11,7 @@ pub fn prune_defs(book: &mut Book, entrypoint: String) {
while let Some(nam) = to_visit.pop() {
let def = &book[&nam];
used_defs_in_tree(&def.root, &mut used_defs, &mut to_visit);
- for (a, b) in &def.rdex {
+ for (a, b) in &def.redexes {
used_defs_in_tree(a, &mut used_defs, &mut to_visit);
used_defs_in_tree(b, &mut used_defs, &mut to_visit);
}
14 changes: 7 additions & 7 deletions src/lib.rs
@@ -6,7 +6,7 @@ use hvmc::{
ast::{self, Net},
dispatch_dyn_net,
host::Host,
- run::{DynNet, Net as RtNet, Rewrites},
+ run::{DynNet, Heap, Rewrites},
stdlib::LogDef,
};
use hvmc_net::{pre_reduce::pre_reduce_book, prune::prune_defs};
@@ -53,7 +53,7 @@ pub fn create_host(book: Arc<Book>, labels: Arc<Labels>, compile_opts: CompileOp
move |wire| {
let host = host.lock().unwrap();
let tree = host.readback_tree(&wire);
- let net = hvmc::ast::Net { root: tree, rdex: vec![] };
+ let net = hvmc::ast::Net { root: tree, redexes: vec![] };
let net = hvmc_to_net(&net);
let (mut term, mut readback_errors) = net_to_term(&net, &book, &labels, false);
let resugar_errs = term.resugar_adts(&book, compile_opts.adt_encoding);
@@ -73,7 +73,7 @@ pub fn create_host(book: Arc<Book>, labels: Arc<Labels>, compile_opts: CompileOp
move |wire| {
let host = host.lock().unwrap();
let tree = host.readback_tree(&wire);
- let net = hvmc::ast::Net { root: tree, rdex: vec![] };
+ let net = hvmc::ast::Net { root: tree, redexes: vec![] };
let net = hvmc_to_net(&net);
let (mut term, mut readback_errors) = net_to_term(&net, &book, &labels, false);
let resugar_errs = term.resugar_adts(&book, compile_opts.adt_encoding);
@@ -217,7 +217,7 @@ pub fn run_book(
pub fn count_nodes<'l>(net: &'l hvmc::ast::Net) -> usize {
let mut visit: Vec<&'l hvmc::ast::Tree> = vec![&net.root];
let mut count = 0usize;
- for (l, r) in &net.rdex {
+ for (l, r) in &net.redexes {
visit.push(l);
visit.push(r);
}
@@ -253,7 +253,7 @@ pub fn run_compiled(
hook: Option<impl FnMut(&Net)>,
entrypoint: &str,
) -> (Net, RunStats) {
- let heap = RtNet::<hvmc::run::Lazy>::init_heap(mem_size);
+ let heap = Heap::new_bytes(mem_size);
let mut root = DynNet::new(&heap, run_opts.lazy_mode);
let max_rwts = run_opts.max_rewrites.map(|x| x.clamp(usize::MIN as u64, usize::MAX as u64) as usize);
// Expect won't be reached because there's
@@ -265,7 +265,7 @@

if let Some(mut hook) = hook {
root.expand();
- while !root.rdex.is_empty() {
+ while !root.redexes.is_empty() {
hook(&host.lock().unwrap().readback(root));
root.reduce(1);
root.expand();
@@ -278,7 +278,7 @@
panic!("Parallel mode does not yet support rewrite limit");
}
root.expand();
- while !root.rdex.is_empty() {
+ while !root.redexes.is_empty() {
let old_rwts = root.rwts.total();
root.reduce(max_rwts);
let delta_rwts = root.rwts.total() - old_rwts;
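Note: a condensed sketch of the reduce loop above, mainly to highlight the two changes this PR tracks in lib.rs: the heap is now created in bytes with `Heap::new_bytes`, and the active-pair list is the `redexes` field. The function name is hypothetical, booting the entrypoint is elided, and the `dispatch_dyn_net!` wrapper is assumed to match how the surrounding code dispatches on the runtime variant.

```rust
use hvmc::{dispatch_dyn_net, run::{DynNet, Heap}};

// Hypothetical condensation of the loop in run_compiled: reduce in bounded
// batches, expanding after each one, until no active pairs (redexes) remain.
fn run_until_normal(mem_size: usize, lazy_mode: bool, max_rwts: usize) {
  let heap = Heap::new_bytes(mem_size); // `mem_size` is a byte count now
  let mut root = DynNet::new(&heap, lazy_mode);
  dispatch_dyn_net!(&mut root => {
    // (booting the entrypoint definition is elided here)
    root.expand();
    while !root.redexes.is_empty() {
      root.reduce(max_rwts);
      root.expand();
    }
  });
}
```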
3 changes: 1 addition & 2 deletions src/main.rs
@@ -232,9 +232,8 @@ fn execute_cli_mode(mut cli: Cli) -> Result<(), Info> {

let book = load_book(&path)?;

- let mem_size = max_mem / std::mem::size_of::<hvmc::run::Node>() as u64;
let run_opts =
- RunOpts { single_core, debug, linear, lazy_mode, max_memory: mem_size, max_rewrites: max_rwts };
+ RunOpts { single_core, debug, linear, lazy_mode, max_memory: max_mem, max_rewrites: max_rwts };
let (res_term, RunInfo { stats, readback_errors, net, book: _, labels: _ }) =
run_book(book, max_mem as usize, run_opts, warning_opts, opts)?;

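Note: the CLI change above removes the bytes-to-nodes conversion, so `max_memory` now carries a byte count straight through to the byte-sized heap. A self-contained sketch of the unit change; the node size below is a placeholder, not the real `size_of::<hvmc::run::Node>()`.

```rust
fn main() {
  let max_mem: u64 = 1 << 30; // e.g. 1 GiB requested on the command line

  // Old behavior (removed by this PR): convert the byte budget into a node count.
  let node_size: u64 = 8; // placeholder for size_of::<hvmc::run::Node>() as u64
  let old_limit = max_mem / node_size; // nodes

  // New behavior: pass the byte budget through unchanged.
  let new_limit = max_mem; // bytes

  println!("old limit: {old_limit} nodes, new limit: {new_limit} bytes");
}
```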
2 changes: 1 addition & 1 deletion src/net/hvmc_to_net.rs
@@ -18,7 +18,7 @@ fn hvmc_to_inodes(net: &Net) -> INodes {
inodes.append(&mut root);
}
// Convert all the trees forming active pairs.
- for (i, (tree1, tree2)) in net.rdex.iter().enumerate() {
+ for (i, (tree1, tree2)) in net.redexes.iter().enumerate() {
let tree_root = format!("a{i}");
let mut tree1 = tree_to_inodes(tree1, tree_root.clone(), net_root, &mut n_vars);
inodes.append(&mut tree1);
14 changes: 7 additions & 7 deletions src/net/net_to_hvmc.rs
@@ -16,7 +16,7 @@ pub fn nets_to_hvmc(nets: HashMap<String, INet>) -> Result<Book, String> {

/// Convert an inet-encoded definition into an hvmc AST inet.
pub fn net_to_hvmc(inet: &INet) -> Result<Net, String> {
- let (net_root, redexes) = get_tree_roots(inet)?;
+ let (net_root, net_redexes) = get_tree_roots(inet)?;
let mut port_to_var_id: HashMap<Port, VarId> = HashMap::new();
let root = if let Some(net_root) = net_root {
// If there is a root tree connected to the root node
@@ -26,13 +26,13 @@ pub fn net_to_hvmc(inet: &INet) -> Result<Net, String> {
port_to_var_id.insert(inet.enter_port(ROOT), 0);
Tree::Var { nam: num_to_name(0) }
};
- let mut rdex = vec![];
- for [root0, root1] in redexes {
- let rdex0 = net_tree_to_hvmc_tree(inet, root0, &mut port_to_var_id);
- let rdex1 = net_tree_to_hvmc_tree(inet, root1, &mut port_to_var_id);
- rdex.push((rdex0, rdex1));
+ let mut redexes = vec![];
+ for [root0, root1] in net_redexes {
+ let root0 = net_tree_to_hvmc_tree(inet, root0, &mut port_to_var_id);
+ let root1 = net_tree_to_hvmc_tree(inet, root1, &mut port_to_var_id);
+ redexes.push((root0, root1));
}
- Ok(Net { root, rdex })
+ Ok(Net { root, redexes })
}

fn net_tree_to_hvmc_tree(inet: &INet, tree_root: NodeId, port_to_var_id: &mut HashMap<Port, VarId>) -> Tree {
8 changes: 4 additions & 4 deletions tests/golden_tests.rs
@@ -143,14 +143,14 @@ fn run_file() {
let book = do_parse_book(code, path)?;
// 1 million nodes for the test runtime. Smaller doesn't seem to make it any faster
let (res, info) =
- run_book(book, 1 << 20, RunOpts::lazy(), WarningOpts::deny_all(), CompileOpts::heavy())?;
+ run_book(book, 1 << 24, RunOpts::lazy(), WarningOpts::deny_all(), CompileOpts::heavy())?;
Ok(format!("{}{}", display_readback_errors(&info.readback_errors), res))
}),
(&|code, path| {
let book = do_parse_book(code, path)?;
// 1 million nodes for the test runtime. Smaller doesn't seem to make it any faster
let (res, info) =
- run_book(book, 1 << 20, RunOpts::default(), WarningOpts::deny_all(), CompileOpts::heavy())?;
+ run_book(book, 1 << 24, RunOpts::default(), WarningOpts::deny_all(), CompileOpts::heavy())?;
Ok(format!("{}{}", display_readback_errors(&info.readback_errors), res))
}),
])
@@ -166,7 +166,7 @@ fn run_lazy() {
desugar_opts.lazy_mode();

// 1 million nodes for the test runtime. Smaller doesn't seem to make it any faster
- let (res, info) = run_book(book, 1 << 20, run_opts, WarningOpts::deny_all(), desugar_opts)?;
+ let (res, info) = run_book(book, 1 << 24, run_opts, WarningOpts::deny_all(), desugar_opts)?;
Ok(format!("{}{}", display_readback_errors(&info.readback_errors), res))
})
}
@@ -299,7 +299,7 @@ fn run_entrypoint() {
book.entrypoint = Some(Name::from("foo"));
// 1 million nodes for the test runtime. Smaller doesn't seem to make it any faster
let (res, info) =
- run_book(book, 1 << 20, RunOpts::default(), WarningOpts::deny_all(), CompileOpts::heavy())?;
+ run_book(book, 1 << 24, RunOpts::default(), WarningOpts::deny_all(), CompileOpts::heavy())?;
Ok(format!("{}{}", display_readback_errors(&info.readback_errors), res))
})
}