
Commit ff81f90
rebase main
Signed-off-by: he1pa <[email protected]>
He1pa committed Sep 18, 2024
1 parent 1d8b41b commit ff81f90
Showing 1 changed file with 112 additions and 120 deletions.
232 changes: 112 additions & 120 deletions kclvm/parser/src/lib.rs
@@ -310,7 +310,6 @@ struct Loader {
sess: ParseSessionRef,
paths: Vec<String>,
opts: LoadProgramOptions,
missing_pkgs: Vec<String>,
module_cache: Option<KCLModuleCache>,
file_graph: FileGraphCache,
}
@@ -330,7 +329,6 @@ impl Loader {
.collect(),
opts: opts.unwrap_or_default(),
module_cache,
missing_pkgs: Default::default(),
file_graph: FileGraphCache::default(),
}
}
@@ -341,123 +339,121 @@ impl Loader {
}

fn _load_main(&mut self) -> Result<LoadProgramResult> {
// for test:

// let asts = match &self.module_cache {
// Some(c) => c.read().unwrap().asts.clone(),
// None => ASTCache::default(),
// };
// return parse_kcl_program(
// self.sess.clone(),
// self.paths.clone(),
// asts,
// self.file_graph.clone(),
// &self.opts,
// );

let compile_entries = get_compile_entries_from_paths(&self.paths, &self.opts)?;
let workdir = compile_entries.get_root_path().to_string();
let mut pkgs = HashMap::new();
let mut pkg_files = Vec::new();
// update cache
if let Some(module_cache) = self.module_cache.as_ref() {
for entry in compile_entries.iter() {
let k_files = entry.get_k_files();
let maybe_k_codes = entry.get_k_codes();
// update main pkg ast cache
for (i, filename) in k_files.iter().enumerate() {
let m = parse_file_with_session(
self.sess.clone(),
filename,
maybe_k_codes[i].clone(),
)?;
let mut module_cache_ref = module_cache.write().unwrap();
module_cache_ref
.ast_cache
.insert(filename.clone(), m.clone());
}
// update invalidate module ast cache
let mut module_cache_ref = module_cache.write().unwrap();
let invalidate_module = module_cache_ref.invalidate_module.clone();
module_cache_ref.invalidate_module.clear();
drop(module_cache_ref);

for (filename, code) in invalidate_module.iter() {
let m = parse_file_with_session(self.sess.clone(), filename, code.clone())?;
let mut module_cache_ref = module_cache.write().unwrap();
module_cache_ref
.ast_cache
.insert(filename.clone(), m.clone());
}
}
}

for entry in compile_entries.iter() {
let k_files = entry.get_k_files();
let maybe_k_codes = entry.get_k_codes();
// Load main package.
for (i, filename) in k_files.iter().enumerate() {
let mut m = if let Some(module_cache) = self.module_cache.as_ref() {
let module_cache_ref = module_cache.read().unwrap();
module_cache_ref.ast_cache.get(filename).unwrap().clone()
} else {
parse_file_with_session(self.sess.clone(), filename, maybe_k_codes[i].clone())?
};
fix_rel_import_path(entry.path(), &mut m);
pkg_files.push(m);
}

// Insert an empty vec to determine whether there is a circular import.
pkgs.insert(kclvm_ast::MAIN_PKG.to_string(), vec![]);
self.load_import_package(
entry.path(),
entry.name().to_string(),
kclvm_ast::MAIN_PKG.to_string(),
&mut pkg_files,
&mut pkgs,
)?;
}
// Insert the complete ast to replace the empty list.
pkgs.insert(kclvm_ast::MAIN_PKG.to_string(), pkg_files);
let program = ast::Program {
root: workdir,
pkgs,
let asts = match &self.module_cache {
Some(c) => c.read().unwrap().asts.clone(),
None => ASTCache::default(),
};
// Return the files in the order they should be compiled
let file_graph = self.file_graph.read().unwrap();
let paths = match file_graph.toposort() {
Ok(files) => files,
Err(cycle) => {
let formatted_cycle = cycle
.iter()
.map(|file| format!("- {}\n", file.path.to_string_lossy()))
.collect::<String>();

self.sess.1.write().add_error(
ErrorKind::RecursiveLoad,
&[Message {
range: (Position::dummy_pos(), Position::dummy_pos()),
style: Style::Line,
message: format!(
"Could not compiles due to cyclic import statements\n{}",
formatted_cycle.trim_end()
),
note: None,
suggested_replacement: None,
}],
);
parse_kcl_program(
self.sess.clone(),
self.paths.clone(),
asts,
self.file_graph.clone(),
&self.opts,
)

// Return a list of all paths.
file_graph.paths()
}
};
drop(file_graph);
// let compile_entries = get_compile_entries_from_paths(&self.paths, &self.opts)?;
// let workdir = compile_entries.get_root_path().to_string();
// let mut pkgs = HashMap::new();
// let mut pkg_files = Vec::new();
// // update cache
// if let Some(module_cache) = self.module_cache.as_ref() {
// for entry in compile_entries.iter() {
// let k_files = entry.get_k_files();
// let maybe_k_codes = entry.get_k_codes();
// // update main pkg ast cache
// for (i, filename) in k_files.iter().enumerate() {
// let m = parse_file_with_session(
// self.sess.clone(),
// filename,
// maybe_k_codes[i].clone(),
// )?;
// let mut module_cache_ref = module_cache.write().unwrap();
// module_cache_ref
// .ast_cache
// .insert(filename.clone(), m.clone());
// }
// // update invalidate module ast cache
// let mut module_cache_ref = module_cache.write().unwrap();
// let invalidate_module = module_cache_ref.invalidate_module.clone();
// module_cache_ref.invalidate_module.clear();
// drop(module_cache_ref);

// for (filename, code) in invalidate_module.iter() {
// let m = parse_file_with_session(self.sess.clone(), filename, code.clone())?;
// let mut module_cache_ref = module_cache.write().unwrap();
// module_cache_ref
// .ast_cache
// .insert(filename.clone(), m.clone());
// }
// }
// }

// for entry in compile_entries.iter() {
// let k_files = entry.get_k_files();
// let maybe_k_codes = entry.get_k_codes();
// // Load main package.
// for (i, filename) in k_files.iter().enumerate() {
// let mut m = if let Some(module_cache) = self.module_cache.as_ref() {
// let module_cache_ref = module_cache.read().unwrap();
// module_cache_ref.ast_cache.get(filename).unwrap().clone()
// } else {
// parse_file_with_session(self.sess.clone(), filename, maybe_k_codes[i].clone())?
// };
// fix_rel_import_path(entry.path(), &mut m);
// pkg_files.push(m);
// }

// // Insert an empty vec to determine whether there is a circular import.
// pkgs.insert(kclvm_ast::MAIN_PKG.to_string(), vec![]);
// self.load_import_package(
// entry.path(),
// entry.name().to_string(),
// kclvm_ast::MAIN_PKG.to_string(),
// &mut pkg_files,
// &mut pkgs,
// )?;
// }
// // Insert the complete ast to replace the empty list.
// pkgs.insert(kclvm_ast::MAIN_PKG.to_string(), pkg_files);
// let program = ast::Program {
// root: workdir,
// pkgs,
// };
// // Return the files in the order they should be compiled
// let file_graph = self.file_graph.read().unwrap();
// let paths = match file_graph.toposort() {
// Ok(files) => files,
// Err(cycle) => {
// let formatted_cycle = cycle
// .iter()
// .map(|file| format!("- {}\n", file.path.to_string_lossy()))
// .collect::<String>();

// self.sess.1.write().add_error(
// ErrorKind::RecursiveLoad,
// &[Message {
// range: (Position::dummy_pos(), Position::dummy_pos()),
// style: Style::Line,
// message: format!(
// "Could not compiles due to cyclic import statements\n{}",
// formatted_cycle.trim_end()
// ),
// note: None,
// suggested_replacement: None,
// }],
// );

// // Return a list of all paths.
// file_graph.paths()
// }
// };
// drop(file_graph);

Ok(LoadProgramResult {
program,
errors: self.sess.1.read().diagnostics.clone(),
paths: paths.iter().map(|file| file.path.clone()).collect(),
})
// Ok(LoadProgramResult {
// program,
// errors: self.sess.1.read().diagnostics.clone(),
// paths: paths.iter().map(|file| file.path.clone()).collect(),
// })
}

/// [`find_packages`] will find the kcl package.
@@ -615,10 +611,6 @@ impl Loader {
if pkgs.contains_key(&pkgpath) {
return Ok(None);
}
if self.missing_pkgs.contains(&pkgpath) {
return Ok(None);
}

// plugin pkgs
if is_plugin_pkg(pkgpath.as_str()) {
if !self.opts.load_plugins {
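Taken together, the added lines above turn `_load_main` into a thin wrapper around `parse_kcl_program`. The sketch below shows the resulting body with the commented-out old implementation elided; it assumes `parse_kcl_program` now covers the entry resolution, AST caching, import loading, and file ordering that were previously inline:

    fn _load_main(&mut self) -> Result<LoadProgramResult> {
        // Reuse the shared AST cache when a module cache is present; otherwise start empty.
        let asts = match &self.module_cache {
            Some(c) => c.read().unwrap().asts.clone(),
            None => ASTCache::default(),
        };
        // Delegate the rest of the work to parse_kcl_program.
        parse_kcl_program(
            self.sess.clone(),
            self.paths.clone(),
            asts,
            self.file_graph.clone(),
            &self.opts,
        )
        // ... previous implementation retained as a commented-out block ...
    }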
