Skip to content

Commit

Permalink
dev: implement module dependency analysis (#41)
Browse files Browse the repository at this point in the history
  • Loading branch information
Myriad-Dreamin authored Mar 15, 2024
1 parent c88e37f commit fe25933
Show file tree
Hide file tree
Showing 15 changed files with 397 additions and 38 deletions.
1 change: 1 addition & 0 deletions crates/tinymist-query/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ serde.workspace = true
serde_json.workspace = true
parking_lot.workspace = true
ena.workspace = true
once_cell.workspace = true
fxhash.workspace = true
walkdir = "2"
indexmap = "2.1.0"
Expand Down
79 changes: 69 additions & 10 deletions crates/tinymist-query/src/analysis.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,77 @@
pub mod track_values;
pub use track_values::*;
pub mod lexical_hierarchy;
pub(crate) use lexical_hierarchy::*;
pub mod definition;
pub use definition::*;
pub mod import;
pub use import::*;
pub mod reference;
pub use reference::*;
pub mod def_use;
pub use def_use::*;
pub mod import;
pub use import::*;
pub mod lexical_hierarchy;
pub(crate) use lexical_hierarchy::*;
pub mod matcher;
pub use matcher::*;
pub mod module;
pub use module::*;
pub mod reference;
pub use reference::*;
pub mod track_values;
pub use track_values::*;

mod global;
pub use global::*;

#[cfg(test)]
mod module_tests {
    use serde_json::json;
    use typst_ts_core::path::unix_slash;
    use typst_ts_core::typst::prelude::EcoVec;

    use crate::analysis::module::*;
    use crate::prelude::*;
    use crate::tests::*;

    #[test]
    fn test() {
        snapshot_testing2("modules", &|ctx, _| {
            // Render a set of file ids as sorted, rooted unix-style paths so
            // the snapshot is stable across platforms and hash orders.
            fn sorted_paths(files: EcoVec<TypstFileId>) -> Vec<String> {
                let mut paths: Vec<String> = files
                    .into_iter()
                    .map(|id| unix_slash(id.vpath().as_rooted_path()))
                    .collect();
                paths.sort();
                paths
            }

            // One row per module: (path, dependencies, dependents).
            let mut rows: Vec<_> = construct_module_dependencies(ctx)
                .into_iter()
                .map(|(id, info)| {
                    (
                        unix_slash(id.vpath().as_rooted_path()),
                        sorted_paths(info.dependencies),
                        sorted_paths(info.dependents),
                    )
                })
                .collect();

            rows.sort();
            // remove /main.typ
            rows.retain(|(path, _, _)| path != "/main.typ");

            let rows: Vec<_> = rows
                .into_iter()
                .map(|(path, deps, dependents)| {
                    json!({
                        "id": path,
                        "dependencies": deps,
                        "dependents": dependents,
                    })
                })
                .collect();

            assert_snapshot!(JsonRepr::new_pure(rows));
        });
    }
}

#[cfg(test)]
mod lexical_hierarchy_tests {
Expand Down Expand Up @@ -43,8 +103,7 @@ mod lexical_hierarchy_tests {
snapshot_testing(set, &|world, path| {
let source = get_suitable_source_in_workspace(world, &path).unwrap();

let world: &dyn World = world;
let result = get_def_use(world.track(), source);
let result = get_def_use(&mut AnalysisContext::new(world), source);
let result = result.as_deref().map(DefUseSnapshot);

assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
Expand Down
41 changes: 28 additions & 13 deletions crates/tinymist-query/src/analysis/def_use.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,17 @@ use std::{
sync::Arc,
};

use comemo::Tracked;
use log::info;
use parking_lot::Mutex;
use serde::Serialize;
use typst::{syntax::Source, World};
use typst::syntax::Source;
use typst_ts_core::{path::unix_slash, TypstFileId};

use crate::{adt::snapshot_map::SnapshotMap, analysis::find_source_by_import_path};

use super::{
get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind, LexicalVarKind, ModSrc,
get_lexical_hierarchy, AnalysisContext, LexicalHierarchy, LexicalKind, LexicalScopeKind,
LexicalVarKind, ModSrc,
};

pub use typst_ts_core::vector::ir::DefId;
Expand Down Expand Up @@ -86,26 +86,36 @@ impl DefUseInfo {
}
}

pub fn get_def_use(world: Tracked<'_, dyn World>, source: Source) -> Option<Arc<DefUseInfo>> {
let ctx = SearchCtx {
world,
pub fn get_def_use<'a>(
world: &'a mut AnalysisContext<'a>,
source: Source,
) -> Option<Arc<DefUseInfo>> {
let mut ctx = SearchCtx {
ctx: world,
searched: Default::default(),
};

get_def_use_inner(&ctx, source)
get_def_use_inner(&mut ctx, source)
}

struct SearchCtx<'a> {
world: Tracked<'a, dyn World>,
struct SearchCtx<'w> {
ctx: &'w mut AnalysisContext<'w>,
searched: Mutex<HashSet<TypstFileId>>,
}

fn get_def_use_inner<'w>(ctx: &'w SearchCtx<'w>, source: Source) -> Option<Arc<DefUseInfo>> {
fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseInfo>> {
let current_id = source.id();
if !ctx.searched.lock().insert(current_id) {
return None;
}

ctx.ctx.get_mut(current_id);
let c = ctx.ctx.get(current_id).unwrap();

if let Some(info) = c.def_use() {
return Some(info);
}

let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;

let mut collector = DefUseCollector {
Expand All @@ -120,11 +130,16 @@ fn get_def_use_inner<'w>(ctx: &'w SearchCtx<'w>, source: Source) -> Option<Arc<D

collector.scan(&e);
collector.calc_exports();
Some(Arc::new(collector.info))
let res = Some(Arc::new(collector.info));

let c = ctx.ctx.get(current_id).unwrap();
// todo: cyclic import cause no any information
c.compute_def_use(|| res.clone());
res
}

struct DefUseCollector<'a, 'w> {
ctx: &'w SearchCtx<'w>,
ctx: &'a mut SearchCtx<'w>,
info: DefUseInfo,
label_scope: SnapshotMap<String, DefId>,
id_scope: SnapshotMap<String, DefId>,
Expand Down Expand Up @@ -183,7 +198,7 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
LexicalKind::Mod(super::LexicalModKind::Star) => {
if let Some(path) = self.current_path {
let external_info =
find_source_by_import_path(self.ctx.world, self.current_id, path)
find_source_by_import_path(self.ctx.ctx.world, self.current_id, path)
.and_then(|source| {
info!("diving source for def use: {:?}", source.id());
Some(source.id()).zip(get_def_use_inner(self.ctx, source))
Expand Down
8 changes: 4 additions & 4 deletions crates/tinymist-query/src/analysis/definition.rs
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ fn find_ref_in_import<'b, 'a>(

match imports {
ast::Imports::Wildcard => {
let dep = find_source_by_import(ctx.world, ctx.current, import_node)?;
let dep = find_source_by_import(ctx.world.deref(), ctx.current, import_node)?;
let res = find_definition_in_module(ctx, dep, name)?;
return Some(ImportRef::ExternalResolved(res));
}
Expand Down Expand Up @@ -293,7 +293,7 @@ fn find_syntax_definition<'b, 'a>(
match find_ref_in_import(self.ctx, import_node, self.name)? {
ImportRef::ModuleAs(ident) => {
let m = find_source_by_import(
self.ctx.world,
self.ctx.world.deref(),
self.ctx.current,
import_node,
)?;
Expand All @@ -305,7 +305,7 @@ fn find_syntax_definition<'b, 'a>(
}
ImportRef::Path(s) => {
let m = find_source_by_import(
self.ctx.world,
self.ctx.world.deref(),
self.ctx.current,
import_node,
)?;
Expand Down Expand Up @@ -397,7 +397,7 @@ pub(crate) fn find_definition<'a>(
ast::Expr::Str(..) => {
if let Some(parent) = ancestor.parent() {
let e = parent.cast::<ast::ModuleImport>()?;
let source = find_source_by_import(world, current, e)?;
let source = find_source_by_import(world.deref(), current, e)?;
let src = ancestor.find(e.source().span())?;
return Some(Definition::Module(ModuleDefinition {
module: source.id(),
Expand Down
150 changes: 150 additions & 0 deletions crates/tinymist-query/src/analysis/global.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
use std::{collections::HashMap, path::Path, sync::Arc};

use once_cell::sync::OnceCell;
use typst::{
diag::{eco_format, FileError, FileResult},
syntax::{Source, VirtualPath},
World,
};
use typst_ts_compiler::{service::WorkspaceProvider, TypstSystemWorld};
use typst_ts_core::{cow_mut::CowMut, ImmutPath, TypstFileId};

use super::DefUseInfo;

/// Per-module (per-file) cache of analysis artifacts.
pub struct ModuleAnalysisCache {
    // Lazily loaded source of the module; memoized result of `World::source`.
    source: OnceCell<FileResult<Source>>,
    // Lazily computed def-use info; the inner `None` records that the
    // computation ran but produced nothing, so it is not retried.
    def_use: OnceCell<Option<Arc<DefUseInfo>>>,
}

impl ModuleAnalysisCache {
    /// Returns the source of `file_id`, loading it from the world on first
    /// access and serving the memoized result on every later call.
    pub fn source(&self, ctx: &AnalysisContext, file_id: TypstFileId) -> FileResult<Source> {
        let cached = self.source.get_or_init(|| ctx.world.source(file_id));
        cached.clone()
    }

    /// Returns the cached def-use information, if it has been computed.
    pub fn def_use(&self) -> Option<Arc<DefUseInfo>> {
        self.def_use.get().and_then(|info| info.clone())
    }

    /// Computes the def-use information via `f` at most once and caches the
    /// outcome; later calls return the cached value without invoking `f`.
    pub fn compute_def_use(
        &self,
        f: impl FnOnce() -> Option<Arc<DefUseInfo>>,
    ) -> Option<Arc<DefUseInfo>> {
        let computed = self.def_use.get_or_init(f);
        computed.clone()
    }
}

/// Shared configuration for the analyzers.
pub struct Analysis {
    /// The root directory of the workspace being analyzed.
    pub root: ImmutPath,
}

/// Caches owned by an [`AnalysisContext`].
pub struct AnalysisCaches {
    // Per-file analysis caches, keyed by file id; populated on demand.
    modules: HashMap<TypstFileId, ModuleAnalysisCache>,
    // All source files found under the workspace root; scanned lazily once.
    root_files: OnceCell<Vec<TypstFileId>>,
}

// fn search_in_workspace(
// world: &TypstSystemWorld,
// def_id: TypstFileId,
// ident: &str,
// new_name: &str,
// editions: &mut HashMap<Url, Vec<TextEdit>>,
// wq: &mut WorkQueue,
// position_encoding: PositionEncoding,
// ) -> Option<()> {
// }

/// The context for running analyses against a single world snapshot.
pub struct AnalysisContext<'a> {
    /// The world providing compiler access to files and sources.
    pub world: &'a TypstSystemWorld,
    /// The shared analysis configuration (owned or borrowed).
    pub analysis: CowMut<'a, Analysis>,
    // Memoized per-file and workspace-wide results.
    caches: AnalysisCaches,
}

impl<'a> AnalysisContext<'a> {
    /// Creates a fresh context over `world`, rooted at the world's workspace
    /// root, with empty caches.
    pub fn new(world: &'a TypstSystemWorld) -> Self {
        Self {
            world,
            analysis: CowMut::Owned(Analysis {
                root: world.workspace_root(),
            }),
            caches: AnalysisCaches {
                modules: HashMap::new(),
                root_files: OnceCell::new(),
            },
        }
    }

    /// Test-only hook: seeds the workspace file list via `f` instead of
    /// scanning the disk (no-op if the list was already initialized).
    #[cfg(test)]
    pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) -> &Vec<TypstFileId> {
        self.caches.root_files.get_or_init(f)
    }

    /// Returns all Typst files under the root, scanning the disk lazily on
    /// the first call and serving the cached list afterwards.
    pub fn files(&mut self) -> &Vec<TypstFileId> {
        self.caches.root_files.get_or_init(|| self.search_files())
    }

    /// Returns the module cache for `file_id`, creating an empty one on
    /// first use.
    // NOTE(review): despite the name this returns a shared reference;
    // `&mut self` is only needed to insert into the map. Consider renaming
    // (e.g. `get_or_init`) — confirm against callers first.
    pub fn get_mut(&mut self, file_id: TypstFileId) -> &ModuleAnalysisCache {
        self.caches.modules.entry(file_id).or_insert_with(|| {
            let source = OnceCell::new();
            let def_use = OnceCell::new();
            ModuleAnalysisCache { source, def_use }
        })
    }

    /// Returns the module cache for `file_id`, or `None` if it was never
    /// created via `get_mut`.
    pub fn get(&self, file_id: TypstFileId) -> Option<&ModuleAnalysisCache> {
        self.caches.modules.get(&file_id)
    }

    /// Loads (and memoizes) the source for `id`.
    // The `get_mut` + `get` two-step avoids a borrow conflict: `source()`
    // needs `&self` (to read `self.world`), which could not coexist with a
    // live `&mut self` borrow returned by `get_mut`.
    pub fn source_by_id(&mut self, id: TypstFileId) -> FileResult<Source> {
        self.get_mut(id);
        self.get(id).unwrap().source(self, id)
    }

    /// Resolves an absolute path to a file id relative to the workspace
    /// root, then loads its source. Errors if `p` is outside the root.
    pub fn source_by_path(&mut self, p: &Path) -> FileResult<Source> {
        // todo: source in packages
        let relative_path = p.strip_prefix(&self.analysis.root).map_err(|_| {
            FileError::Other(Some(eco_format!(
                "not in root, path is {p:?}, root is {:?}",
                self.analysis.root
            )))
        })?;

        let id = TypstFileId::new(None, VirtualPath::new(relative_path));
        self.source_by_id(id)
    }

    /// Walks the workspace root and collects every `.typ`/`.typc` file as a
    /// root-relative file id. Symlinks are not followed.
    fn search_files(&self) -> Vec<TypstFileId> {
        let root = self.analysis.root.clone();

        let mut res = vec![];
        for path in walkdir::WalkDir::new(&root).follow_links(false).into_iter() {
            // Skip unreadable directory entries and anything that is not a
            // regular file.
            let Ok(de) = path else {
                continue;
            };
            if !de.file_type().is_file() {
                continue;
            }
            // Only Typst sources are of interest.
            if !de
                .path()
                .extension()
                .is_some_and(|e| e == "typ" || e == "typc")
            {
                continue;
            }

            let path = de.path();
            // Should not fail since the walk started at `root`, but be
            // defensive and skip (with a warning) rather than panic.
            let relative_path = match path.strip_prefix(&root) {
                Ok(p) => p,
                Err(err) => {
                    log::warn!("failed to strip prefix, path: {path:?}, root: {root:?}: {err}");
                    continue;
                }
            };

            res.push(TypstFileId::new(None, VirtualPath::new(relative_path)));
        }

        res
    }
}
Loading

0 comments on commit fe25933

Please sign in to comment.