feat: partially implement semantic highlight #36

Draft · wants to merge 2 commits into base: main
3 changes: 3 additions & 0 deletions client/src/index.ts
@@ -173,6 +173,9 @@ class Extension implements Disposable {
dimInactiveConfigItems: workspace
.getConfiguration()
.get<boolean>("vscode-kanata.dimInactiveConfigItems", true),
enableSemanticHighlight: workspace
.getConfiguration()
.get<boolean>("vscode-kanata.enableSemanticHighlight", true),
},
};

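Note: the new enableSemanticHighlight value is sent to the server along with the other workspace settings and is deserialized through the serde renames added to Config in kls/src/lib.rs below; the same camelCase keys appear verbatim in the object built in client/src/index.ts above. A minimal standalone sketch of that camelCase-to-snake_case mapping (the HighlightFlags struct is hypothetical and the example assumes serde and serde_json are available; it is not part of this diff):

use serde::Deserialize;

// Illustrative only: mirrors the #[serde(rename = "...")] attributes on Config.
#[derive(Deserialize)]
struct HighlightFlags {
    #[serde(rename = "dimInactiveConfigItems")]
    dim_inactive_config_items: bool,
    #[serde(rename = "enableSemanticHighlight")]
    enable_semantic_highlight: bool,
}

fn main() {
    let flags: HighlightFlags = serde_json::from_str(
        r#"{ "dimInactiveConfigItems": true, "enableSemanticHighlight": true }"#,
    )
    .expect("camelCase keys deserialize into snake_case fields");
    assert!(flags.enable_semantic_highlight);
}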
168 changes: 125 additions & 43 deletions kls/src/lib.rs
@@ -28,9 +28,11 @@ use lsp_types::{
SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensParams, SemanticTokensResult,
TextDocumentItem, TextDocumentSyncKind, TextEdit, Url, VersionedTextDocumentIdentifier,
};
use semantic_tokens::{SEMANTIC_TOKEN_MODIFIERS, SEMANTIC_TOKEN_TYPES};
use serde::Deserialize;
use serde_wasm_bindgen::{from_value, to_value};
use std::{
cmp::Ordering,
collections::{BTreeMap, HashMap},
fmt::Display,
path::{self, Path, PathBuf},
@@ -46,6 +48,7 @@ use helpers::{

mod formatter;
mod navigation;
mod semantic_tokens;

struct Kanata {
def_local_keys_variant_to_apply: String,
@@ -189,6 +192,8 @@ struct Config {
env_variables: HashMap<String, String>,
#[serde(rename = "dimInactiveConfigItems")]
dim_inactive_config_items: bool,
#[serde(rename = "enableSemanticHighlight")]
enable_semantic_highlight: bool,
}

#[derive(Debug, Deserialize, Clone, Copy)]
@@ -264,7 +269,7 @@ pub struct KanataLanguageServer {
workspace_options: WorkspaceOptions,
send_diagnostics_callback: js_sys::Function,
formatter: formatter::Formatter,
dim_inactive_config_items: bool,
config: Config,
}

/// Public API exposed via WASM.
@@ -304,7 +309,7 @@ impl KanataLanguageServer {
};

let workspace_options = WorkspaceOptions::from_config(&config, root_uri);
let env_vars: Vec<_> = config.env_variables.into_iter().collect();
let env_vars: Vec<_> = config.env_variables.clone().into_iter().collect();

log!("env variables: {:?}", &env_vars);

@@ -317,7 +322,7 @@ impl KanataLanguageServer {
},
workspace_options,
send_diagnostics_callback: send_diagnostics_callback.clone(),
dim_inactive_config_items: config.dim_inactive_config_items,
config,
}

// self_.reload_diagnostics_debouncer =
@@ -327,6 +332,9 @@ impl KanataLanguageServer {
// self_
}

    /// We don't do the actual initialization here; that happens in the constructor.
    /// This method only assembles an [InitializeResult] and returns it.
#[allow(unused_variables)]
#[wasm_bindgen(js_class = KanataLanguageServer, js_name = initialize)]
pub fn initialize(&mut self, params: JsValue) -> JsValue {
@@ -337,45 +345,6 @@ impl KanataLanguageServer {
}

fn initialize_impl(&mut self, _params: &InitializeParams) -> InitializeResult {
let sem_tokens_legend = SemanticTokensLegend {
token_types: vec![
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
SemanticTokenType::CLASS,
SemanticTokenType::ENUM,
SemanticTokenType::INTERFACE,
SemanticTokenType::STRUCT,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::PARAMETER,
SemanticTokenType::VARIABLE,
SemanticTokenType::PROPERTY,
SemanticTokenType::ENUM_MEMBER,
SemanticTokenType::EVENT,
SemanticTokenType::FUNCTION,
SemanticTokenType::METHOD,
SemanticTokenType::MACRO,
SemanticTokenType::KEYWORD,
SemanticTokenType::MODIFIER,
SemanticTokenType::COMMENT,
SemanticTokenType::STRING,
SemanticTokenType::NUMBER,
SemanticTokenType::REGEXP,
SemanticTokenType::OPERATOR,
],
token_modifiers: vec![
SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::READONLY,
SemanticTokenModifier::STATIC,
SemanticTokenModifier::DEPRECATED,
SemanticTokenModifier::ABSTRACT,
SemanticTokenModifier::ASYNC,
SemanticTokenModifier::MODIFICATION,
SemanticTokenModifier::DOCUMENTATION,
SemanticTokenModifier::DEFAULT_LIBRARY,
],
};

InitializeResult {
capabilities: lsp_types::ServerCapabilities {
// UTF-8 is not supported in vscode-languageserver/node. See:
@@ -410,6 +379,21 @@ impl KanataLanguageServer {
..Default::default()
}),
}),
semantic_tokens_provider: Some(
lsp_types::SemanticTokensServerCapabilities::SemanticTokensOptions(
lsp_types::SemanticTokensOptions {
work_done_progress_options: lsp_types::WorkDoneProgressOptions {
work_done_progress: Some(false),
},
legend: SemanticTokensLegend {
token_types: SEMANTIC_TOKEN_TYPES.into(),
token_modifiers: SEMANTIC_TOKEN_MODIFIERS.into(),
},
range: Some(false),
full: Some(SemanticTokensFullOptions::Bool(true)),
},
),
),
..Default::default()
},
server_info: None,
@@ -696,6 +680,104 @@ impl KanataLanguageServer {
Some(acc)
})
}

#[allow(unused_variables)]
#[wasm_bindgen(js_class = KanataLanguageServer, js_name = onSemanticTokens)]
pub fn on_semantic_tokens(&mut self, params: JsValue) -> JsValue {
type Params = <SemanticTokensFullRequest as Request>::Params;
type Result = <SemanticTokensFullRequest as Request>::Result;
let params = from_value::<Params>(params).expect("deserializes");
to_value::<Result>(&self.on_semantic_tokens_impl(&params)).expect("no conversion error")
}

fn on_semantic_tokens_impl(
&mut self,
params: &SemanticTokensParams,
) -> Option<SemanticTokensResult> {
if !self.config.enable_semantic_highlight {
return None;
}
        // FIXME: Block until all files in the workspace are loaded.
        // Otherwise (as is the case right now) semantic tokens are not loaded
        // properly on extension initialization, because of a race condition.

log!("server received SemanticTokensFullRequest");

let source_doc_url = &params.text_document.uri;
let (_, definition_locations_per_doc, reference_locations_per_doc) = self.parse();

let defs = match definition_locations_per_doc.get(source_doc_url) {
Some(x) => &x.0,
None => {
log!("semantic_tokens: BUG? current document not in parse() results");
return None;
}
};
let refs = match reference_locations_per_doc.get(source_doc_url) {
Some(x) => &x.0,
None => {
log!("semantic_tokens: BUG? current document not in parse() results");
return None;
}
};

let mut unsorted_tokens: Vec<SemanticTokenWithAbsoluteRange> = vec![];

let def_mod = &[SemanticTokenModifier::DEFINITION];

push_defs!(unsorted_tokens, defs.alias, VARIABLE, def_mod);
push_refs!(unsorted_tokens, refs.alias, VARIABLE, &[]);

push_defs!(unsorted_tokens, defs.variable, VARIABLE, def_mod);
push_refs!(unsorted_tokens, refs.variable, VARIABLE, &[]);

push_defs!(unsorted_tokens, defs.virtual_key, PROPERTY, def_mod);
push_refs!(unsorted_tokens, refs.virtual_key, PROPERTY, &[]);

push_defs!(unsorted_tokens, defs.layer, CLASS, def_mod);
push_refs!(unsorted_tokens, refs.layer, CLASS, &[]);

push_defs!(unsorted_tokens, defs.template, KEYWORD, def_mod);
push_refs!(unsorted_tokens, refs.template, KEYWORD, &[]);

push_refs!(unsorted_tokens, refs.include, PROPERTY, &[]);

log!("semantic_tokens: {} tokens total", unsorted_tokens.len());

        // Sort by start position; returning Equal for identical positions keeps
        // the comparator a valid total order.
        unsorted_tokens.sort_by(|t1, t2| {
            if t1.span.start() > t2.span.start() {
                Ordering::Greater
            } else if t1.span.start() < t2.span.start() {
                Ordering::Less
            } else {
                Ordering::Equal
            }
        });
let sorted_tokens = unsorted_tokens;

let mut result: Vec<SemanticToken> = Vec::with_capacity(sorted_tokens.len());

let mut prev_line = 0;
let mut prev_char = 0;
for tok in sorted_tokens.into_iter() {
let lsp_range = lsp_range_from_span(&tok.span);
if prev_line != lsp_range.start.line {
prev_char = 0;
}
result.push(SemanticToken {
delta_line: lsp_range.start.line - prev_line,
delta_start: lsp_range.start.character - prev_char,
length: (tok.span.end.absolute - tok.span.start.absolute) as u32,
token_type: tok.token_type,
token_modifiers_bitset: tok.token_modifiers_bitset,
});
prev_line = lsp_range.start.line;
prev_char = lsp_range.start.character;
}

Some(SemanticTokensResult::Tokens(SemanticTokens {
result_id: None,
data: result,
}))
}
}

/// Individual LSP notification handlers.
Expand Down Expand Up @@ -1042,7 +1124,7 @@ impl KanataLanguageServer {

let mut diagnostics = self.empty_diagnostics_for_all_documents();
diagnostics.extend(new_error_diags);
if self.dim_inactive_config_items {
if self.config.dim_inactive_config_items {
diagnostics.extend(new_inactive_codes_diags);
}
(diagnostics, identifiers, references)
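A note on on_semantic_tokens_impl above: LSP semantic tokens are transmitted delta-encoded, where delta_line is relative to the previous token's line and delta_start is relative to the previous token's start character only when both tokens share a line, which is why prev_char resets to 0 on a line change. Because the registered capability sets range: Some(false) and full: Bool(true), only full-document requests are expected. A standalone sketch of the same delta transformation on plain (line, start_char, length) tuples (illustrative, not part of the diff):

// Absolute (line, start_char, length) triples, already sorted by position.
fn to_deltas(absolute: &[(u32, u32, u32)]) -> Vec<(u32, u32, u32)> {
    let (mut prev_line, mut prev_char) = (0u32, 0u32);
    let mut out = Vec::with_capacity(absolute.len());
    for &(line, ch, len) in absolute {
        if prev_line != line {
            prev_char = 0; // delta_start becomes absolute on a new line
        }
        out.push((line - prev_line, ch - prev_char, len));
        prev_line = line;
        prev_char = ch;
    }
    out
}

fn main() {
    // Two tokens on line 2 (columns 0 and 10) and one on line 5 (column 4).
    let deltas = to_deltas(&[(2, 0, 5), (2, 10, 3), (5, 4, 7)]);
    assert_eq!(deltas, vec![(2, 0, 5), (0, 10, 3), (3, 4, 7)]);
}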
120 changes: 120 additions & 0 deletions kls/src/semantic_tokens.rs
@@ -0,0 +1,120 @@
use kanata_parser::cfg::sexpr::Span;
use lsp_types::{SemanticTokenModifier, SemanticTokenType};

/// Global enable/disable of certain semantic token types.
pub const SEMANTIC_TOKEN_TYPES: &[SemanticTokenType] = &[
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
SemanticTokenType::CLASS,
SemanticTokenType::ENUM,
SemanticTokenType::INTERFACE,
SemanticTokenType::STRUCT,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::PARAMETER,
SemanticTokenType::VARIABLE,
SemanticTokenType::PROPERTY,
SemanticTokenType::ENUM_MEMBER,
SemanticTokenType::EVENT,
SemanticTokenType::FUNCTION,
SemanticTokenType::METHOD,
SemanticTokenType::MACRO,
SemanticTokenType::KEYWORD,
SemanticTokenType::MODIFIER,
SemanticTokenType::COMMENT,
SemanticTokenType::STRING,
SemanticTokenType::NUMBER,
SemanticTokenType::REGEXP,
SemanticTokenType::OPERATOR,
];

/// Global enable/disable of certain semantic token modifiers.
pub const SEMANTIC_TOKEN_MODIFIERS: &[SemanticTokenModifier] = &[
// SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::DEFINITION,
// SemanticTokenModifier::READONLY,
// SemanticTokenModifier::STATIC,
// SemanticTokenModifier::DEPRECATED, // potentially could be useful I think
// SemanticTokenModifier::ABSTRACT,
// SemanticTokenModifier::ASYNC,
// SemanticTokenModifier::MODIFICATION,
// SemanticTokenModifier::DOCUMENTATION,
// SemanticTokenModifier::DEFAULT_LIBRARY,
];

pub fn index_of_token_type(t: SemanticTokenType) -> Option<u32> {
    SEMANTIC_TOKEN_TYPES
        .iter()
        .position(|type_| type_ == &t)
        .map(|i| i as u32)
}

fn index_of_token_modifier(t: SemanticTokenModifier) -> Option<u32> {
    SEMANTIC_TOKEN_MODIFIERS
        .iter()
        .position(|type_| type_ == &t)
        .map(|i| i as u32)
}

pub fn bitset_of_token_modifiers(mods: &[SemanticTokenModifier]) -> u32 {
mods.iter()
.filter_map(|mod_| index_of_token_modifier(mod_.clone()))
.fold(0, |acc, i| acc | 1 << i)
}

#[test]
fn bitset_of_token_modifiers_works() {
    // DEFINITION is the only modifier currently enabled in SEMANTIC_TOKEN_MODIFIERS
    // (index 0), so its bit is 1; modifiers that are commented out contribute nothing.
    assert_eq!(
        bitset_of_token_modifiers(&[SemanticTokenModifier::DEFINITION]),
        1
    );
    assert_eq!(
        bitset_of_token_modifiers(&[
            SemanticTokenModifier::DECLARATION,
            SemanticTokenModifier::READONLY
        ]),
        0
    );
}

#[derive(Debug, Eq, PartialEq, Clone, Default)]
pub struct SemanticTokenWithAbsoluteRange {
pub span: Span,
pub token_type: u32,
pub token_modifiers_bitset: u32,
}

#[macro_export]
macro_rules! push_defs {
($results:expr, $defs:expr, $token_type:ident, $token_modifiers:expr) => {
#[allow(unused)]
use $crate::semantic_tokens::*;
if let Some(token_type_index) = index_of_token_type(SemanticTokenType::$token_type) {
for (_, span) in $defs.iter() {
$results.push(SemanticTokenWithAbsoluteRange {
span: span.clone(),
token_type: token_type_index,
token_modifiers_bitset: bitset_of_token_modifiers($token_modifiers),
});
}
}
};
}

#[macro_export]
macro_rules! push_refs {
($results:expr, $refs:expr, $token_type:ident, $token_modifiers:expr) => {
#[allow(unused)]
use $crate::semantic_tokens::*;
if let Some(token_type_index) = index_of_token_type(SemanticTokenType::$token_type) {
for (_, spans) in $refs.0.iter() {
for span in spans.iter() {
$results.push(SemanticTokenWithAbsoluteRange {
span: span.clone(),
token_type: token_type_index,
token_modifiers_bitset: bitset_of_token_modifiers($token_modifiers),
});
}
}
}
};
}
5 changes: 5 additions & 0 deletions package.json
@@ -124,6 +124,11 @@
"type": "boolean",
"default": true,
"markdownDescription": "Gray-out configuration items that are not applicable with the current settings (`deflocalkeys-*`, `defaliasenvcond`, `platform`)"
},
"vscode-kanata.enableSemanticHighlight": {
"type": "boolean",
"default": false,
"markdownDescription": "Enable semantic highlight (experimental)"
}
}
}