diff --git a/src/elements/script.rs b/src/elements/script.rs
index a7989f6..61b13a8 100644
--- a/src/elements/script.rs
+++ b/src/elements/script.rs
@@ -307,7 +307,7 @@ impl RegexRule for ScriptRule {
 			});
 			if !label.is_empty() {
 				label.pop();
-				hints.add(matches.get(0).unwrap().end() - 1, label);
+				hints.add(matches.get(0).unwrap().end(), label);
 			}
 		}
 
diff --git a/src/elements/variable.rs b/src/elements/variable.rs
index 3c480ab..112be44 100644
--- a/src/elements/variable.rs
+++ b/src/elements/variable.rs
@@ -12,6 +12,8 @@ use crate::parser::reports::*;
 use crate::parser::rule::RegexRule;
 use crate::parser::source::Token;
 use ariadne::Fmt;
+use lsp::definition;
+use lsp::hints::Hints;
 use mlua::Function;
 use mlua::Lua;
 use regex::Regex;
@@ -430,6 +432,16 @@ impl RegexRule for VariableSubstitutionRule {
 			sems.add(name.end..name.end + 1, tokens.variable_sub_sep);
 		}
 
+		if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp) {
+			let label = variable.to_string();
+			if !label.is_empty() {
+				hints.add(matches.get(0).unwrap().end(), label);
+			}
+		}
+
+		// Add definition
+		definition::from_source(token, variable.location(), &state.shared.lsp);
+
 		reports
 	}
 }
diff --git a/src/lsp/data.rs b/src/lsp/data.rs
index ecbad75..32847c7 100644
--- a/src/lsp/data.rs
+++ b/src/lsp/data.rs
@@ -3,6 +3,7 @@ use std::rc::Rc;
 
 use crate::parser::source::Source;
 
+use super::definition::DefinitionData;
 use super::hints::HintsData;
 use super::semantic::SemanticsData;
 use super::semantic::Tokens;
@@ -12,6 +13,7 @@ pub struct LSPData {
 	pub semantic_tokens: Tokens,
 	pub semantic_data: HashMap<Rc<dyn Source>, SemanticsData>,
 	pub inlay_hints: HashMap<Rc<dyn Source>, HintsData>,
+	pub definitions: HashMap<Rc<dyn Source>, DefinitionData>,
 }
 
 impl LSPData {
@@ -20,6 +22,23 @@ impl LSPData {
 			semantic_tokens: Tokens::new(),
 			semantic_data: HashMap::new(),
 			inlay_hints: HashMap::new(),
+			definitions: HashMap::new(),
 		}
 	}
+
+	/// Method that must be called when a source is added
+	pub fn new_source(&mut self, source: Rc<dyn Source>) {
+		if !self.semantic_data.contains_key(&source) {
+			self.semantic_data
+				.insert(source.clone(), SemanticsData::new(source.clone()));
+		}
+		if !self.inlay_hints.contains_key(&source) {
+			self.inlay_hints
+				.insert(source.clone(), HintsData::new(source.clone()));
+		}
+		if !self.definitions.contains_key(&source) {
+			self.definitions
+				.insert(source.clone(), DefinitionData::new());
+		}
+	}
 }
diff --git a/src/lsp/definition.rs b/src/lsp/definition.rs
new file mode 100644
index 0000000..5efeb4b
--- /dev/null
+++ b/src/lsp/definition.rs
@@ -0,0 +1,113 @@
+use std::cell::RefCell;
+use std::rc::Rc;
+
+use tower_lsp::lsp_types::Location;
+use tower_lsp::lsp_types::Position;
+use tower_lsp::lsp_types::Range;
+use tower_lsp::lsp_types::Url;
+
+use crate::parser::source::LineCursor;
+use crate::parser::source::Source;
+use crate::parser::source::SourceFile;
+use crate::parser::source::SourcePosition;
+use crate::parser::source::Token;
+use crate::parser::source::VirtualSource;
+
+use super::data::LSPData;
+
+/// Per file definitions
+#[derive(Debug)]
+pub struct DefinitionData {
+	/// The definitions
+	pub definitions: RefCell<Vec<(Location, Range)>>,
+}
+
+impl DefinitionData {
+	pub fn new() -> Self {
+		Self {
+			definitions: RefCell::new(vec![]),
+		}
+	}
+}
+
+fn from_source_impl(
+	source: Rc<dyn Source>,
+	target: &Token,
+	lsp: &Option<RefCell<LSPData>>,
+	original: Token,
+) {
+	if (source.name().starts_with(":LUA:") || source.name().starts_with(":VAR:"))
+		&& source.downcast_ref::<VirtualSource>().is_some()
+	{
+		return;
+	}
+
+	if let Some(location) = source
+		.clone()
+		.downcast_rc::<VirtualSource>()
+		.ok()
+		.as_ref()
+		.map(|parent| parent.location())
+		.unwrap_or(None)
+	{
+		return from_source_impl(location.source(), target, lsp, original);
+	} else if let Ok(sourcefile) = source.downcast_rc::<SourceFile>() {
+		let borrow = lsp.as_ref().unwrap().borrow();
+		let definitions = borrow.definitions.get(&original.source()).unwrap();
+		let mut db = definitions.definitions.borrow_mut();
+		{
+			let token = original.source().original_range(original.range).1;
+
+			// Resolve target
+			let mut target_cursor = LineCursor::new(target.source());
+			let original_target = target.source().original_range(target.range.clone());
+			target_cursor.move_to(original_target.1.start);
+			let target_start = Position {
+				line: target_cursor.line as u32,
+				character: target_cursor.line_pos as u32,
+			};
+			target_cursor.move_to(original_target.1.end);
+			let target_end = Position {
+				line: target_cursor.line as u32,
+				character: target_cursor.line_pos as u32,
+			};
+
+			// Resolve source
+			let mut source_cursor = LineCursor::new(sourcefile);
+			source_cursor.move_to(token.start);
+			let source_start = Position {
+				line: source_cursor.line as u32,
+				character: source_cursor.line_pos as u32,
+			};
+			source_cursor.move_to(token.end);
+			let source_end = Position {
+				line: source_cursor.line as u32,
+				character: source_cursor.line_pos as u32,
+			};
+
+			// Add definition
+			let target_path = std::fs::canonicalize(original_target.0.name().as_str()).unwrap();
+			let uri = Url::from_file_path(target_path).unwrap();
+			db.push((
+				Location {
+					uri,
+					range: Range {
+						start: target_start,
+						end: target_end,
+					},
+				},
+				Range {
+					start: source_start,
+					end: source_end,
+				},
+			))
+		}
+	}
+}
+
+pub fn from_source(source: Token, target: &Token, lsp: &Option<RefCell<LSPData>>) {
+	if lsp.is_none() {
+		return;
+	}
+	from_source_impl(source.source(), target, lsp, source)
+}
diff --git a/src/lsp/mod.rs b/src/lsp/mod.rs
index 5f8a00e..ba6ed88 100644
--- a/src/lsp/mod.rs
+++ b/src/lsp/mod.rs
@@ -1,3 +1,4 @@
 pub mod data;
+pub mod definition;
 pub mod hints;
 pub mod semantic;
diff --git a/src/lsp/semantic.rs b/src/lsp/semantic.rs
index 22fcebb..82ffad6 100644
--- a/src/lsp/semantic.rs
+++ b/src/lsp/semantic.rs
@@ -435,10 +435,6 @@ impl<'a> Semantics<'a> {
 	/// Add a semantic token to be processed instantly
 	pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
 		let range = self.original_source.original_range(range).1;
-		eprintln!(
-			"Added {token:#?} range={range:#?} source={:#?}",
-			self.original_source
-		);
 		self.process_queue(range.start);
 		self.add_impl(range, token);
 	}
diff --git a/src/parser/langparser.rs b/src/parser/langparser.rs
index a9f8197..7c81dbe 100644
--- a/src/parser/langparser.rs
+++ b/src/parser/langparser.rs
@@ -6,6 +6,7 @@ use crate::document::document::Document;
 use crate::document::element::DocumentEnd;
 use crate::document::langdocument::LangDocument;
 use crate::elements::text::Text;
+use crate::lsp::definition::DefinitionData;
 use crate::lsp::hints::HintsData;
 use crate::lsp::semantic::Semantics;
 use crate::lsp::semantic::SemanticsData;
@@ -141,15 +142,7 @@ impl<'b> Parser for LangParser<'b> {
 			source.clone().downcast_rc::<SourceFile>().ok(),
 			state.shared.lsp.as_ref(),
 		) {
-			let mut b = lsp.borrow_mut();
-			if !b.semantic_data.contains_key(&source) {
-				b.semantic_data
-					.insert(source.clone(), SemanticsData::new(source.clone()));
-			}
-			if !b.inlay_hints.contains_key(&source) {
-				b.inlay_hints
-					.insert(source.clone(), HintsData::new(source.clone()));
-			}
+			lsp.borrow_mut().new_source(source.clone());
 		}
 
 		let content = source.content();
diff --git a/src/server.rs b/src/server.rs
index a05e246..036e9ad 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -25,6 +25,7 @@ use tower_lsp::Server;
 struct Backend {
 	client: Client,
 	document_map: DashMap<String, String>,
+	definition_map: DashMap<String, Vec<(Location, Range)>>,
 	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
 	diagnostic_map: DashMap<String, Vec<Diagnostic>>,
 	hints_map: DashMap<String, Vec<InlayHint>>,
@@ -95,6 +96,22 @@ impl Backend {
 				}
 			}
 		}
+
+		// Definitions
+		if let Some(lsp) = state.shared.lsp.as_ref() {
+			let borrow = lsp.borrow();
+			for (source, definitions) in &borrow.definitions {
+				if let Some(path) = source
+					.clone()
+					.downcast_rc::<SourceFile>()
+					.ok()
+					.map(|source| source.path().to_owned())
+				{
+					self.definition_map
+						.insert(path, definitions.definitions.replace(vec![]));
+				}
+			}
+		}
 	}
 }
 
@@ -109,6 +126,7 @@ impl LanguageServer for Backend {
 				text_document_sync: Some(TextDocumentSyncCapability::Kind(
 					TextDocumentSyncKind::FULL,
 				)),
+				definition_provider: Some(OneOf::Left(true)),
 				completion_provider: Some(CompletionOptions {
 					resolve_provider: Some(false),
 					trigger_characters: Some(vec!["%".to_string()]),
@@ -186,6 +204,42 @@ impl LanguageServer for Backend {
 			.await
 	}
 
+	async fn goto_definition(
+		&self,
+		params: GotoDefinitionParams,
+	) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
+		let uri = &params.text_document_position_params.text_document.uri;
+		let pos = &params.text_document_position_params.position;
+
+		if let Some(definitions) = self.definition_map.get(uri.as_str()) {
+			let index = definitions.binary_search_by(|(_, range)| {
+				if range.start.line > pos.line {
+					std::cmp::Ordering::Greater
+				} else if range.end.line <= pos.line {
+					if range.start.line == pos.line && range.start.character <= pos.character {
+						std::cmp::Ordering::Equal
+					} else if range.end.line == pos.line && range.end.character >= pos.character {
+						std::cmp::Ordering::Equal
+					} else if range.start.line < pos.line && range.end.line > pos.line {
+						std::cmp::Ordering::Equal
+					} else {
+						std::cmp::Ordering::Less
+					}
+				} else {
+					std::cmp::Ordering::Less
+				}
+			});
+			if let Ok(index) = index {
+				let loc = self.definition_map.get(uri.as_str()).as_ref().unwrap()[index]
+					.0
+					.clone();
+				return Ok(Some(GotoDefinitionResponse::Scalar(loc)));
+			}
+		}
+
+		Err(tower_lsp::jsonrpc::Error::method_not_found())
+	}
+
 	async fn completion(
 		&self,
 		_params: CompletionParams,
@@ -262,6 +316,7 @@ async fn main() {
 	let (service, socket) = LspService::new(|client| Backend {
 		client,
 		document_map: DashMap::new(),
+		definition_map: DashMap::new(),
 		semantic_token_map: DashMap::new(),
 		diagnostic_map: DashMap::new(),
 		hints_map: DashMap::new(),
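
Note (not part of the patch): the `goto_definition` lookup above assumes the per-file `Vec<(Location, Range)>` is sorted by source range and non-overlapping, so the cursor position can be located with `binary_search_by`. The sketch below is an illustrative, simplified containment comparator for the same idea, assuming the `tower_lsp::lsp_types` `Position`/`Range` types used in the patch and treating the range end as inclusive, as the patch does; it is not the comparator from the patch itself.

use std::cmp::Ordering;
use tower_lsp::lsp_types::{Position, Range};

/// Orders a stored definition `range` against the cursor position `pos`:
/// `Greater` if the range starts after the cursor, `Less` if it ends before it,
/// `Equal` when the cursor falls inside the (end-inclusive) range.
fn cmp_range_to_pos(range: &Range, pos: &Position) -> Ordering {
	if (range.start.line, range.start.character) > (pos.line, pos.character) {
		Ordering::Greater
	} else if (range.end.line, range.end.character) < (pos.line, pos.character) {
		Ordering::Less
	} else {
		Ordering::Equal
	}
}

// Usage against a sorted, non-overlapping `defs: &[(Location, Range)]`,
// mirroring the structure stored in `definition_map`:
//     defs.binary_search_by(|(_, range)| cmp_range_to_pos(range, pos))
// returns `Ok(index)` when `pos` lies inside `defs[index].1`.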