diff --git a/src/elements/blockquote.rs b/src/elements/blockquote.rs index 300cd1f..0040a96 100644 --- a/src/elements/blockquote.rs +++ b/src/elements/blockquote.rs @@ -287,8 +287,12 @@ impl Rule for BlockquoteRule { Semantics::from_source(cursor.source.clone(), &state.shared.lsp) { let range = captures.get(0).unwrap().range(); - let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start }; - sems.add(start..start+1, tokens.blockquote_marker); + let start = if content.as_bytes()[range.start] == b'\n' { + range.start + 1 + } else { + range.start + }; + sems.add(start..start + 1, tokens.blockquote_marker); if let Some(props) = captures.get(1).map(|m| m.range()) { sems.add(props.start - 1..props.start, tokens.blockquote_props_sep); sems.add(props.clone(), tokens.blockquote_props); @@ -309,8 +313,9 @@ impl Rule for BlockquoteRule { // Offset let last = offsets.last().map_or(0, |(_, last)| *last); offsets.push(( - entry_content.len(), - last + (captures.get(1).unwrap().start() - captures.get(0).unwrap().start() - 1) as isize + entry_content.len(), + last + (captures.get(1).unwrap().start() - captures.get(0).unwrap().start() - 1) + as isize, )); entry_content += "\n"; @@ -320,8 +325,12 @@ impl Rule for BlockquoteRule { Semantics::from_source(cursor.source.clone(), &state.shared.lsp) { let range = captures.get(0).unwrap().range(); - let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start }; - sems.add_to_queue(start..start+1, tokens.blockquote_marker); + let start = if content.as_bytes()[range.start] == b'\n' { + range.start + 1 + } else { + range.start + }; + sems.add_to_queue(start..start + 1, tokens.blockquote_marker); } } @@ -331,7 +340,7 @@ impl Rule for BlockquoteRule { token.clone(), "Blockquote Entry".to_string(), entry_content, - offsets + offsets, )); // Parse content let parsed_doc = state.with_state(|new_state| { diff --git a/src/elements/code.rs b/src/elements/code.rs index 
65c73a1..50fad92 100644 --- a/src/elements/code.rs +++ b/src/elements/code.rs @@ -503,9 +503,7 @@ impl RegexRule for CodeRule { ); } - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let range = matches .get(0) .map(|m| { diff --git a/src/elements/comment.rs b/src/elements/comment.rs index b62eba0..d9e3eed 100644 --- a/src/elements/comment.rs +++ b/src/elements/comment.rs @@ -90,9 +90,7 @@ impl RegexRule for CommentRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let comment = matches.get(1).unwrap().range(); sems.add(comment.start - 2..comment.end, tokens.comment); } diff --git a/src/elements/customstyle.rs b/src/elements/customstyle.rs index 0772ef8..75ed23b 100644 --- a/src/elements/customstyle.rs +++ b/src/elements/customstyle.rs @@ -52,11 +52,7 @@ impl CustomStyle for LuaCustomStyle { let kernel: Ref<'_, Kernel> = Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap()); //let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap()); - let mut ctx = KernelContext::new( - location.clone(), - state, - document, - ); + let mut ctx = KernelContext::new(location.clone(), state, document); let mut reports = vec![]; kernel.run_with_context(&mut ctx, |lua| { @@ -86,11 +82,7 @@ impl CustomStyle for LuaCustomStyle { ) -> Vec { let kernel: Ref<'_, Kernel> = Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap()); - let mut ctx = KernelContext::new( - location.clone(), - state, - document, - ); + let mut ctx = KernelContext::new(location.clone(), state, document); let mut reports = vec![]; kernel.run_with_context(&mut ctx, |lua| { @@ -337,9 +329,7 @@ impl Rule for CustomStyleRule { style.on_start(token.clone(), state, document) }; 
- if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { sems.add(token.range.clone(), tokens.customstyle_marker); } diff --git a/src/elements/elemstyle.rs b/src/elements/elemstyle.rs index 2557757..266fc22 100644 --- a/src/elements/elemstyle.rs +++ b/src/elements/elemstyle.rs @@ -5,6 +5,7 @@ use std::rc::Rc; use std::sync::Arc; use ariadne::Fmt; +use lsp::semantic::Semantics; use mlua::Error::BadArgument; use mlua::Function; use mlua::Lua; @@ -62,10 +63,13 @@ impl Rule for ElemStyleRule { fn next_match( &self, - _mode: &ParseMode, + mode: &ParseMode, _state: &ParserState, cursor: &Cursor, ) -> Option<(usize, Box)> { + if mode.paragraph_only { + return None; + } self.start_re .find_at(cursor.source.content(), cursor.pos) .map(|m| (m.start(), Box::new([false; 0]) as Box)) @@ -139,8 +143,6 @@ impl Rule for ElemStyleRule { return (cursor, reports); } Some(json) => { - cursor = cursor.at(cursor.pos + json.len()); - // Attempt to deserialize match style.from_json(json) { Err(err) => { @@ -157,9 +159,28 @@ impl Rule for ElemStyleRule { ) ) ); + + cursor = cursor.at(cursor.pos + json.len()); return (cursor, reports); } - Ok(style) => style, + Ok(style) => { + if let Some((sems, tokens)) = + Semantics::from_source(cursor.source.clone(), &state.shared.lsp) + { + let style = matches.get(1).unwrap(); + sems.add(style.start() - 2..style.start(), tokens.elemstyle_operator); + sems.add(style.range(), tokens.elemstyle_name); + sems.add(style.end()..style.end() + 1, tokens.elemstyle_equal); + sems.add( + matches.get(0).unwrap().end() - 1 + ..matches.get(0).unwrap().end() + json.len(), + tokens.elemstyle_value, + ); + } + + cursor = cursor.at(cursor.pos + json.len()); + style + } } } }; @@ -217,3 +238,44 @@ impl Rule for ElemStyleRule { bindings } } + +#[cfg(test)] +pub mod tests { + use parser::langparser::LangParser; + use parser::parser::Parser; + 
use parser::source::SourceFile; + + use super::*; + + #[test] + fn semantics() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + r#" +@@style.section = { + "link_pos": "Before", + "link": ["", "⛓️", " "] +} + "# + .to_string(), + None, + )); + let parser = LangParser::default(); + let (_, state) = parser.parse( + ParserState::new_with_semantics(&parser, None), + source.clone(), + None, + ParseMode::default(), + ); + + validate_semantics!(state, source.clone(), 0, + elemstyle_operator { delta_line == 1, delta_start == 0, length == 2 }; + elemstyle_name { delta_line == 0, delta_start == 2, length == 14 }; + elemstyle_equal { delta_line == 0, delta_start == 14, length == 1 }; + elemstyle_value { delta_line == 0, delta_start == 2, length == 2 }; + elemstyle_value { delta_line == 1, delta_start == 0, length == 23 }; + elemstyle_value { delta_line == 1, delta_start == 0, length == 31 }; + elemstyle_value { delta_line == 1, delta_start == 0, length == 2 }; + ); + } +} diff --git a/src/elements/graphviz.rs b/src/elements/graphviz.rs index d71e97f..e913885 100644 --- a/src/elements/graphviz.rs +++ b/src/elements/graphviz.rs @@ -343,24 +343,16 @@ impl RegexRule for GraphRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let range = token.range; - sems.add( - range.start..range.start + 7, - tokens.graph_sep, - ); + sems.add(range.start..range.start + 7, tokens.graph_sep); if let Some(props) = matches.get(1).map(|m| m.range()) { sems.add(props.start - 1..props.start, tokens.graph_props_sep); sems.add(props.clone(), tokens.graph_props); sems.add(props.end..props.end + 1, tokens.graph_props_sep); } sems.add(matches.get(2).unwrap().range(), tokens.graph_content); - sems.add( - range.end - 8..range.end, - tokens.graph_sep, - ); + sems.add(range.end - 8..range.end, tokens.graph_sep); } reports diff --git 
a/src/elements/import.rs b/src/elements/import.rs index 5e57cab..c63a3bb 100644 --- a/src/elements/import.rs +++ b/src/elements/import.rs @@ -179,9 +179,7 @@ impl RegexRule for ImportRule { ); } - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { // @import let import = if token.source().content().as_bytes()[matches.get(0).unwrap().start()] == b'\n' { diff --git a/src/elements/layout.rs b/src/elements/layout.rs index 3920c85..1b86a08 100644 --- a/src/elements/layout.rs +++ b/src/elements/layout.rs @@ -580,8 +580,7 @@ impl RegexRule for LayoutRule { Err(()) => return reports, }; - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let start = matches .get(0) @@ -667,8 +666,7 @@ impl RegexRule for LayoutRule { let id = tokens.len(); layout_state.stack.pop(); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let start = matches .get(0) diff --git a/src/elements/link.rs b/src/elements/link.rs index f2f1df1..152006f 100644 --- a/src/elements/link.rs +++ b/src/elements/link.rs @@ -214,9 +214,7 @@ impl RegexRule for LinkRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { sems.add( matches.get(1).unwrap().end()..matches.get(1).unwrap().end() + 1, tokens.link_display_sep, diff --git a/src/elements/raw.rs b/src/elements/raw.rs index d1898a9..d21b9d6 100644 --- a/src/elements/raw.rs +++ b/src/elements/raw.rs @@ -209,9 +209,7 @@ impl RegexRule for RawRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), 
&state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let range = matches.get(0).unwrap().range(); sems.add(range.start..range.start + 2, tokens.raw_sep); if let Some(props) = matches.get(1).map(|m| m.range()) { diff --git a/src/elements/reference.rs b/src/elements/reference.rs index 648551a..ed6faa9 100644 --- a/src/elements/reference.rs +++ b/src/elements/reference.rs @@ -326,8 +326,7 @@ impl RegexRule for ReferenceRule { ); } - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let link = matches.get(1).unwrap().range(); sems.add(link.start - 2..link.start - 1, tokens.reference_operator); @@ -356,8 +355,7 @@ impl RegexRule for ReferenceRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let link = matches.get(1).unwrap().range(); sems.add(link.start - 2..link.start - 1, tokens.reference_operator); diff --git a/src/elements/script.rs b/src/elements/script.rs index 8da3934..da83e83 100644 --- a/src/elements/script.rs +++ b/src/elements/script.rs @@ -255,9 +255,7 @@ impl RegexRule for ScriptRule { kernel.run_with_context(&mut ctx, execute); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let range = matches .get(0) .map(|m| { @@ -288,15 +286,12 @@ impl RegexRule for ScriptRule { sems.add(range.end - 2..range.end, tokens.script_sep); } - if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp) - { + if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp) { let mut label = String::new(); - ctx.redirects.iter() - .for_each(|redir| { - label += format!("{}: {} ", 
redir.source, redir.content).as_str(); - }); - if !label.is_empty() - { + ctx.redirects.iter().for_each(|redir| { + label += format!("{}: {} ", redir.source, redir.content).as_str(); + }); + if !label.is_empty() { label.pop(); hints.add(matches.get(0).unwrap().end(), label); } diff --git a/src/elements/section.rs b/src/elements/section.rs index ca6b75f..1c4fee9 100644 --- a/src/elements/section.rs +++ b/src/elements/section.rs @@ -335,9 +335,7 @@ impl RegexRule for SectionRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { sems.add(matches.get(1).unwrap().range(), tokens.section_heading); if let Some(reference) = matches.get(2) { sems.add( @@ -592,7 +590,7 @@ nml.section.push("6", 6, "", "refname") section_name { delta_line == 0, delta_start == 1 }; section_heading { delta_line == 1, delta_start == 0, length == 2 }; - section_reference { delta_line == 0, delta_start == 2, length == 4 }; + section_reference { delta_line == 0, delta_start == 2, length == 3 }; section_kind { delta_line == 0, delta_start == 4, length == 1 }; section_name { delta_line == 0, delta_start == 1 }; diff --git a/src/elements/style.rs b/src/elements/style.rs index 6e6cecb..101df6f 100644 --- a/src/elements/style.rs +++ b/src/elements/style.rs @@ -195,8 +195,7 @@ impl RegexRule for StyleRule { )), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { sems.add(token.start()..token.end(), tokens.style_marker); } diff --git a/src/elements/tex.rs b/src/elements/tex.rs index cbc3baa..1ec342c 100644 --- a/src/elements/tex.rs +++ b/src/elements/tex.rs @@ -424,9 +424,7 @@ impl RegexRule for TexRule { }), ); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, 
tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let range = token.range; sems.add( range.start..range.start + if index == 0 { 2 } else { 1 }, diff --git a/src/elements/variable.rs b/src/elements/variable.rs index b76a261..3c480ab 100644 --- a/src/elements/variable.rs +++ b/src/elements/variable.rs @@ -253,9 +253,7 @@ impl RegexRule for VariableRule { } } - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let name = matches.get(2).unwrap().range(); if let Some(kind) = matches.get(1).map(|m| m.range()) { sems.add(kind.start - 1..kind.start, tokens.variable_operator); @@ -425,9 +423,7 @@ impl RegexRule for VariableSubstitutionRule { variable.parse(state, token.clone(), document); - if let Some((sems, tokens)) = - Semantics::from_source(token.source(), &state.shared.lsp) - { + if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) { let name = matches.get(1).unwrap().range(); sems.add(name.start - 1..name.start, tokens.variable_sub_sep); sems.add(name.clone(), tokens.variable_sub_name); diff --git a/src/lsp/hints.rs b/src/lsp/hints.rs index c8168d7..dcc7164 100644 --- a/src/lsp/hints.rs +++ b/src/lsp/hints.rs @@ -1,15 +1,19 @@ -use std::{cell::{Ref, RefCell}, rc::Rc}; +use std::cell::Ref; +use std::cell::RefCell; +use std::rc::Rc; use tower_lsp::lsp_types::InlayHint; -use crate::parser::source::{LineCursor, Source, SourceFile, VirtualSource}; +use crate::parser::source::LineCursor; +use crate::parser::source::Source; +use crate::parser::source::SourceFile; +use crate::parser::source::VirtualSource; use super::data::LSPData; /// Per file hints #[derive(Debug)] -pub struct HintsData -{ +pub struct HintsData { /// The current cursor cursor: RefCell, @@ -55,41 +59,35 @@ impl<'a> Hints<'a> { { return Self::from_source_impl(location.source(), lsp, original_source); } else if let 
Ok(source) = source.clone().downcast_rc::() { - return Ref::filter_map( - lsp.as_ref().unwrap().borrow(), - |lsp: &LSPData| { - lsp.inlay_hints.get(&(source.clone() as Rc)) - }, - ) + return Ref::filter_map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| { + lsp.inlay_hints.get(&(source.clone() as Rc)) + }) .ok() - .map(|hints| { - Self { - hints, - source, - original_source, - } + .map(|hints| Self { + hints, + source, + original_source, }); } None } - pub fn from_source( - source: Rc, - lsp: &'a Option>, - ) -> Option { + pub fn from_source(source: Rc, lsp: &'a Option>) -> Option { if lsp.is_none() { return None; } Self::from_source_impl(source.clone(), lsp, source) } - pub fn add(&self, position: usize, label: String) - { + pub fn add(&self, position: usize, label: String) { let mut cursor = self.hints.cursor.borrow_mut(); cursor.move_to(position); self.hints.hints.borrow_mut().push(InlayHint { - position: tower_lsp::lsp_types::Position { line: cursor.line as u32, character: cursor.line_pos as u32 }, + position: tower_lsp::lsp_types::Position { + line: cursor.line as u32, + character: cursor.line_pos as u32, + }, label: tower_lsp::lsp_types::InlayHintLabel::String(label), kind: Some(tower_lsp::lsp_types::InlayHintKind::PARAMETER), text_edits: None, diff --git a/src/lsp/mod.rs b/src/lsp/mod.rs index 648ce42..5f8a00e 100644 --- a/src/lsp/mod.rs +++ b/src/lsp/mod.rs @@ -1,3 +1,3 @@ -pub mod semantic; pub mod data; pub mod hints; +pub mod semantic; diff --git a/src/lsp/semantic.rs b/src/lsp/semantic.rs index a997a69..2eaa726 100644 --- a/src/lsp/semantic.rs +++ b/src/lsp/semantic.rs @@ -128,6 +128,11 @@ pub struct Tokens { pub variable_sub_sep: (u32, u32), pub variable_sub_name: (u32, u32), + pub elemstyle_operator: (u32, u32), + pub elemstyle_name: (u32, u32), + pub elemstyle_equal: (u32, u32), + pub elemstyle_value: (u32, u32), + pub code_sep: (u32, u32), pub code_props_sep: (u32, u32), pub code_props: (u32, u32), @@ -211,6 +216,11 @@ impl Tokens { 
variable_sub_sep: token!("operator"), variable_sub_name: token!("macro"), + elemstyle_operator: token!("operator"), + elemstyle_name: token!("macro"), + elemstyle_equal: token!("operator"), + elemstyle_value: token!("number"), + code_sep: token!("operator"), code_props_sep: token!("operator"), code_props: token!("enum"), @@ -308,12 +318,9 @@ impl<'a> Semantics<'a> { { return Self::from_source_impl(location.source(), lsp, original_source); } else if let Ok(source) = source.clone().downcast_rc::() { - return Ref::filter_map( - lsp.as_ref().unwrap().borrow(), - |lsp: &LSPData| { - lsp.semantic_data.get(&(source.clone() as Rc)) - }, - ) + return Ref::filter_map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| { + lsp.semantic_data.get(&(source.clone() as Rc)) + }) .ok() .map(|sems| { ( @@ -322,10 +329,9 @@ impl<'a> Semantics<'a> { source, original_source, }, - Ref::map( - lsp.as_ref().unwrap().borrow(), - |lsp: &LSPData| &lsp.semantic_tokens, - ), + Ref::map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| { + &lsp.semantic_tokens + }), ) }); } @@ -345,28 +351,22 @@ impl<'a> Semantics<'a> { /// Method that should be called at the end of parsing /// /// This function will process the end of the semantic queue - pub fn on_document_end(lsp: &'a Option>, source: Rc) - { - if source.content().is_empty() - { + pub fn on_document_end(lsp: &'a Option>, source: Rc) { + if source.content().is_empty() { return; } let pos = source.original_position(source.content().len() - 1).1; - if let Some((sems, _)) = Self::from_source(source, lsp) - { + if let Some((sems, _)) = Self::from_source(source, lsp) { sems.process_queue(pos); } } /// Processes the semantic queue up to a certain position - fn process_queue(&self, pos: usize) - { + fn process_queue(&self, pos: usize) { let mut queue = self.sems.semantic_queue.borrow_mut(); - while !queue.is_empty() - { + while !queue.is_empty() { let (range, token) = queue.front().unwrap(); - if range.start > pos - { + if range.start > pos { break; } @@ 
-375,8 +375,7 @@ impl<'a> Semantics<'a> { } } - fn add_impl(&self, range: Range, token: (u32, u32)) - { + fn add_impl(&self, range: Range, token: (u32, u32)) { let mut tokens = self.sems.tokens.borrow_mut(); let mut cursor = self.sems.cursor.borrow_mut(); let mut current = cursor.clone(); @@ -390,7 +389,7 @@ impl<'a> Semantics<'a> { let len = usize::min(range.end - cursor.pos, end); let clen = self.source.content()[cursor.pos..cursor.pos + len] .chars() - .fold(0, |acc, c| acc + c.len_utf16()); + .fold(0, |acc, _| acc + 1); let delta_line = cursor.line - current.line; let delta_start = if delta_line == 0 { @@ -423,15 +422,11 @@ impl<'a> Semantics<'a> { } /// Add a semantic token to be processed in a future call to `add()` - pub fn add_to_queue(&self, range: Range, token: (u32, u32)) - { + pub fn add_to_queue(&self, range: Range, token: (u32, u32)) { let range = self.original_source.original_range(range).1; let mut queue = self.sems.semantic_queue.borrow_mut(); - match queue.binary_search_by_key(&range.start, |(range, _)| range.start) - { - Ok(pos) | Err(pos) => { - queue.insert(pos, (range, token)) - }, + match queue.binary_search_by_key(&range.start, |(range, _)| range.start) { + Ok(pos) | Err(pos) => queue.insert(pos, (range, token)), } } } diff --git a/src/lua/kernel.rs b/src/lua/kernel.rs index 7712fb0..4aa81bd 100644 --- a/src/lua/kernel.rs +++ b/src/lua/kernel.rs @@ -8,10 +8,8 @@ use crate::parser::parser::Parser; use crate::parser::parser::ParserState; use crate::parser::source::Token; - /// Redirected data from lua execution -pub struct KernelRedirect -{ +pub struct KernelRedirect { /// Message source e.g print() pub source: String, /// Message content @@ -26,12 +24,20 @@ pub struct KernelContext<'a, 'b, 'c> { } impl<'a, 'b, 'c> KernelContext<'a, 'b, 'c> { - pub fn new(location: Token, state: &'a ParserState<'a, 'b>, document: &'c dyn Document<'c>) -> Self { - Self { location, state, document, redirects: vec![] } - } + pub fn new( + location: Token, + 
state: &'a ParserState<'a, 'b>, + document: &'c dyn Document<'c>, + ) -> Self { + Self { + location, + state, + document, + redirects: vec![], + } + } } - thread_local! { pub static CTX: RefCell>> = const { RefCell::new(None) }; } @@ -60,17 +66,23 @@ impl Kernel { lua.globals().set("nml", nml_table).unwrap(); } - lua.globals().set("print", lua.create_function(|_, msg: String| { - CTX.with_borrow_mut(|ctx| { - ctx.as_mut().map(|ctx| { - ctx.redirects.push(KernelRedirect { - source: "print".into(), - content: msg, + lua.globals() + .set( + "print", + lua.create_function(|_, msg: String| { + CTX.with_borrow_mut(|ctx| { + ctx.as_mut().map(|ctx| { + ctx.redirects.push(KernelRedirect { + source: "print".into(), + content: msg, + }); + }); }); - }); - }); - Ok(()) - }).unwrap()).unwrap(); + Ok(()) + }) + .unwrap(), + ) + .unwrap(); Self { lua } } diff --git a/src/parser/langparser.rs b/src/parser/langparser.rs index dbc52d0..807a4f0 100644 --- a/src/parser/langparser.rs +++ b/src/parser/langparser.rs @@ -48,11 +48,17 @@ impl<'a> LangParser<'a> { s } - pub fn new(with_colors: bool, report_handler: Box) + 'a>) -> Self - { + pub fn new( + with_colors: bool, + report_handler: Box) + 'a>, + ) -> Self { let mut s = Self { rules: vec![], - colors: if with_colors { ReportColors::with_colors() } else { ReportColors::without_colors() }, + colors: if with_colors { + ReportColors::with_colors() + } else { + ReportColors::without_colors() + }, err_flag: RefCell::new(false), report_handler, }; @@ -219,8 +225,7 @@ impl<'b> Parser for LangParser<'b> { } /// Handles the reports produced by parsing. 
- fn handle_reports(&self, reports: Vec) - { + fn handle_reports(&self, reports: Vec) { (self.report_handler)(self.colors(), reports); } } diff --git a/src/parser/reports.rs b/src/parser/reports.rs index 0f6c155..4fd9b0e 100644 --- a/src/parser/reports.rs +++ b/src/parser/reports.rs @@ -109,20 +109,25 @@ impl Report { }); } - fn to_diagnostics(self, diagnostic_map: &DashMap>) - { + fn to_diagnostics(self, diagnostic_map: &DashMap>) { for span in self.spans { let (source, range) = span.token.source().original_range(span.token.range.clone()); - + let mut start = LineCursor::new(source.clone()); start.move_to(range.start); let mut end = start.clone(); end.move_to(range.end); let diag = Diagnostic { - range: tower_lsp::lsp_types::Range { - start: tower_lsp::lsp_types::Position{ line: start.line as u32, character: start.line_pos as u32 }, - end: tower_lsp::lsp_types::Position{ line: end.line as u32, character: end.line_pos as u32 }, + range: tower_lsp::lsp_types::Range { + start: tower_lsp::lsp_types::Position { + line: start.line as u32, + character: start.line_pos as u32, + }, + end: tower_lsp::lsp_types::Position { + line: end.line as u32, + character: end.line_pos as u32, + }, }, severity: Some((&self.kind).into()), code: None, @@ -133,21 +138,19 @@ impl Report { tags: None, data: None, }; - if let Some(mut diags) = diagnostic_map.get_mut(source.name()) - { + if let Some(mut diags) = diagnostic_map.get_mut(source.name()) { diags.push(diag); - } - else - { + } else { diagnostic_map.insert(source.name().to_owned(), vec![diag]); } } } - pub fn reports_to_diagnostics(diagnostic_map: &DashMap>, mut reports: Vec) - { - for report in reports.drain(..) - { + pub fn reports_to_diagnostics( + diagnostic_map: &DashMap>, + mut reports: Vec, + ) { + for report in reports.drain(..) 
{ report.to_diagnostics(diagnostic_map); } //diagnostics diff --git a/src/parser/source.rs b/src/parser/source.rs index ee675ce..275d3a2 100644 --- a/src/parser/source.rs +++ b/src/parser/source.rs @@ -17,16 +17,6 @@ pub trait Source: Downcast + Debug { } impl_downcast!(Source); -pub trait SourcePosition { - /// Transforms a position to it's position in the oldest parent source - fn original_position(&self, pos: usize) -> (Rc, usize); - - /// Transforms a range to the oldest parent source - /// - /// This function takes a range from a source and attempts to get the range's position in the oldest parent - fn original_range(&self, range: Range) -> (Rc, Range); -} - impl core::fmt::Display for dyn Source { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name()) @@ -43,6 +33,7 @@ impl std::hash::Hash for dyn Source { fn hash(&self, state: &mut H) { self.name().hash(state) } } +/// [`SourceFile`] is a type of [`Source`] that represents a real file. #[derive(Debug)] pub struct SourceFile { location: Option, @@ -82,6 +73,9 @@ impl Source for SourceFile { /// Stores the offsets in a virtual source /// +/// The offsets are used to implement the [`SourcePosition`] trait, which allows diagnostics from +/// [`VirtualSource`] to propagate back to their corresponding [`SourceFile`]. +/// /// # Example /// /// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent" @@ -108,6 +102,8 @@ impl SourceOffset { } } +/// [`VirtualSource`] is a type of [`Source`] that represents a virtual file. [`VirtualSource`]s +/// can be created from other [`VirtualSource`]s but it must always come from a [`SourceFile`]. #[derive(Debug)] pub struct VirtualSource { location: Token, @@ -148,6 +144,34 @@ impl Source for VirtualSource { fn content(&self) -> &String { &self.content } } +/// Trait for accessing position in a parent [`SourceFile`] +/// +/// This trait is used to create precise error diagnostics and is the backbone of the LSP. 
+/// +/// # Example +/// +/// Given the following source file: +/// ``` +/// input.nml: +/// [*link*](url) +/// ``` +/// When parsed, a [`VirtualSource`] is created for parsing the link display: `*link*`. +/// If an error or a semantic highlight is requested for that new source, this trait allows one to +/// recover the original position in the parent [`SourceFile`]. +pub trait SourcePosition { + /// Transforms a position to the corresponding position in the oldest parent [`SourceFile`]. + /// + /// This function will return the first parent [`SourceFile`] as well as the position mapped + /// in that source + fn original_position(&self, pos: usize) -> (Rc, usize); + + /// Transforms a range to the corresponding range in the oldest parent [`SourceFile`]. + /// + /// This function will return the first parent [`SourceFile`] as well as the range mapped + /// in that source + fn original_range(&self, range: Range) -> (Rc, Range); +} + impl SourcePosition for Rc { fn original_position(&self, mut pos: usize) -> (Rc, usize) { // Stop recursion @@ -196,7 +220,10 @@ impl SourcePosition for Rc { } } -#[derive(Debug)] +/// Cursor in a file +/// +/// Represents a position in a specific file. +#[derive(Debug, Clone)] pub struct Cursor { pub pos: usize, pub source: Rc, @@ -214,18 +241,13 @@ impl Cursor { } } -impl Clone for Cursor { - fn clone(&self) -> Self { - Self { - pos: self.pos, - source: self.source.clone(), - } - } - - fn clone_from(&mut self, source: &Self) { *self = source.clone() } -} - /// Cursor type used for the language server +/// +/// # Notes +/// +/// Because the LSP uses UTF-16 encoded positions, field [`line_pos`] corresponds to the UTF-16 +/// distance between the first character (position = 0 or after '\n') and the character at the +/// current position. 
#[derive(Debug, Clone)] pub struct LineCursor { /// Byte position in the source @@ -252,6 +274,7 @@ impl LineCursor { /// Moves [`LineCursor`] to an absolute byte position /// /// # Error + /// /// This function will panic if [`pos`] is not utf8 aligned pub fn move_to(&mut self, pos: usize) { if self.pos < pos { @@ -283,6 +306,7 @@ impl LineCursor { } } +/// A token is a [`Range`] in a [`Source`] #[derive(Debug, Clone)] pub struct Token { pub range: Range, @@ -294,16 +318,6 @@ impl Token { pub fn source(&self) -> Rc { self.source.clone() } - /// Construct Token from a range - pub fn from(start: &Cursor, end: &Cursor) -> Self { - assert!(Rc::ptr_eq(&start.source, &end.source)); - - Self { - range: start.pos..end.pos, - source: start.source.clone(), - } - } - pub fn start(&self) -> usize { self.range.start } pub fn end(&self) -> usize { self.range.end } diff --git a/src/server.rs b/src/server.rs index 696c5d6..a05e246 100644 --- a/src/server.rs +++ b/src/server.rs @@ -51,9 +51,12 @@ impl Backend { // Diagnostics self.diagnostic_map.clear(); - let parser = LangParser::new(false, Box::new( - |_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports) - )); + let parser = LangParser::new( + false, + Box::new(|_colors, reports| { + Report::reports_to_diagnostics(&self.diagnostic_map, reports) + }), + ); // Parse let (_doc, state) = parser.parse( ParserState::new_with_semantics(&parser, None), @@ -88,8 +91,7 @@ impl Backend { .ok() .map(|source| source.path().to_owned()) { - self.hints_map - .insert(path, hints.hints.replace(vec![])); + self.hints_map.insert(path, hints.hints.replace(vec![])); } } } @@ -139,15 +141,14 @@ impl LanguageServer for Backend { }, ), ), - diagnostic_provider: Some( - DiagnosticServerCapabilities::Options( - DiagnosticOptions { - identifier: None, - inter_file_dependencies: true, - workspace_diagnostics: true, - work_done_progress_options: WorkDoneProgressOptions::default(), - }) - ), + diagnostic_provider: 
Some(DiagnosticServerCapabilities::Options( + DiagnosticOptions { + identifier: None, + inter_file_dependencies: true, + workspace_diagnostics: true, + work_done_progress_options: WorkDoneProgressOptions::default(), + }, + )), inlay_hint_provider: Some(OneOf::Left(true)), ..ServerCapabilities::default() }, @@ -226,25 +227,25 @@ impl LanguageServer for Backend { &self, params: DocumentDiagnosticParams, ) -> tower_lsp::jsonrpc::Result { - Ok( - DocumentDiagnosticReportResult::Report( - DocumentDiagnosticReport::Full( - RelatedFullDocumentDiagnosticReport { - related_documents: None, - full_document_diagnostic_report: FullDocumentDiagnosticReport { - result_id: None, - items: self.diagnostic_map.get(params.text_document.uri.as_str()).map_or(vec![], |v| v.to_owned()) - } - } - ) - ) - ) + Ok(DocumentDiagnosticReportResult::Report( + DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: FullDocumentDiagnosticReport { + result_id: None, + items: self + .diagnostic_map + .get(params.text_document.uri.as_str()) + .map_or(vec![], |v| v.to_owned()), + }, + }), + )) } - async fn inlay_hint(&self, params: InlayHintParams) -> tower_lsp::jsonrpc::Result>> - { - if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str()) - { + async fn inlay_hint( + &self, + params: InlayHintParams, + ) -> tower_lsp::jsonrpc::Result>> { + if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str()) { let (_, data) = hints.pair(); return Ok(Some(data.to_owned()));