Docs, semantics & bugfix

ef3d0c3e 2024-10-26 17:30:10 +02:00
parent 9fcccdd137
commit 141760a677
24 changed files with 293 additions and 240 deletions

@@ -287,8 +287,12 @@ impl Rule for BlockquoteRule {
 			Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
 		{
 			let range = captures.get(0).unwrap().range();
-			let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start };
-			sems.add(start..start+1, tokens.blockquote_marker);
+			let start = if content.as_bytes()[range.start] == b'\n' {
+				range.start + 1
+			} else {
+				range.start
+			};
+			sems.add(start..start + 1, tokens.blockquote_marker);
 			if let Some(props) = captures.get(1).map(|m| m.range()) {
 				sems.add(props.start - 1..props.start, tokens.blockquote_props_sep);
 				sems.add(props.clone(), tokens.blockquote_props);
@@ -309,8 +313,9 @@ impl Rule for BlockquoteRule {
 				// Offset
 				let last = offsets.last().map_or(0, |(_, last)| *last);
 				offsets.push((
 					entry_content.len(),
-					last + (captures.get(1).unwrap().start() - captures.get(0).unwrap().start() - 1) as isize
+					last + (captures.get(1).unwrap().start() - captures.get(0).unwrap().start() - 1)
+						as isize,
 				));

 				entry_content += "\n";
@@ -320,8 +325,12 @@ impl Rule for BlockquoteRule {
 			Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
 		{
 			let range = captures.get(0).unwrap().range();
-			let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start };
-			sems.add_to_queue(start..start+1, tokens.blockquote_marker);
+			let start = if content.as_bytes()[range.start] == b'\n' {
+				range.start + 1
+			} else {
+				range.start
+			};
+			sems.add_to_queue(start..start + 1, tokens.blockquote_marker);
 		}
 	}
@@ -331,7 +340,7 @@ impl Rule for BlockquoteRule {
 			token.clone(),
 			"Blockquote Entry".to_string(),
 			entry_content,
-			offsets
+			offsets,
 		));

 		// Parse content
 		let parsed_doc = state.with_state(|new_state| {
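
Note: the first blockquote hunk is not just a reflow; the `start` computation matters because the rule's regex can consume the newline that precedes the quote marker. A standalone sketch of that guard (hypothetical helper, same `content`/`range` shapes as in the diff):

    fn marker_start(content: &str, range: std::ops::Range<usize>) -> usize {
        // When the match begins on the preceding '\n', the marker itself
        // starts one byte later; otherwise it starts at the match start.
        if content.as_bytes()[range.start] == b'\n' {
            range.start + 1
        } else {
            range.start
        }
    }

For example, `marker_start("\n>quote", 0..7)` is 1, while `marker_start(">quote", 0..6)` is 0.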

@@ -503,9 +503,7 @@ impl RegexRule for CodeRule {
 			);
 		}

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let range = matches
 				.get(0)
 				.map(|m| {

@@ -90,9 +90,7 @@ impl RegexRule for CommentRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let comment = matches.get(1).unwrap().range();
 			sems.add(comment.start - 2..comment.end, tokens.comment);
 		}

@@ -52,11 +52,7 @@ impl CustomStyle for LuaCustomStyle {
 		let kernel: Ref<'_, Kernel> =
 			Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
 		//let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap());
-		let mut ctx = KernelContext::new(
-			location.clone(),
-			state,
-			document,
-		);
+		let mut ctx = KernelContext::new(location.clone(), state, document);

 		let mut reports = vec![];
 		kernel.run_with_context(&mut ctx, |lua| {
@@ -86,11 +82,7 @@ impl CustomStyle for LuaCustomStyle {
 	) -> Vec<Report> {
 		let kernel: Ref<'_, Kernel> =
 			Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
-		let mut ctx = KernelContext::new(
-			location.clone(),
-			state,
-			document,
-		);
+		let mut ctx = KernelContext::new(location.clone(), state, document);

 		let mut reports = vec![];
 		kernel.run_with_context(&mut ctx, |lua| {
@@ -337,9 +329,7 @@ impl Rule for CustomStyleRule {
 			style.on_start(token.clone(), state, document)
 		};

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			sems.add(token.range.clone(), tokens.customstyle_marker);
 		}

@@ -5,6 +5,7 @@ use std::rc::Rc;
 use std::sync::Arc;

 use ariadne::Fmt;
+use lsp::semantic::Semantics;
 use mlua::Error::BadArgument;
 use mlua::Function;
 use mlua::Lua;
@@ -62,10 +63,13 @@ impl Rule for ElemStyleRule {
 	fn next_match(
 		&self,
-		_mode: &ParseMode,
+		mode: &ParseMode,
 		_state: &ParserState,
 		cursor: &Cursor,
 	) -> Option<(usize, Box<dyn Any>)> {
+		if mode.paragraph_only {
+			return None;
+		}
 		self.start_re
 			.find_at(cursor.source.content(), cursor.pos)
 			.map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
@@ -139,8 +143,6 @@ impl Rule for ElemStyleRule {
 				return (cursor, reports);
 			}
 			Some(json) => {
-				cursor = cursor.at(cursor.pos + json.len());
-
 				// Attempt to deserialize
 				match style.from_json(json) {
 					Err(err) => {
@@ -157,9 +159,28 @@ impl Rule for ElemStyleRule {
 							)
 						)
 					);
+					cursor = cursor.at(cursor.pos + json.len());
 					return (cursor, reports);
 				}
-				Ok(style) => style,
+				Ok(style) => {
+					if let Some((sems, tokens)) =
+						Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
+					{
+						let style = matches.get(1).unwrap();
+						sems.add(style.start() - 2..style.start(), tokens.elemstyle_operator);
+						sems.add(style.range(), tokens.elemstyle_name);
+						sems.add(style.end()..style.end() + 1, tokens.elemstyle_equal);
+						sems.add(
+							matches.get(0).unwrap().end() - 1
+								..matches.get(0).unwrap().end() + json.len(),
+							tokens.elemstyle_value,
+						);
+					}
+					cursor = cursor.at(cursor.pos + json.len());
+					style
+				}
 			}
 		}
 	};
@@ -217,3 +238,44 @@ impl Rule for ElemStyleRule {
 		bindings
 	}
 }
+
+#[cfg(test)]
+pub mod tests {
+	use parser::langparser::LangParser;
+	use parser::parser::Parser;
+	use parser::source::SourceFile;
+
+	use super::*;
+
+	#[test]
+	fn semantics() {
+		let source = Rc::new(SourceFile::with_content(
+			"".to_string(),
+			r#"
+@@style.section = {
+	"link_pos": "Before",
+	"link": ["", "⛓️", " "]
+}
+"#
+			.to_string(),
+			None,
+		));
+		let parser = LangParser::default();
+		let (_, state) = parser.parse(
+			ParserState::new_with_semantics(&parser, None),
+			source.clone(),
+			None,
+			ParseMode::default(),
+		);
+
+		validate_semantics!(state, source.clone(), 0,
+			elemstyle_operator { delta_line == 1, delta_start == 0, length == 2 };
+			elemstyle_name { delta_line == 0, delta_start == 2, length == 14 };
+			elemstyle_equal { delta_line == 0, delta_start == 14, length == 1 };
+			elemstyle_value { delta_line == 0, delta_start == 2, length == 2 };
+			elemstyle_value { delta_line == 1, delta_start == 0, length == 23 };
+			elemstyle_value { delta_line == 1, delta_start == 0, length == 31 };
+			elemstyle_value { delta_line == 1, delta_start == 0, length == 2 };
+		);
+	}
+}

@@ -343,24 +343,16 @@ impl RegexRule for GraphRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let range = token.range;
-			sems.add(
-				range.start..range.start + 7,
-				tokens.graph_sep,
-			);
+			sems.add(range.start..range.start + 7, tokens.graph_sep);
 			if let Some(props) = matches.get(1).map(|m| m.range()) {
 				sems.add(props.start - 1..props.start, tokens.graph_props_sep);
 				sems.add(props.clone(), tokens.graph_props);
 				sems.add(props.end..props.end + 1, tokens.graph_props_sep);
 			}
 			sems.add(matches.get(2).unwrap().range(), tokens.graph_content);
-			sems.add(
-				range.end - 8..range.end,
-				tokens.graph_sep,
-			);
+			sems.add(range.end - 8..range.end, tokens.graph_sep);
 		}

 		reports

@@ -179,9 +179,7 @@ impl RegexRule for ImportRule {
 			);
 		}

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			// @import
 			let import =
 				if token.source().content().as_bytes()[matches.get(0).unwrap().start()] == b'\n' {

@@ -580,8 +580,7 @@ impl RegexRule for LayoutRule {
 			Err(()) => return reports,
 		};

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let start = matches
 				.get(0)
@@ -667,8 +666,7 @@ impl RegexRule for LayoutRule {
 		let id = tokens.len();
 		layout_state.stack.pop();

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let start = matches
 				.get(0)

@@ -214,9 +214,7 @@ impl RegexRule for LinkRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			sems.add(
 				matches.get(1).unwrap().end()..matches.get(1).unwrap().end() + 1,
 				tokens.link_display_sep,

@@ -209,9 +209,7 @@ impl RegexRule for RawRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let range = matches.get(0).unwrap().range();
 			sems.add(range.start..range.start + 2, tokens.raw_sep);
 			if let Some(props) = matches.get(1).map(|m| m.range()) {

@@ -326,8 +326,7 @@ impl RegexRule for ReferenceRule {
 			);
 		}

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let link = matches.get(1).unwrap().range();
 			sems.add(link.start - 2..link.start - 1, tokens.reference_operator);
@@ -356,8 +355,7 @@ impl RegexRule for ReferenceRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let link = matches.get(1).unwrap().range();
 			sems.add(link.start - 2..link.start - 1, tokens.reference_operator);

@@ -255,9 +255,7 @@ impl RegexRule for ScriptRule {
 		kernel.run_with_context(&mut ctx, execute);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let range = matches
 				.get(0)
 				.map(|m| {
@@ -288,15 +286,12 @@ impl RegexRule for ScriptRule {
 			sems.add(range.end - 2..range.end, tokens.script_sep);
 		}

-		if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp) {
 			let mut label = String::new();
-			ctx.redirects.iter()
-				.for_each(|redir| {
-					label += format!("{}: {} ", redir.source, redir.content).as_str();
-				});
-			if !label.is_empty()
-			{
+			ctx.redirects.iter().for_each(|redir| {
+				label += format!("{}: {} ", redir.source, redir.content).as_str();
+			});
+			if !label.is_empty() {
 				label.pop();
 				hints.add(matches.get(0).unwrap().end(), label);
 			}

@@ -335,9 +335,7 @@ impl RegexRule for SectionRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			sems.add(matches.get(1).unwrap().range(), tokens.section_heading);
 			if let Some(reference) = matches.get(2) {
 				sems.add(
@@ -592,7 +590,7 @@ nml.section.push("6", 6, "", "refname")
 			section_name { delta_line == 0, delta_start == 1 };
 			section_heading { delta_line == 1, delta_start == 0, length == 2 };
-			section_reference { delta_line == 0, delta_start == 2, length == 4 };
+			section_reference { delta_line == 0, delta_start == 2, length == 3 };
 			section_kind { delta_line == 0, delta_start == 4, length == 1 };
 			section_name { delta_line == 0, delta_start == 1 };
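
Note on the `delta_line`/`delta_start` values asserted by these tests: LSP semantic tokens are delta-encoded. Each token stores its line offset from the previous token, and its start offset is relative to the previous token's start only when both sit on the same line. A minimal sketch of that encoding (illustrative names, not this project's API):

    /// One decoded token: absolute line and start column, in UTF-16 units.
    struct Tok {
        line: u32,
        start: u32,
        length: u32,
    }

    /// Delta-encode tokens the way the LSP wire format expects.
    fn encode(toks: &[Tok]) -> Vec<(u32, u32, u32)> {
        let (mut prev_line, mut prev_start) = (0, 0);
        toks.iter()
            .map(|t| {
                let delta_line = t.line - prev_line;
                // Only a token on the same line is relative to the previous start.
                let delta_start = if delta_line == 0 { t.start - prev_start } else { t.start };
                prev_line = t.line;
                prev_start = t.start;
                (delta_line, delta_start, t.length)
            })
            .collect()
    }

Under this encoding a token's position is independent of its length, which is why the updated assertion above only touches the `length` field of `section_reference`.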

@@ -195,8 +195,7 @@ impl RegexRule for StyleRule {
 			)),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			sems.add(token.start()..token.end(), tokens.style_marker);
 		}

@@ -424,9 +424,7 @@ impl RegexRule for TexRule {
 			}),
 		);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let range = token.range;
 			sems.add(
 				range.start..range.start + if index == 0 { 2 } else { 1 },

@@ -253,9 +253,7 @@ impl RegexRule for VariableRule {
 			}
 		}

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let name = matches.get(2).unwrap().range();
 			if let Some(kind) = matches.get(1).map(|m| m.range()) {
 				sems.add(kind.start - 1..kind.start, tokens.variable_operator);
@@ -425,9 +423,7 @@ impl RegexRule for VariableSubstitutionRule {
 		variable.parse(state, token.clone(), document);

-		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.lsp)
-		{
+		if let Some((sems, tokens)) = Semantics::from_source(token.source(), &state.shared.lsp) {
 			let name = matches.get(1).unwrap().range();
 			sems.add(name.start - 1..name.start, tokens.variable_sub_sep);
 			sems.add(name.clone(), tokens.variable_sub_name);

@@ -1,15 +1,19 @@
-use std::{cell::{Ref, RefCell}, rc::Rc};
+use std::cell::Ref;
+use std::cell::RefCell;
+use std::rc::Rc;

 use tower_lsp::lsp_types::InlayHint;

-use crate::parser::source::{LineCursor, Source, SourceFile, VirtualSource};
+use crate::parser::source::LineCursor;
+use crate::parser::source::Source;
+use crate::parser::source::SourceFile;
+use crate::parser::source::VirtualSource;

 use super::data::LSPData;

 /// Per file hints
 #[derive(Debug)]
-pub struct HintsData
-{
+pub struct HintsData {
 	/// The current cursor
 	cursor: RefCell<LineCursor>,
@@ -55,41 +59,35 @@ impl<'a> Hints<'a> {
 		{
 			return Self::from_source_impl(location.source(), lsp, original_source);
 		} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
-			return Ref::filter_map(
-				lsp.as_ref().unwrap().borrow(),
-				|lsp: &LSPData| {
-					lsp.inlay_hints.get(&(source.clone() as Rc<dyn Source>))
-				},
-			)
+			return Ref::filter_map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| {
+				lsp.inlay_hints.get(&(source.clone() as Rc<dyn Source>))
+			})
 			.ok()
-			.map(|hints| {
-				Self {
-					hints,
-					source,
-					original_source,
-				}
-			});
+			.map(|hints| Self {
+				hints,
+				source,
+				original_source,
+			});
 		}
 		None
 	}

-	pub fn from_source(
-		source: Rc<dyn Source>,
-		lsp: &'a Option<RefCell<LSPData>>,
-	) -> Option<Self> {
+	pub fn from_source(source: Rc<dyn Source>, lsp: &'a Option<RefCell<LSPData>>) -> Option<Self> {
 		if lsp.is_none() {
 			return None;
 		}
 		Self::from_source_impl(source.clone(), lsp, source)
 	}

-	pub fn add(&self, position: usize, label: String)
-	{
+	pub fn add(&self, position: usize, label: String) {
 		let mut cursor = self.hints.cursor.borrow_mut();
 		cursor.move_to(position);

 		self.hints.hints.borrow_mut().push(InlayHint {
-			position: tower_lsp::lsp_types::Position { line: cursor.line as u32, character: cursor.line_pos as u32 },
+			position: tower_lsp::lsp_types::Position {
+				line: cursor.line as u32,
+				character: cursor.line_pos as u32,
+			},
 			label: tower_lsp::lsp_types::InlayHintLabel::String(label),
 			kind: Some(tower_lsp::lsp_types::InlayHintKind::PARAMETER),
 			text_edits: None,
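
`Hints::add` is where a byte offset in the source becomes an LSP `Position`, with the shared `LineCursor` tracking `line` and `line_pos`. The essential computation as a self-contained sketch (the helper below is hypothetical; the key point is that LSP `character` offsets count UTF-16 code units):

    /// Convert a byte offset into a 0-based (line, UTF-16 column) pair.
    fn lsp_position(content: &str, pos: usize) -> (u32, u32) {
        let before = &content[..pos]; // panics if `pos` is not on a char boundary
        let line = before.matches('\n').count() as u32;
        let line_start = before.rfind('\n').map_or(0, |i| i + 1);
        // Count UTF-16 code units, not bytes or chars, for the column.
        let character = content[line_start..pos]
            .chars()
            .map(|c| c.len_utf16() as u32)
            .sum();
        (line, character)
    }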

@@ -1,3 +1,3 @@
-pub mod semantic;
 pub mod data;
 pub mod hints;
+pub mod semantic;

@@ -128,6 +128,11 @@ pub struct Tokens {
 	pub variable_sub_sep: (u32, u32),
 	pub variable_sub_name: (u32, u32),

+	pub elemstyle_operator: (u32, u32),
+	pub elemstyle_name: (u32, u32),
+	pub elemstyle_equal: (u32, u32),
+	pub elemstyle_value: (u32, u32),
+
 	pub code_sep: (u32, u32),
 	pub code_props_sep: (u32, u32),
 	pub code_props: (u32, u32),
@@ -211,6 +216,11 @@ impl Tokens {
 			variable_sub_sep: token!("operator"),
 			variable_sub_name: token!("macro"),

+			elemstyle_operator: token!("operator"),
+			elemstyle_name: token!("macro"),
+			elemstyle_equal: token!("operator"),
+			elemstyle_value: token!("number"),
+
 			code_sep: token!("operator"),
 			code_props_sep: token!("operator"),
 			code_props: token!("enum"),
@@ -308,12 +318,9 @@ impl<'a> Semantics<'a> {
 		{
 			return Self::from_source_impl(location.source(), lsp, original_source);
 		} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
-			return Ref::filter_map(
-				lsp.as_ref().unwrap().borrow(),
-				|lsp: &LSPData| {
-					lsp.semantic_data.get(&(source.clone() as Rc<dyn Source>))
-				},
-			)
+			return Ref::filter_map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| {
+				lsp.semantic_data.get(&(source.clone() as Rc<dyn Source>))
+			})
 			.ok()
 			.map(|sems| {
 				(
@@ -322,10 +329,9 @@ impl<'a> Semantics<'a> {
 						source,
 						original_source,
 					},
-					Ref::map(
-						lsp.as_ref().unwrap().borrow(),
-						|lsp: &LSPData| &lsp.semantic_tokens,
-					),
+					Ref::map(lsp.as_ref().unwrap().borrow(), |lsp: &LSPData| {
+						&lsp.semantic_tokens
+					}),
 				)
 			});
 		}
@@ -345,28 +351,22 @@ impl<'a> Semantics<'a> {
 	/// Method that should be called at the end of parsing
 	///
 	/// This function will process the end of the semantic queue
-	pub fn on_document_end(lsp: &'a Option<RefCell<LSPData>>, source: Rc<dyn Source>)
-	{
-		if source.content().is_empty()
-		{
+	pub fn on_document_end(lsp: &'a Option<RefCell<LSPData>>, source: Rc<dyn Source>) {
+		if source.content().is_empty() {
 			return;
 		}
 		let pos = source.original_position(source.content().len() - 1).1;
-		if let Some((sems, _)) = Self::from_source(source, lsp)
-		{
+		if let Some((sems, _)) = Self::from_source(source, lsp) {
 			sems.process_queue(pos);
 		}
 	}

 	/// Processes the semantic queue up to a certain position
-	fn process_queue(&self, pos: usize)
-	{
+	fn process_queue(&self, pos: usize) {
 		let mut queue = self.sems.semantic_queue.borrow_mut();
-		while !queue.is_empty()
-		{
+		while !queue.is_empty() {
 			let (range, token) = queue.front().unwrap();
-			if range.start > pos
-			{
+			if range.start > pos {
 				break;
 			}
@@ -375,8 +375,7 @@ impl<'a> Semantics<'a> {
 		}
 	}

-	fn add_impl(&self, range: Range<usize>, token: (u32, u32))
-	{
+	fn add_impl(&self, range: Range<usize>, token: (u32, u32)) {
 		let mut tokens = self.sems.tokens.borrow_mut();
 		let mut cursor = self.sems.cursor.borrow_mut();
 		let mut current = cursor.clone();
@@ -390,7 +389,7 @@ impl<'a> Semantics<'a> {
 			let len = usize::min(range.end - cursor.pos, end);
 			let clen = self.source.content()[cursor.pos..cursor.pos + len]
 				.chars()
-				.fold(0, |acc, c| acc + c.len_utf16());
+				.fold(0, |acc, _| acc + 1);

 			let delta_line = cursor.line - current.line;
 			let delta_start = if delta_line == 0 {
@@ -423,15 +422,11 @@ impl<'a> Semantics<'a> {
 		}
 	}

 	/// Add a semantic token to be processed in a future call to `add()`
-	pub fn add_to_queue(&self, range: Range<usize>, token: (u32, u32))
-	{
+	pub fn add_to_queue(&self, range: Range<usize>, token: (u32, u32)) {
 		let range = self.original_source.original_range(range).1;
 		let mut queue = self.sems.semantic_queue.borrow_mut();
-		match queue.binary_search_by_key(&range.start, |(range, _)| range.start)
-		{
-			Ok(pos) | Err(pos) => {
-				queue.insert(pos, (range, token))
-			},
+		match queue.binary_search_by_key(&range.start, |(range, _)| range.start) {
+			Ok(pos) | Err(pos) => queue.insert(pos, (range, token)),
 		}
 	}
 }
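
The `add_to_queue` cleanup also highlights the pattern used to keep the semantic queue ordered: `binary_search_by_key` returns `Ok(pos)` for an existing key and `Err(pos)` with the insertion point otherwise, so collapsing both arms with `Ok(pos) | Err(pos)` inserts in sorted order either way. The same pattern in isolation (illustrative types):

    fn insert_sorted(queue: &mut Vec<(usize, &'static str)>, item: (usize, &'static str)) {
        // Both arms yield a valid insertion index, so order is preserved.
        match queue.binary_search_by_key(&item.0, |(start, _)| *start) {
            Ok(pos) | Err(pos) => queue.insert(pos, item),
        }
    }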

@@ -8,10 +8,8 @@ use crate::parser::parser::Parser;
 use crate::parser::parser::ParserState;
 use crate::parser::source::Token;

 /// Redirected data from lua execution
-pub struct KernelRedirect
-{
+pub struct KernelRedirect {
 	/// Message source e.g print()
 	pub source: String,
 	/// Message content
@@ -26,12 +24,20 @@ pub struct KernelContext<'a, 'b, 'c> {
 }

 impl<'a, 'b, 'c> KernelContext<'a, 'b, 'c> {
-	pub fn new(location: Token, state: &'a ParserState<'a, 'b>, document: &'c dyn Document<'c>) -> Self {
-		Self { location, state, document, redirects: vec![] }
-	}
+	pub fn new(
+		location: Token,
+		state: &'a ParserState<'a, 'b>,
+		document: &'c dyn Document<'c>,
+	) -> Self {
+		Self {
+			location,
+			state,
+			document,
+			redirects: vec![],
+		}
+	}
 }

 thread_local! {
 	pub static CTX: RefCell<Option<&'static mut KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
 }
@@ -60,17 +66,23 @@ impl Kernel {
 			lua.globals().set("nml", nml_table).unwrap();
 		}

-		lua.globals().set("print", lua.create_function(|_, msg: String| {
-			CTX.with_borrow_mut(|ctx| {
-				ctx.as_mut().map(|ctx| {
-					ctx.redirects.push(KernelRedirect {
-						source: "print".into(),
-						content: msg,
-					});
-				});
-			});
-			Ok(())
-		}).unwrap()).unwrap();
+		lua.globals()
+			.set(
+				"print",
+				lua.create_function(|_, msg: String| {
+					CTX.with_borrow_mut(|ctx| {
+						ctx.as_mut().map(|ctx| {
+							ctx.redirects.push(KernelRedirect {
+								source: "print".into(),
+								content: msg,
+							});
+						});
+					});
+					Ok(())
+				})
+				.unwrap(),
+			)
+			.unwrap();

 		Self { lua }
 	}
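
The reflowed `print` override relies on a common pattern for Lua callbacks: a `'static` closure handed to `lua.create_function` cannot borrow parser state directly, so a thread-local `RefCell` (the `CTX` static) carries the active `KernelContext`. Stripped to its essentials (a sketch, not the project's code):

    use std::cell::RefCell;

    thread_local! {
        // Output captured while a script runs on this thread.
        static REDIRECTS: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
    }

    /// What the redirected `print` does with each message.
    fn capture_print(msg: String) {
        REDIRECTS.with_borrow_mut(|r| r.push(msg));
    }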

@@ -48,11 +48,17 @@ impl<'a> LangParser<'a> {
 		s
 	}

-	pub fn new(with_colors: bool, report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>) -> Self
-	{
+	pub fn new(
+		with_colors: bool,
+		report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>,
+	) -> Self {
 		let mut s = Self {
 			rules: vec![],
-			colors: if with_colors { ReportColors::with_colors() } else { ReportColors::without_colors() },
+			colors: if with_colors {
+				ReportColors::with_colors()
+			} else {
+				ReportColors::without_colors()
+			},
 			err_flag: RefCell::new(false),
 			report_handler,
 		};
@@ -219,8 +225,7 @@ impl<'b> Parser for LangParser<'b> {
 	}

 	/// Handles the reports produced by parsing.
-	fn handle_reports(&self, reports: Vec<Report>)
-	{
+	fn handle_reports(&self, reports: Vec<Report>) {
 		(self.report_handler)(self.colors(), reports);
 	}
 }

@@ -109,8 +109,7 @@ impl Report {
 		});
 	}

-	fn to_diagnostics(self, diagnostic_map: &DashMap<String, Vec<Diagnostic>>)
-	{
+	fn to_diagnostics(self, diagnostic_map: &DashMap<String, Vec<Diagnostic>>) {
 		for span in self.spans {
 			let (source, range) = span.token.source().original_range(span.token.range.clone());
@@ -121,8 +120,14 @@ impl Report {
 			let diag = Diagnostic {
 				range: tower_lsp::lsp_types::Range {
-					start: tower_lsp::lsp_types::Position{ line: start.line as u32, character: start.line_pos as u32 },
-					end: tower_lsp::lsp_types::Position{ line: end.line as u32, character: end.line_pos as u32 },
+					start: tower_lsp::lsp_types::Position {
+						line: start.line as u32,
+						character: start.line_pos as u32,
+					},
+					end: tower_lsp::lsp_types::Position {
+						line: end.line as u32,
+						character: end.line_pos as u32,
+					},
 				},
 				severity: Some((&self.kind).into()),
 				code: None,
@@ -133,21 +138,19 @@ impl Report {
 				tags: None,
 				data: None,
 			};
-			if let Some(mut diags) = diagnostic_map.get_mut(source.name())
-			{
+			if let Some(mut diags) = diagnostic_map.get_mut(source.name()) {
 				diags.push(diag);
-			}
-			else
-			{
+			} else {
 				diagnostic_map.insert(source.name().to_owned(), vec![diag]);
 			}
 		}
 	}
 }

-pub fn reports_to_diagnostics(diagnostic_map: &DashMap<String, Vec<Diagnostic>>, mut reports: Vec<Report>)
-{
-	for report in reports.drain(..)
-	{
+pub fn reports_to_diagnostics(
+	diagnostic_map: &DashMap<String, Vec<Diagnostic>>,
+	mut reports: Vec<Report>,
+) {
+	for report in reports.drain(..) {
 		report.to_diagnostics(diagnostic_map);
 	}
 	//diagnostics

@@ -17,16 +17,6 @@ pub trait Source: Downcast + Debug {
 }
 impl_downcast!(Source);

-pub trait SourcePosition {
-	/// Transforms a position to it's position in the oldest parent source
-	fn original_position(&self, pos: usize) -> (Rc<dyn Source>, usize);
-
-	/// Transforms a range to the oldest parent source
-	///
-	/// This function takes a range from a source and attempts to get the range's position in the oldest parent
-	fn original_range(&self, range: Range<usize>) -> (Rc<dyn Source>, Range<usize>);
-}
-
 impl core::fmt::Display for dyn Source {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		write!(f, "{}", self.name())
@@ -43,6 +33,7 @@ impl std::hash::Hash for dyn Source {
 	fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.name().hash(state) }
 }

+/// [`SourceFile`] is a type of [`Source`] that represents a real file.
 #[derive(Debug)]
 pub struct SourceFile {
 	location: Option<Token>,
@@ -82,6 +73,9 @@ impl Source for SourceFile {

 /// Stores the offsets in a virtual source
 ///
+/// The offsets are used to implement the [`SourcePosition`] trait, which allows diagnostics from
+/// a [`VirtualSource`] to propagate back to their corresponding [`SourceFile`].
+///
 /// # Example
 ///
 /// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent"
@@ -108,6 +102,8 @@ impl SourceOffset {
 	}
 }

+/// [`VirtualSource`] is a type of [`Source`] that represents a virtual file. A [`VirtualSource`]
+/// can be created from another [`VirtualSource`], but it must ultimately come from a [`SourceFile`].
 #[derive(Debug)]
 pub struct VirtualSource {
 	location: Token,
@@ -148,6 +144,34 @@ impl Source for VirtualSource {
 	fn content(&self) -> &String { &self.content }
 }

+/// Trait for accessing a position in a parent [`SourceFile`]
+///
+/// This trait is used to create precise error diagnostics and is the backbone of the LSP.
+///
+/// # Example
+///
+/// Given the following source file:
+/// ```
+/// input.nml:
+/// [*link*](url)
+/// ```
+/// When parsed, a [`VirtualSource`] is created for parsing the link display: `*link*`.
+/// If an error or a semantic highlight is requested for that new source, this trait allows
+/// recovering the original position in the parent [`SourceFile`].
+pub trait SourcePosition {
+	/// Transforms a position to the corresponding position in the oldest parent [`SourceFile`].
+	///
+	/// This function will return the first parent [`SourceFile`] as well as the position mapped
+	/// in that source
+	fn original_position(&self, pos: usize) -> (Rc<dyn Source>, usize);
+
+	/// Transforms a range to the corresponding range in the oldest parent [`SourceFile`].
+	///
+	/// This function will return the first parent [`SourceFile`] as well as the range mapped
+	/// in that source
+	fn original_range(&self, range: Range<usize>) -> (Rc<dyn Source>, Range<usize>);
+}
+
 impl SourcePosition for Rc<dyn Source> {
 	fn original_position(&self, mut pos: usize) -> (Rc<dyn Source>, usize) {
 		// Stop recursion
@@ -196,7 +220,10 @@ impl SourcePosition for Rc<dyn Source> {
 	}
 }

-#[derive(Debug)]
+/// Cursor in a file
+///
+/// Represents a position in a specific file.
+#[derive(Debug, Clone)]
 pub struct Cursor {
 	pub pos: usize,
 	pub source: Rc<dyn Source>,
@@ -214,18 +241,13 @@ impl Cursor {
 	}
 }

-impl Clone for Cursor {
-	fn clone(&self) -> Self {
-		Self {
-			pos: self.pos,
-			source: self.source.clone(),
-		}
-	}
-
-	fn clone_from(&mut self, source: &Self) { *self = source.clone() }
-}
-
 /// Cursor type used for the language server
+///
+/// # Notes
+///
+/// Because the LSP uses UTF-16 encoded positions, the field [`line_pos`] corresponds to the
+/// UTF-16 distance between the first character (position = 0 or just after '\n') and the
+/// character at the current position.
 #[derive(Debug, Clone)]
 pub struct LineCursor {
 	/// Byte position in the source
@@ -252,6 +274,7 @@ impl LineCursor {
 	/// Moves [`LineCursor`] to an absolute byte position
 	///
 	/// # Error
+	///
 	/// This function will panic if [`pos`] is not utf8 aligned
 	pub fn move_to(&mut self, pos: usize) {
 		if self.pos < pos {
@@ -283,6 +306,7 @@ impl LineCursor {
 	}
 }

+/// A token is a [`Range<usize>`] in a [`Source`]
 #[derive(Debug, Clone)]
 pub struct Token {
 	pub range: Range<usize>,
@@ -294,16 +318,6 @@ impl Token {

 	pub fn source(&self) -> Rc<dyn Source> { self.source.clone() }

-	/// Construct Token from a range
-	pub fn from(start: &Cursor, end: &Cursor) -> Self {
-		assert!(Rc::ptr_eq(&start.source, &end.source));
-
-		Self {
-			range: start.pos..end.pos,
-			source: start.source.clone(),
-		}
-	}
-
 	pub fn start(&self) -> usize { self.range.start }

 	pub fn end(&self) -> usize { self.range.end }
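
The relocated `SourcePosition` docs are easier to picture with the `SourceOffset` example already in this file: unescaping "Con\]tent" into "Con]tent" records that positions at or past the `]` sit one byte further in the parent source. A hypothetical, self-contained version of that lookup:

    /// (position in the virtual source, byte offset to add in the parent) - illustrative.
    struct Offsets(Vec<(usize, isize)>);

    impl Offsets {
        /// Map a position in the virtual source back to its parent source.
        fn original_position(&self, pos: usize) -> usize {
            // The last recorded offset at or before `pos` applies.
            let off = self
                .0
                .iter()
                .rev()
                .find(|(p, _)| *p <= pos)
                .map_or(0, |(_, o)| *o);
            (pos as isize + off) as usize
        }
    }

For the example above, `Offsets(vec![(3, 1)]).original_position(3)` yields 4: the `]` sits at position 3 in the virtual source and position 4 in "Con\]tent".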

@@ -51,9 +51,12 @@ impl Backend {
 		// Diagnostics
 		self.diagnostic_map.clear();
-		let parser = LangParser::new(false, Box::new(
-			|_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports)
-		));
+		let parser = LangParser::new(
+			false,
+			Box::new(|_colors, reports| {
+				Report::reports_to_diagnostics(&self.diagnostic_map, reports)
+			}),
+		);

 		// Parse
 		let (_doc, state) = parser.parse(
 			ParserState::new_with_semantics(&parser, None),
@@ -88,8 +91,7 @@ impl Backend {
 			.ok()
 			.map(|source| source.path().to_owned())
 		{
-			self.hints_map
-				.insert(path, hints.hints.replace(vec![]));
+			self.hints_map.insert(path, hints.hints.replace(vec![]));
 		}
 	}
 }
@@ -139,15 +141,14 @@ impl LanguageServer for Backend {
 				},
 			),
 		),
-		diagnostic_provider: Some(
-			DiagnosticServerCapabilities::Options(
-				DiagnosticOptions {
-					identifier: None,
-					inter_file_dependencies: true,
-					workspace_diagnostics: true,
-					work_done_progress_options: WorkDoneProgressOptions::default(),
-				})
-		),
+		diagnostic_provider: Some(DiagnosticServerCapabilities::Options(
+			DiagnosticOptions {
+				identifier: None,
+				inter_file_dependencies: true,
+				workspace_diagnostics: true,
+				work_done_progress_options: WorkDoneProgressOptions::default(),
+			},
+		)),
 		inlay_hint_provider: Some(OneOf::Left(true)),
 		..ServerCapabilities::default()
 	},
@@ -226,25 +227,25 @@ impl LanguageServer for Backend {
 		&self,
 		params: DocumentDiagnosticParams,
 	) -> tower_lsp::jsonrpc::Result<DocumentDiagnosticReportResult> {
-		Ok(
-			DocumentDiagnosticReportResult::Report(
-				DocumentDiagnosticReport::Full(
-					RelatedFullDocumentDiagnosticReport {
-						related_documents: None,
-						full_document_diagnostic_report: FullDocumentDiagnosticReport {
-							result_id: None,
-							items: self.diagnostic_map.get(params.text_document.uri.as_str()).map_or(vec![], |v| v.to_owned())
-						}
-					}
-				)
-			)
-		)
+		Ok(DocumentDiagnosticReportResult::Report(
+			DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
+				related_documents: None,
+				full_document_diagnostic_report: FullDocumentDiagnosticReport {
+					result_id: None,
+					items: self
+						.diagnostic_map
+						.get(params.text_document.uri.as_str())
+						.map_or(vec![], |v| v.to_owned()),
+				},
+			}),
+		))
 	}

-	async fn inlay_hint(&self, params: InlayHintParams) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>>
-	{
-		if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str())
-		{
+	async fn inlay_hint(
+		&self,
+		params: InlayHintParams,
+	) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
+		if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str()) {
 			let (_, data) = hints.pair();
 			return Ok(Some(data.to_owned()));