From dc358a6a6657a95c38085df982d577851352305a Mon Sep 17 00:00:00 2001
From: ef3d0c3e
Date: Thu, 24 Oct 2024 14:04:04 +0200
Subject: [PATCH] Diagnostics

---
 src/elements/layout.rs   | 30 +++++++++++----
 src/parser/langparser.rs | 33 ++++++++++++++--
 src/parser/parser.rs     | 82 +++++++---------------------------------
 src/parser/reports.rs    | 64 +++++++++++++++++++++++++++++--
 src/server.rs            | 36 +++++++++++++++++-
 5 files changed, 161 insertions(+), 84 deletions(-)

diff --git a/src/elements/layout.rs b/src/elements/layout.rs
index 2dafe16..7e24bd6 100644
--- a/src/elements/layout.rs
+++ b/src/elements/layout.rs
@@ -341,7 +341,7 @@ impl LayoutRule {
 				token.source(),
 				"Invalid Layout Properties".into(),
 				span(
-					token.range.clone(),
+					token.start() + 1..token.end(),
 					format!("Layout is missing required property: {err}")
 				)
 			);
@@ -400,7 +400,7 @@ impl RegexRule for LayoutRule {
 					token.source(),
 					"Missing Layout Name".into(),
 					span(
-						token.range.clone(),
+						token.start() + 1..token.end(),
 						format!(
 							"Missing layout name after `{}`",
 							"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
@@ -534,7 +534,10 @@ impl RegexRule for LayoutRule {
 						&mut reports,
 						token.source(),
 						"Invalid #+LAYOUT_NEXT".into(),
-						span(token.range.clone(), "No active layout found".into())
+						span(
+							token.start() + 1..token.end(),
+							"No active layout found".into()
+						)
 					);
 					return reports;
 				}
@@ -549,7 +552,7 @@ impl RegexRule for LayoutRule {
 						token.source(),
 						"Unexpected #+LAYOUT_NEXT".into(),
 						span(
-							token.range.clone(),
+							token.start() + 1..token.end(),
 							format!(
 								"Layout expects a maximum of {} blocks, currently at {}",
 								layout_type.expects().end.fg(state.parser.colors().info),
@@ -607,8 +610,11 @@ impl RegexRule for LayoutRule {
 					report_err!(
 						&mut reports,
 						token.source(),
-						"Invalid #+LAYOUT_NEXT".into(),
-						span(token.range.clone(), "No active layout found".into())
+						"Invalid #+LAYOUT_END".into(),
+						span(
+							token.start() + 1..token.end(),
+							"No active layout found".into()
+						)
 					);
 					return reports;
 				}
@@ -618,17 +624,25 @@ impl RegexRule for LayoutRule {
 				if layout_type.expects().start > tokens.len()
 				// Not enough blocks
 				{
+					let start = &tokens[0];
 					report_err!(
 						&mut reports,
 						token.source(),
-						"Unexpected #+LAYOUT_NEXT".into(),
+						"Unexpected #+LAYOUT_END".into(),
 						span(
-							token.range.clone(),
+							token.start() + 1..token.end(),
 							format!(
 								"Layout expects a minimum of {} blocks, currently at {}",
 								layout_type.expects().start.fg(state.parser.colors().info),
 								tokens.len().fg(state.parser.colors().info),
 							)
+						),
+						span(
+							start.source(),
+							start.start() + 1..start.end(),
+							format!(
+								"Layout begins here",
+							)
+						)
 					);
 					return reports;
diff --git a/src/parser/langparser.rs b/src/parser/langparser.rs
index 81d7f5e..6f2e382 100644
--- a/src/parser/langparser.rs
+++ b/src/parser/langparser.rs
@@ -11,6 +11,7 @@ use super::parser::ParseMode;
 use super::parser::Parser;
 use super::parser::ParserState;
 use super::parser::ReportColors;
+use super::reports::Report;
 use super::rule::Rule;
 use super::source::Cursor;
 use super::source::Source;
@@ -19,21 +20,39 @@ use super::source::Token;
 use super::util;
 
 /// Parser for the language
-#[derive(Debug)]
-pub struct LangParser {
+pub struct LangParser<'a> {
 	rules: Vec<Box<dyn Rule>>,
 	colors: ReportColors,
+	report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>,
 
 	// Parser state
 	pub err_flag: RefCell<bool>,
 }
 
-impl LangParser {
+impl<'a> LangParser<'a> {
 	pub fn default() -> Self {
 		let mut s = Self {
 			rules: vec![],
 			colors: ReportColors::with_colors(),
 			err_flag: RefCell::new(false),
+			report_handler: Box::new(Report::reports_to_stdout),
 		};
 
 		// Register rules
 		for rule in super::rule::get_rule_registry() {
 			s.add_rule(rule).unwrap();
 		}
 
 		s
 	}
+
+	pub fn new(with_colors: bool, report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>) -> Self
+	{
+		let mut s = Self {
+			rules: vec![],
+			colors: if with_colors { ReportColors::with_colors() } else { ReportColors::without_colors() },
+			err_flag: RefCell::new(false),
+			report_handler,
+		};
+
+		// Register rules
+		for rule in super::rule::get_rule_registry() {
+			s.add_rule(rule).unwrap();
+		}
+
+		s
+	}
 }
 
-impl Parser for LangParser {
+impl<'b> Parser for LangParser<'b> {
 	fn colors(&self) -> &ReportColors { &self.colors }
 
 	fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
@@ -189,4 +208,10 @@ impl Parser for LangParser {
 
 		//return doc;
 	}
+
+	/// Handles the reports produced by parsing.
+	fn handle_reports(&self, reports: Vec<Report>)
+	{
+		(self.report_handler)(self.colors(), reports);
+	}
 }
diff --git a/src/parser/parser.rs b/src/parser/parser.rs
index 8c51e4e..42c71a7 100644
--- a/src/parser/parser.rs
+++ b/src/parser/parser.rs
@@ -24,28 +24,28 @@ use ariadne::Color;
 
 #[derive(Debug)]
 pub struct ReportColors {
-	pub error: Color,
-	pub warning: Color,
-	pub info: Color,
-	pub highlight: Color,
+	pub error: Option<Color>,
+	pub warning: Option<Color>,
+	pub info: Option<Color>,
+	pub highlight: Option<Color>,
 }
 
 impl ReportColors {
 	pub fn with_colors() -> Self {
 		Self {
-			error: Color::Red,
-			warning: Color::Yellow,
-			info: Color::BrightBlue,
-			highlight: Color::BrightMagenta,
+			error: Some(Color::Red),
+			warning: Some(Color::Yellow),
+			info: Some(Color::BrightBlue),
+			highlight: Some(Color::BrightMagenta),
 		}
 	}
 
 	pub fn without_colors() -> Self {
 		Self {
-			error: Color::Primary,
-			warning: Color::Primary,
-			info: Color::Primary,
-			highlight: Color::Primary,
+			error: None,
+			warning: None,
+			info: None,
+			highlight: None,
 		}
 	}
 }
@@ -420,60 +420,6 @@ pub trait Parser {
 		Ok(())
 	}
 
-	/// Handles the reports produced by parsing. The default is to output them
-	/// to stderr, but you are free to modify it.
-	fn handle_reports(&self, reports: Vec<Report>) {
-		Report::reports_to_stdout(self.colors(), reports);
-		//todo!(); // TODO
-		/*
-		for mut report in reports {
-			let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
-			fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
-				sources.insert(source.clone());
-				if let Some(parent) = source.location() {
-					let parent_source = parent.source();
-					if sources.get(&parent_source).is_none() {
-						recurse_source(sources, parent_source);
-					}
-				}
-			}
-
-			report.labels.iter().for_each(|label| {
-				recurse_source(&mut sources, label.span.0.clone());
-			});
-
-			let cache = sources
-				.iter()
-				.map(|source| (source.clone(), source.content().clone()))
-				.collect::<Vec<(Rc<dyn Source>, String)>>();
-
-			cache.iter().for_each(|(source, _)| {
-				if let Some(location) = source.location() {
-					if let Some(_s) = source.downcast_ref::<SourceFile>() {
-						report.labels.push(
-							Label::new((location.source(), location.start() + 1..location.end()))
-								.with_message("In file included from here")
-								.with_order(-1),
-						);
-					};
-
-					if let Some(_s) = source.downcast_ref::<VirtualSource>() {
-						let start = location.start()
-							+ if location.source().content().as_bytes()[location.start()] == b'\n' {
-								1
-							} else {
-								0
-							};
-						report.labels.push(
-							Label::new((location.source(), start..location.end()))
-								.with_message("In evaluation of")
-								.with_order(-1),
-						);
-					};
-				}
-			});
-			report.eprint(ariadne::sources(cache)).unwrap()
-		}
-		*/
-	}
+	/// Handles the reports produced by parsing.
+	fn handle_reports(&self, reports: Vec<Report>);
 }
diff --git a/src/parser/reports.rs b/src/parser/reports.rs
index a197f65..0f6c155 100644
--- a/src/parser/reports.rs
+++ b/src/parser/reports.rs
@@ -2,6 +2,11 @@ use std::collections::HashMap;
 use std::ops::Range;
 use std::rc::Rc;
 
+use dashmap::DashMap;
+use tower_lsp::lsp_types::Diagnostic;
+
+use crate::parser::source::LineCursor;
+
 use super::parser::ReportColors;
 use super::source::Source;
 use super::source::SourcePosition;
@@ -22,6 +27,15 @@ impl From<&ReportKind> for ariadne::ReportKind<'static> {
 	}
 }
 
+impl From<&ReportKind> for tower_lsp::lsp_types::DiagnosticSeverity {
+	fn from(val: &ReportKind) -> Self {
+		match val {
+			ReportKind::Error => tower_lsp::lsp_types::DiagnosticSeverity::ERROR,
+			ReportKind::Warning => tower_lsp::lsp_types::DiagnosticSeverity::WARNING,
+		}
+	}
+}
+
 #[derive(Debug)]
 pub struct ReportSpan {
 	pub token: Token,
@@ -41,12 +55,12 @@ pub struct Report {
 impl Report {
 	fn ariadne_color(kind: &ReportKind, colors: &ReportColors) -> ariadne::Color {
 		match kind {
-			ReportKind::Error => colors.error,
-			ReportKind::Warning => colors.warning,
+			ReportKind::Error => colors.error.unwrap_or(ariadne::Color::Primary),
+			ReportKind::Warning => colors.warning.unwrap_or(ariadne::Color::Primary),
 		}
 	}
 
-	pub fn to_ariadne(
+	fn to_ariadne(
 		self,
 		colors: &ReportColors,
 	) -> (
@@ -94,6 +108,50 @@ impl Report {
 			report.eprint(cache).unwrap();
 		});
 	}
+
+	fn to_diagnostics(self, diagnostic_map: &DashMap<String, Vec<Diagnostic>>)
+	{
+		for span in self.spans {
+			let (source, range) = span.token.source().original_range(span.token.range.clone());
+
+			let mut start = LineCursor::new(source.clone());
+			start.move_to(range.start);
+			let mut end = start.clone();
+			end.move_to(range.end);
+
+			let diag = Diagnostic {
+				range: tower_lsp::lsp_types::Range {
+					start: tower_lsp::lsp_types::Position{ line: start.line as u32, character: start.line_pos as u32 },
+					end: tower_lsp::lsp_types::Position{ line: end.line as u32, character: end.line_pos as u32 },
+				},
+				severity: Some((&self.kind).into()),
+				code: None,
+				code_description: None,
+				source: None,
+				message: format!("{}: {}", self.message, span.message),
+				related_information: None,
+				tags: None,
+				data: None,
+			};
+			if let Some(mut diags) = diagnostic_map.get_mut(source.name())
+			{
+				diags.push(diag);
+			}
+			else
+			{
+				diagnostic_map.insert(source.name().to_owned(), vec![diag]);
+			}
+		}
+	}
+
+	pub fn reports_to_diagnostics(diagnostic_map: &DashMap<String, Vec<Diagnostic>>, mut reports: Vec<Report>)
+	{
+		for report in reports.drain(..)
+		{
+			report.to_diagnostics(diagnostic_map);
+		}
+		//diagnostics
+	}
 }
 
 pub mod macros {
diff --git a/src/server.rs b/src/server.rs
index 60127ae..f590c3f 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -13,6 +13,7 @@ use parser::langparser::LangParser;
 use parser::parser::ParseMode;
 use parser::parser::Parser;
 use parser::parser::ParserState;
+use parser::reports::Report;
 use parser::source::SourceFile;
 use tower_lsp::lsp_types::*;
 use tower_lsp::Client;
@@ -25,6 +26,7 @@ struct Backend {
 	client: Client,
 	document_map: DashMap<String, String>,
 	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
+	diagnostic_map: DashMap<String, Vec<Diagnostic>>,
 }
 
 #[derive(Debug)]
@@ -45,7 +47,10 @@ impl Backend {
 			params.text.clone(),
 			None,
 		));
-		let parser = LangParser::default();
+		self.diagnostic_map.clear();
+		let parser = LangParser::new(false, Box::new(
+			|_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports)
+		));
 		let (_doc, state) = parser.parse(
 			ParserState::new_with_semantics(&parser, None),
 			source.clone(),
@@ -113,6 +118,15 @@ impl LanguageServer for Backend {
 					},
 				),
 			),
+			diagnostic_provider: Some(
+				DiagnosticServerCapabilities::Options(
+					DiagnosticOptions {
+						identifier: None,
+						inter_file_dependencies: true,
+						workspace_diagnostics: true,
+						work_done_progress_options: WorkDoneProgressOptions::default(),
+					})
+			),
 			..ServerCapabilities::default()
 		},
 		server_info: Some(ServerInfo {
@@ -185,6 +199,25 @@ impl LanguageServer for Backend {
 		}
 		Ok(None)
 	}
+
+	async fn diagnostic(
+		&self,
+		params: DocumentDiagnosticParams,
+	) -> tower_lsp::jsonrpc::Result<DocumentDiagnosticReportResult> {
+		Ok(
+			DocumentDiagnosticReportResult::Report(
+				DocumentDiagnosticReport::Full(
+					RelatedFullDocumentDiagnosticReport {
+						related_documents: None,
+						full_document_diagnostic_report: FullDocumentDiagnosticReport {
+							result_id: None,
+							items: self.diagnostic_map.get(params.text_document.uri.as_str()).map_or(vec![], |v| v.to_owned())
+						}
+					}
+				)
+			)
+		)
+	}
 }
 
 #[tokio::main]
@@ -196,6 +229,7 @@ async fn main() {
 		client,
 		document_map: DashMap::new(),
 		semantic_token_map: DashMap::new(),
+		diagnostic_map: DashMap::new(),
 	});
 	Server::new(stdin, stdout, socket).serve(service).await;
 }