Diagnostics
parent 8cd3e8e05b
commit dc358a6a66
5 changed files with 161 additions and 84 deletions

```diff
@@ -341,7 +341,7 @@ impl LayoutRule {
             token.source(),
             "Invalid Layout Properties".into(),
             span(
-                token.range.clone(),
+                token.start() + 1..token.end(),
                 format!("Layout is missing required property: {err}")
             )
         );
```

```diff
@@ -400,7 +400,7 @@ impl RegexRule for LayoutRule {
             token.source(),
             "Missing Layout Name".into(),
             span(
-                token.range.clone(),
+                token.start() + 1..token.end(),
                 format!(
                     "Missing layout name after `{}`",
                     "#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
```

```diff
@@ -534,7 +534,10 @@ impl RegexRule for LayoutRule {
                 &mut reports,
                 token.source(),
                 "Invalid #+LAYOUT_NEXT".into(),
-                span(token.range.clone(), "No active layout found".into())
+                span(
+                    token.start() + 1..token.end(),
+                    "No active layout found".into()
+                )
             );
             return reports;
         }
```

```diff
@@ -549,7 +552,7 @@ impl RegexRule for LayoutRule {
                 token.source(),
                 "Unexpected #+LAYOUT_NEXT".into(),
                 span(
-                    token.range.clone(),
+                    token.start() + 1..token.end(),
                     format!(
                         "Layout expects a maximum of {} blocks, currently at {}",
                         layout_type.expects().end.fg(state.parser.colors().info),
```

```diff
@@ -607,8 +610,11 @@ impl RegexRule for LayoutRule {
             report_err!(
                 &mut reports,
                 token.source(),
-                "Invalid #+LAYOUT_NEXT".into(),
-                span(token.range.clone(), "No active layout found".into())
+                "Invalid #+LAYOUT_END".into(),
+                span(
+                    token.start() + 1..token.end(),
+                    "No active layout found".into()
+                )
             );
             return reports;
         }
```

```diff
@@ -618,17 +624,25 @@ impl RegexRule for LayoutRule {
         if layout_type.expects().start > tokens.len()
         // Not enough blocks
         {
+            let start = &tokens[0];
             report_err!(
                 &mut reports,
                 token.source(),
-                "Unexpected #+LAYOUT_NEXT".into(),
+                "Unexpected #+LAYOUT_END".into(),
                 span(
-                    token.range.clone(),
+                    token.start() + 1..token.end(),
                     format!(
                         "Layout expects a minimum of {} blocks, currently at {}",
                         layout_type.expects().start.fg(state.parser.colors().info),
                         tokens.len().fg(state.parser.colors().info),
                     )
                 ),
+                span(
+                    start.source(),
+                    start.start() + 1.. start.end(),
+                    format!(
+                        "Layout begins here",
+                    )
+                )
             );
             return reports;
```

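Taken together, the LayoutRule hunks above do three things: report labels now use an explicit `token.start() + 1..token.end()` range instead of `token.range.clone()`, the `#+LAYOUT_END` messages no longer claim to be about `#+LAYOUT_NEXT`, and the "not enough blocks" error gains a second span pointing back at the opening `#+BEGIN_LAYOUT`. A self-contained sketch of what such a two-span report carries, using stand-in types that only loosely mirror the `Report` and `ReportSpan` shapes visible in `reports.rs` later in this commit (plain ranges substitute for tokens; everything here is illustrative, not the project's real code):

```rust
// Stand-in types for illustration only; not the project's real `Report`/`ReportSpan`.
struct SpanSketch { range: std::ops::Range<usize>, message: String }
struct ReportSketch { message: String, spans: Vec<SpanSketch> }

let end_token = 120..135; // hypothetical range of the offending `#+LAYOUT_END`
let begin_token = 40..60; // hypothetical range of the `#+BEGIN_LAYOUT` that opened it

let _report = ReportSketch {
    message: "Unexpected #+LAYOUT_END".into(),
    spans: vec![
        // Primary label, with the `start + 1..end` trimming now used throughout this file.
        SpanSketch {
            range: end_token.start + 1..end_token.end,
            message: "Layout expects a minimum of 2 blocks, currently at 1".into(),
        },
        // Secondary label added by this commit: points back at the opening token.
        SpanSketch {
            range: begin_token.start + 1..begin_token.end,
            message: "Layout begins here".into(),
        },
    ],
};
```
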
```diff
@@ -11,6 +11,7 @@ use super::parser::ParseMode;
 use super::parser::Parser;
 use super::parser::ParserState;
 use super::parser::ReportColors;
+use super::reports::Report;
 use super::rule::Rule;
 use super::source::Cursor;
 use super::source::Source;
```

```diff
@@ -19,21 +20,39 @@ use super::source::Token;
 use super::util;
 
 /// Parser for the language
 #[derive(Debug)]
-pub struct LangParser {
+pub struct LangParser<'a> {
     rules: Vec<Box<dyn Rule>>,
     colors: ReportColors,
+    report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>,
 
     // Parser state
     pub err_flag: RefCell<bool>,
 }
 
-impl LangParser {
+impl<'a> LangParser<'a> {
     pub fn default() -> Self {
         let mut s = Self {
             rules: vec![],
             colors: ReportColors::with_colors(),
             err_flag: RefCell::new(false),
+            report_handler: Box::new(Report::reports_to_stdout),
         };
 
         // Register rules
         for rule in super::rule::get_rule_registry() {
             s.add_rule(rule).unwrap();
         }
 
         s
     }
 
+    pub fn new(with_colors: bool, report_handler: Box<dyn Fn(&ReportColors, Vec<Report>) + 'a>) -> Self
+    {
+        let mut s = Self {
+            rules: vec![],
+            colors: if with_colors { ReportColors::with_colors() } else { ReportColors::without_colors() },
+            err_flag: RefCell::new(false),
+            report_handler,
+        };
+
+        // Register rules
```

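With the new `LangParser::new`, callers decide what happens to reports; `default()` keeps the old stdout behaviour via `Report::reports_to_stdout`. A minimal caller-side sketch (my own example, not part of the commit): collect reports into a buffer, which the `'a` bound on the handler allows because the closure may borrow local state:

```rust
use std::cell::RefCell;

// Gather every report produced while parsing instead of printing it.
let collected: RefCell<Vec<Report>> = RefCell::new(Vec::new());

let parser = LangParser::new(
    false, // plain messages: ReportColors::without_colors() is used internally
    Box::new(|_colors, mut reports| collected.borrow_mut().append(&mut reports)),
);

// ... run parser.parse(...) as usual; assuming the parser routes reports through
// `handle_reports`, they end up in `collected` rather than on stdout.
drop(parser); // releases the closure's borrow of `collected`
println!("{} report(s) collected", collected.borrow().len());
```
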
```diff
@@ -45,7 +64,7 @@ impl LangParser {
     }
 }
 
-impl Parser for LangParser {
+impl<'b> Parser for LangParser<'b> {
     fn colors(&self) -> &ReportColors { &self.colors }
 
     fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
```

```diff
@@ -189,4 +208,10 @@ impl Parser for LangParser {
 
         //return doc;
     }
+
+    /// Handles the reports produced by parsing.
+    fn handle_reports(&self, reports: Vec<Report>)
+    {
+        (self.report_handler)(self.colors(), reports);
+    }
 }
```

```diff
@@ -24,28 +24,28 @@ use ariadne::Color;
 
 #[derive(Debug)]
 pub struct ReportColors {
-    pub error: Color,
-    pub warning: Color,
-    pub info: Color,
-    pub highlight: Color,
+    pub error: Option<Color>,
+    pub warning: Option<Color>,
+    pub info: Option<Color>,
+    pub highlight: Option<Color>,
 }
 
 impl ReportColors {
     pub fn with_colors() -> Self {
         Self {
-            error: Color::Red,
-            warning: Color::Yellow,
-            info: Color::BrightBlue,
-            highlight: Color::BrightMagenta,
+            error: Some(Color::Red),
+            warning: Some(Color::Yellow),
+            info: Some(Color::BrightBlue),
+            highlight: Some(Color::BrightMagenta),
         }
     }
 
     pub fn without_colors() -> Self {
         Self {
-            error: Color::Primary,
-            warning: Color::Primary,
-            info: Color::Primary,
-            highlight: Color::Primary,
+            error: None,
+            warning: None,
+            info: None,
+            highlight: None,
         }
     }
 }
```

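"No color" is now represented by `None` rather than by overloading `Color::Primary`, so consumers choose their own fallback; the `reports.rs` hunk further down resolves it with `unwrap_or(ariadne::Color::Primary)`. A small sketch of that resolution, assuming nothing beyond what is shown here:

```rust
use ariadne::Color;

// Resolve an optional report color, falling back to the terminal default,
// which is what the old `without_colors()` values encoded implicitly.
fn resolve(color: Option<Color>) -> Color {
    color.unwrap_or(Color::Primary)
}

let colored = ReportColors::with_colors();
let plain = ReportColors::without_colors();
let _red = resolve(colored.error);   // Color::Red
let _default = resolve(plain.error); // Color::Primary
```
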
```diff
@@ -420,60 +420,6 @@ pub trait Parser {
         Ok(())
     }
 
-    /// Handles the reports produced by parsing. The default is to output them
-    /// to stderr, but you are free to modify it.
-    fn handle_reports(&self, reports: Vec<Report>) {
-        Report::reports_to_stdout(self.colors(), reports);
-        //todo!(); // TODO
-        /*
-        for mut report in reports {
-            let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
-            fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
-                sources.insert(source.clone());
-                if let Some(parent) = source.location() {
-                    let parent_source = parent.source();
-                    if sources.get(&parent_source).is_none() {
-                        recurse_source(sources, parent_source);
-                    }
-                }
-            }
-
-            report.labels.iter().for_each(|label| {
-                recurse_source(&mut sources, label.span.0.clone());
-            });
-
-            let cache = sources
-                .iter()
-                .map(|source| (source.clone(), source.content().clone()))
-                .collect::<Vec<(Rc<dyn Source>, String)>>();
-
-            cache.iter().for_each(|(source, _)| {
-                if let Some(location) = source.location() {
-                    if let Some(_s) = source.downcast_ref::<SourceFile>() {
-                        report.labels.push(
-                            Label::new((location.source(), location.start() + 1..location.end()))
-                                .with_message("In file included from here")
-                                .with_order(-1),
-                        );
-                    };
-
-                    if let Some(_s) = source.downcast_ref::<VirtualSource>() {
-                        let start = location.start()
-                            + if location.source().content().as_bytes()[location.start()] == b'\n' {
-                                1
-                            } else {
-                                0
-                            };
-                        report.labels.push(
-                            Label::new((location.source(), start..location.end()))
-                                .with_message("In evaluation of")
-                                .with_order(-1),
-                        );
-                    };
-                }
-            });
-            report.eprint(ariadne::sources(cache)).unwrap()
-        }
-        */
-    }
+    /// Handles the reports produced by parsing.
+    fn handle_reports(&self, reports: Vec<Report>);
 }
```

```diff
@@ -2,6 +2,11 @@ use std::collections::HashMap;
 use std::ops::Range;
 use std::rc::Rc;
 
+use dashmap::DashMap;
+use tower_lsp::lsp_types::Diagnostic;
+
+use crate::parser::source::LineCursor;
+
 use super::parser::ReportColors;
 use super::source::Source;
 use super::source::SourcePosition;
```

```diff
@@ -22,6 +27,15 @@ impl From<&ReportKind> for ariadne::ReportKind<'static> {
     }
 }
 
+impl From<&ReportKind> for tower_lsp::lsp_types::DiagnosticSeverity {
+    fn from(val: &ReportKind) -> Self {
+        match val {
+            ReportKind::Error => tower_lsp::lsp_types::DiagnosticSeverity::ERROR,
+            ReportKind::Warning => tower_lsp::lsp_types::DiagnosticSeverity::WARNING,
+        }
+    }
+}
+
 #[derive(Debug)]
 pub struct ReportSpan {
     pub token: Token,
```

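This gives each `ReportKind` a direct LSP severity, which `to_diagnostics` below uses as `severity: Some((&self.kind).into())`. A quick usage sketch:

```rust
use tower_lsp::lsp_types::DiagnosticSeverity;

// ReportKind -> DiagnosticSeverity via the new From impl.
let severity: DiagnosticSeverity = (&ReportKind::Warning).into();
assert_eq!(severity, DiagnosticSeverity::WARNING);
```
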
```diff
@@ -41,12 +55,12 @@ pub struct Report {
 impl Report {
     fn ariadne_color(kind: &ReportKind, colors: &ReportColors) -> ariadne::Color {
         match kind {
-            ReportKind::Error => colors.error,
-            ReportKind::Warning => colors.warning,
+            ReportKind::Error => colors.error.unwrap_or(ariadne::Color::Primary),
+            ReportKind::Warning => colors.warning.unwrap_or(ariadne::Color::Primary),
         }
     }
 
-    pub fn to_ariadne(
+    fn to_ariadne(
         self,
         colors: &ReportColors,
     ) -> (
```

```diff
@@ -94,6 +108,50 @@ impl Report {
             report.eprint(cache).unwrap();
         });
     }
+
+    fn to_diagnostics(self, diagnostic_map: &DashMap<String, Vec<Diagnostic>>)
+    {
+        for span in self.spans {
+            let (source, range) = span.token.source().original_range(span.token.range.clone());
+
+            let mut start = LineCursor::new(source.clone());
+            start.move_to(range.start);
+            let mut end = start.clone();
+            end.move_to(range.end);
+
+            let diag = Diagnostic {
+                range: tower_lsp::lsp_types::Range {
+                    start: tower_lsp::lsp_types::Position{ line: start.line as u32, character: start.line_pos as u32 },
+                    end: tower_lsp::lsp_types::Position{ line: end.line as u32, character: end.line_pos as u32 },
+                },
+                severity: Some((&self.kind).into()),
+                code: None,
+                code_description: None,
+                source: None,
+                message: format!("{}: {}", self.message, span.message),
+                related_information: None,
+                tags: None,
+                data: None,
+            };
+            if let Some(mut diags) = diagnostic_map.get_mut(source.name())
+            {
+                diags.push(diag);
+            }
+            else
+            {
+                diagnostic_map.insert(source.name().to_owned(), vec![diag]);
+            }
+        }
+    }
+
+    pub fn reports_to_diagnostics(diagnostic_map: &DashMap<String, Vec<Diagnostic>>, mut reports: Vec<Report>)
+    {
+        for report in reports.drain(..)
+        {
+            report.to_diagnostics(diagnostic_map);
+        }
+        //diagnostics
+    }
 }
 
 pub mod macros {
```

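`to_diagnostics` converts each span's token range into an LSP `Range` through `LineCursor` and groups the resulting `Diagnostic`s by `source.name()`; `reports_to_diagnostics` is the batch entry point the language server hands to the parser. A sketch of how the map is meant to be filled and read back (the key is hypothetical; `reports` stands for the output of a parse):

```rust
use dashmap::DashMap;
use tower_lsp::lsp_types::Diagnostic;

// Shared map owned by the language server (see `Backend::diagnostic_map` below).
let diagnostic_map: DashMap<String, Vec<Diagnostic>> = DashMap::new();

// Convert every report produced by a parse into per-document diagnostics.
Report::reports_to_diagnostics(&diagnostic_map, reports);

// Later, when answering a pull-diagnostics request, look the document up by name:
let _items: Vec<Diagnostic> = diagnostic_map
    .get("file:///example.nml") // hypothetical key; the real code keys by `source.name()`
    .map_or(vec![], |v| v.to_owned());
```
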
```diff
@@ -13,6 +13,7 @@ use parser::langparser::LangParser;
 use parser::parser::ParseMode;
 use parser::parser::Parser;
 use parser::parser::ParserState;
+use parser::reports::Report;
 use parser::source::SourceFile;
 use tower_lsp::lsp_types::*;
 use tower_lsp::Client;
```

```diff
@@ -25,6 +26,7 @@ struct Backend {
     client: Client,
     document_map: DashMap<String, String>,
     semantic_token_map: DashMap<String, Vec<SemanticToken>>,
+    diagnostic_map: DashMap<String, Vec<Diagnostic>>,
 }
 
 #[derive(Debug)]
```

```diff
@@ -45,7 +47,10 @@ impl Backend {
             params.text.clone(),
             None,
         ));
-        let parser = LangParser::default();
+        self.diagnostic_map.clear();
+        let parser = LangParser::new(false, Box::new(
+            |_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports)
+        ));
         let (_doc, state) = parser.parse(
             ParserState::new_with_semantics(&parser, None),
             source.clone(),
```

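Two details worth noting here: passing `false` keeps ANSI color codes out of messages that will be shown by the editor, and the report-handler closure borrows `self.diagnostic_map`, which is why `LangParser` gained its lifetime parameter: the parser cannot outlive the backend it borrows from. A compact sketch of that borrow relationship (the surrounding type is a stand-in; the `LangParser::new` call mirrors the hunk above):

```rust
use dashmap::DashMap;
use tower_lsp::lsp_types::Diagnostic;

// Stand-in for the server backend, only to illustrate the borrow.
struct BackendLike {
    diagnostic_map: DashMap<String, Vec<Diagnostic>>,
}

impl BackendLike {
    fn on_document_update(&self) {
        self.diagnostic_map.clear();
        // `LangParser<'a>` with 'a tied to `&self`: the closure borrows the map.
        let parser = LangParser::new(false, Box::new(
            |_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports),
        ));
        // ... parser.parse(...) populates `self.diagnostic_map` through the handler.
        drop(parser); // the borrow of `self` ends when the parser is dropped
    }
}
```
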
```diff
@@ -113,6 +118,15 @@ impl LanguageServer for Backend {
                 },
             ),
         ),
+        diagnostic_provider: Some(
+            DiagnosticServerCapabilities::Options(
+                DiagnosticOptions {
+                    identifier: None,
+                    inter_file_dependencies: true,
+                    workspace_diagnostics: true,
+                    work_done_progress_options: WorkDoneProgressOptions::default(),
+                })
+        ),
         ..ServerCapabilities::default()
     },
     server_info: Some(ServerInfo {
```

```diff
@@ -185,6 +199,25 @@ impl LanguageServer for Backend {
         }
         Ok(None)
     }
+
+    async fn diagnostic(
+        &self,
+        params: DocumentDiagnosticParams,
+    ) -> tower_lsp::jsonrpc::Result<DocumentDiagnosticReportResult> {
+        Ok(
+            DocumentDiagnosticReportResult::Report(
+                DocumentDiagnosticReport::Full(
+                    RelatedFullDocumentDiagnosticReport {
+                        related_documents: None,
+                        full_document_diagnostic_report: FullDocumentDiagnosticReport {
+                            result_id: None,
+                            items: self.diagnostic_map.get(params.text_document.uri.as_str()).map_or(vec![], |v| v.to_owned())
+                        }
+                    }
+                )
+            )
+        )
+    }
 }
 
 #[tokio::main]
```

```diff
@@ -196,6 +229,7 @@ async fn main() {
         client,
         document_map: DashMap::new(),
         semantic_token_map: DashMap::new(),
+        diagnostic_map: DashMap::new(),
     });
     Server::new(stdin, stdout, socket).serve(service).await;
 }
```