Lint

parent ea29c4bb53
commit 634f8876ee
40 changed files with 461 additions and 546 deletions
@@ -1,4 +1,4 @@
pub mod compiler;
pub mod navigation;
pub mod process;
pub mod postprocess;
pub mod process;

@@ -239,7 +239,7 @@ pub fn create_navigation(
nav.entries
.sort_by(|l, r| NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str()));

for (_, child) in &mut nav.children {
for child in nav.children.values_mut() {
sort_entries(child);
}
}
@@ -67,7 +67,9 @@ impl PostProcess {
}
if let Some((found_ref, found_doc)) = &found_ref {
let found_borrow = found_doc.borrow();
let found_path = found_borrow.get_variable("compiler.output").ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let found_path = found_borrow
.get_variable("compiler.output")
.ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let insert_content = format!("{found_path}#{found_ref}");
content.insert_str(pos + offset, insert_content.as_str());
offset += insert_content.len();
@@ -26,7 +26,12 @@ fn parse(
) -> Result<Box<dyn Document<'static>>, String> {
// Parse
//let source = SourceFile::new(input.to_string(), None).unwrap();
let (doc, _) = parser.parse(ParserState::new(parser, None), source.clone(), None, ParseMode::default());
let (doc, _) = parser.parse(
ParserState::new(parser, None),
source.clone(),
None,
ParseMode::default(),
);

if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");

@@ -159,7 +164,10 @@ pub fn process(
/// Processes sources from in-memory strings
/// This function is indented for testing
#[cfg(test)]
pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
pub fn process_from_memory(
target: Target,
sources: Vec<String>,
) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];

let parser = LangParser::default();
@@ -32,9 +32,8 @@ pub enum CrossReference {
}

impl core::fmt::Display for CrossReference {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
CrossReference::Unspecific(name) => write!(f, "#{name}"),
CrossReference::Specific(doc_name, name) => write!(f, "{doc_name}#{name}"),
}

@@ -74,8 +73,7 @@ impl Scope {
));

// Variables
self.variables
.extend(other.variables.drain());
self.variables.extend(other.variables.drain());
}
false => {
// References
@@ -131,8 +129,7 @@ pub trait Document<'a>: core::fmt::Debug {
);
}
// Add contained references
else if let Some(container) =
self
else if let Some(container) = self
.content()
.borrow()
.last()

@@ -168,9 +165,7 @@ pub trait Document<'a>: core::fmt::Debug {

fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
match self.scope().borrow().variables.get(name) {
Some(variable) => {
Some(variable.clone())
}
Some(variable) => Some(variable.clone()),

// Continue search recursively
None => match self.parent() {
@@ -193,11 +188,13 @@ pub trait Document<'a>: core::fmt::Debug {
scope: &RefCell<Scope>,
merge_as: Option<&String>,
) {
if let Some(merge_as) = merge_as { self.scope().borrow_mut().merge(
&mut scope.borrow_mut(),
merge_as,
self.content().borrow().len(),
) }
if let Some(merge_as) = merge_as {
self.scope().borrow_mut().merge(
&mut scope.borrow_mut(),
merge_as,
self.content().borrow().len(),
)
}

// Content
self.content()

@@ -206,10 +203,7 @@ pub trait Document<'a>: core::fmt::Debug {
}

fn get_reference(&self, refname: &str) -> Option<ElemReference> {
self.scope()
.borrow()
.referenceable
.get(refname).copied()
self.scope().borrow().referenceable.get(refname).copied()
}

fn get_from_reference(
@@ -50,7 +50,12 @@ pub trait Element: Downcast + core::fmt::Debug {
fn as_container(&self) -> Option<&dyn ContainerElement> { None }

/// Compiles element
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String>;
fn compile(
&self,
compiler: &Compiler,
document: &dyn Document,
cursor: usize,
) -> Result<String, String>;
}
impl_downcast!(Element);

@@ -93,7 +98,12 @@ impl Element for DocumentEnd {

fn element_name(&self) -> &'static str { "Document End" }

fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn compile(
&self,
_compiler: &Compiler,
_document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
Ok(String::new())
}
}
@@ -31,9 +31,7 @@ impl<'a> LangDocument<'a> {
impl<'a> Document<'a> for LangDocument<'a> {
fn source(&self) -> Rc<dyn Source> { self.source.clone() }

fn parent(&self) -> Option<&'a dyn Document<'a>> {
self.parent.map(|p| p as &dyn Document<'a>)
}
fn parent(&self) -> Option<&'a dyn Document<'a>> { self.parent.map(|p| p as &dyn Document<'a>) }

fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }

@@ -1,5 +1,5 @@
pub mod document;
pub mod references;
pub mod langdocument;
pub mod element;
pub mod langdocument;
pub mod references;
pub mod variable;
@@ -57,7 +57,9 @@ impl Variable for BaseVariable {
));

state.with_state(|new_state| {
let _ = new_state.parser.parse_into(new_state, source, document, ParseMode::default());
let _ = new_state
.parser
.parse_into(new_state, source, document, ParseMode::default());
});
}
}

@@ -23,17 +23,16 @@ use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::style::StyleHolder;
use crate::parser::util::escape_text;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug)]
pub struct Blockquote {
@@ -271,8 +270,11 @@ impl Rule for BlockquoteRule {
if let Some(properties) = captures.get(1) {
match self.parse_properties(properties) {
Err(err) => {
report_err!(&mut reports, cursor.source.clone(), "Invalid Blockquote Properties".into(),
span(properties.range(), err)
report_err!(
&mut reports,
cursor.source.clone(),
"Invalid Blockquote Properties".into(),
span(properties.range(), err)
);
return (end_cursor, reports);
}

@@ -329,8 +331,14 @@ impl Rule for BlockquoteRule {
} else if elem.downcast_ref::<Blockquote>().is_some() {
parsed_content.push(elem);
} else {
report_err!(&mut reports, token.source(), "Unable to Parse Blockquote Entry".into(),
span(token.range.clone(), "Blockquotes may only contain paragraphs and other blockquotes".into())
report_err!(
&mut reports,
token.source(),
"Unable to Parse Blockquote Entry".into(),
span(
token.range.clone(),
"Blockquotes may only contain paragraphs and other blockquotes".into()
)
);
return (end_cursor, reports);
}
@@ -1,6 +1,4 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Once;

use ariadne::Fmt;

@@ -25,16 +23,15 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::util::{self};
use lazy_static::lazy_static;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum CodeKind {
@@ -323,9 +320,7 @@ impl RegexRule for CodeRule {

fn regexes(&self) -> &[regex::Regex] { &self.re }

fn enabled(&self, mode: &ParseMode, id: usize) -> bool {
return !mode.paragraph_only || id != 0;
}
fn enabled(&self, mode: &ParseMode, id: usize) -> bool { !mode.paragraph_only || id != 0 }

fn on_regex_match(
&self,

@@ -362,10 +357,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Invalid Code Properties".into(),
span(
props.range(),
e
)
span(props.range(), e)
);
return reports;
}

@@ -383,10 +375,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Missing Code Language".into(),
span(
lang.range(),
"No language specified".into()
)
span(lang.range(), "No language specified".into())
);

return reports;

@@ -431,10 +420,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Empty Code Content".into(),
span(
token.range.clone(),
"Code content cannot be empty".into()
)
span(token.range.clone(), "Code content cannot be empty".into())
);
return reports;
}
@@ -450,45 +436,44 @@ impl RegexRule for CodeRule {
let code_name = name.as_str().trim_end().trim_start().to_string();
(!code_name.is_empty()).then_some(code_name)
});
let line_offset =
match properties.get("line_offset", |prop, value| {
value.parse::<usize>().map_err(|e| (prop, e))
}) {
Ok((_prop, offset)) => offset,
Err(e) => {
match e {
PropertyMapError::ParseError((prop, err)) => {
report_err!(
&mut reports,
token.source(),
"Invalid Code Property".into(),
span(
token.start()+1..token.end(),
format!("Property `line_offset: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error))
)
);
return reports;
}
PropertyMapError::NotFoundError(err) => {
report_err!(
&mut reports,
token.source(),
"Invalid Code Property".into(),
span(
token.start()+1..token.end(),
format!(
"Property `{}` doesn't exist",
err.fg(state.parser.colors().info)
)
)
);
return reports;
}
}
let line_offset = match properties.get("line_offset", |prop, value| {
value.parse::<usize>().map_err(|e| (prop, e))
}) {
Ok((_prop, offset)) => offset,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
report_err!(
&mut reports,
token.source(),
"Invalid Code Property".into(),
span(
token.start() + 1..token.end(),
format!(
"Property `line_offset: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
)
)
);
return reports;
}
};
PropertyMapError::NotFoundError(err) => {
report_err!(
&mut reports,
token.source(),
"Invalid Code Property".into(),
span(
token.start() + 1..token.end(),
format!(
"Property `{}` doesn't exist",
err.fg(state.parser.colors().info)
)
)
);
return reports;
}
},
};

state.push(
document,

@@ -680,6 +665,7 @@ mod tests {
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_semantics;
use std::rc::Rc;

#[test]
fn code_block() {
@@ -5,15 +5,12 @@ use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug)]
pub struct Comment {

@@ -77,10 +74,7 @@ impl RegexRule for CommentRule {
&mut reports,
token.source(),
"Empty Comment".into(),
span(
comment.range(),
"Comment is empty".into()
)
span(comment.range(), "Comment is empty".into())
);
}

@@ -117,6 +111,7 @@ mod tests {
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::validate_semantics;
use std::rc::Rc;

use super::*;
@@ -4,7 +4,6 @@ use std::any::Any;
use std::cell::Ref;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;

@@ -20,14 +19,13 @@ use crate::lua::kernel::CTX;
use crate::parser::customstyle::CustomStyle;
use crate::parser::customstyle::CustomStyleToken;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::paragraph::Paragraph;

@@ -63,12 +61,15 @@ impl CustomStyle for LuaCustomStyle {
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.start.as_str());
if let Err(err) = chunk.eval::<()>() {
report_err!(&mut reports, location.source(), "Lua execution failed".into(),
span(location.range.clone(), err.to_string()),
note(format!(
"When trying to start custom style {}",
self.name().fg(state.parser.colors().info)
))
report_err!(
&mut reports,
location.source(),
"Lua execution failed".into(),
span(location.range.clone(), err.to_string()),
note(format!(
"When trying to start custom style {}",
self.name().fg(state.parser.colors().info)
))
);
}
});
@@ -94,11 +95,14 @@ impl CustomStyle for LuaCustomStyle {
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.end.as_str());
if let Err(err) = chunk.eval::<()>() {
report_err!(&mut reports, location.source(), "Lua execution failed".into(),
report_err!(
&mut reports,
location.source(),
"Lua execution failed".into(),
span(location.range.clone(), err.to_string()),
note(format!(
"When trying to end custom style {}",
self.name().fg(state.parser.colors().info)
"When trying to end custom style {}",
self.name().fg(state.parser.colors().info)
))
);
}

@@ -115,11 +119,7 @@ struct CustomStyleState {
impl RuleState for CustomStyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }

fn on_remove(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report> {
fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];

self.toggled.iter().for_each(|(style, token)| {
@@ -135,13 +135,16 @@ impl RuleState for CustomStyleState {
})
.unwrap();

report_err!(&mut reports, token.source(), "Unterminated Custom Style".into(),
span(token.range.clone(), format!(
"Style {} starts here",
style.fg(state.parser.colors().info)
)),
span(paragraph_end.1, "Paragraph ends here".into()),
note("Styles cannot span multiple documents (i.e @import)".into())
report_err!(
&mut reports,
token.source(),
"Unterminated Custom Style".into(),
span(
token.range.clone(),
format!("Style {} starts here", style.fg(state.parser.colors().info))
),
span(paragraph_end.1, "Paragraph ends here".into()),
note("Styles cannot span multiple documents (i.e @import)".into())
);
});
@@ -272,16 +275,19 @@ impl Rule for CustomStyleRule {
Token::new(cursor.pos..cursor.pos + s_end.len(), cursor.source.clone());
if style_state.toggled.get(style.name()).is_none() {
let mut reports = vec![];
report_err!(&mut reports, token.source(), "Invalid End of Style".into(),
span(token.range.clone(), format!(
"Cannot end style {} here, it does not started anywhere",
style.name().fg(state.parser.colors().info)
))
);
return (
cursor.at(cursor.pos + s_end.len()),
reports
report_err!(
&mut reports,
token.source(),
"Invalid End of Style".into(),
span(
token.range.clone(),
format!(
"Cannot end style {} here, it does not started anywhere",
style.name().fg(state.parser.colors().info)
)
)
);
return (cursor.at(cursor.pos + s_end.len()), reports);
}

style_state.toggled.remove(style.name());
@@ -294,20 +300,26 @@ impl Rule for CustomStyleRule {
);
if let Some(start_token) = style_state.toggled.get(style.name()) {
let mut reports = vec![];
report_err!(&mut reports, token.source(), "Invalid Start of Style".into(),
span(token.range.clone(), format!(
report_err!(
&mut reports,
token.source(),
"Invalid Start of Style".into(),
span(
token.range.clone(),
format!(
"When trying to start custom style {}",
self.name().fg(state.parser.colors().info)
)),
span(start_token.range.clone(), format!(
)
),
span(
start_token.range.clone(),
format!(
"Style {} previously starts here",
self.name().fg(state.parser.colors().info)
)),
);
return (
cursor.at(cursor.pos + s_end.len()),
reports
)
),
);
return (cursor.at(cursor.pos + s_end.len()), reports);
}

style_state

@@ -1,7 +1,6 @@
use crate::parser::parser::ParseMode;
use crate::parser::style::ElementStyle;
use std::any::Any;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;

@@ -152,9 +151,9 @@ impl Rule for ElemStyleRule {
span(
cursor.pos..cursor.pos + json.len(),
format!(
"Failed to serialize `{}` into style with key `{}`: {err}",
json.fg(state.parser.colors().highlight),
style.key().fg(state.parser.colors().info)
"Failed to serialize `{}` into style with key `{}`: {err}",
json.fg(state.parser.colors().highlight),
style.key().fg(state.parser.colors().info)
)
)
);
@@ -1,6 +1,4 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::Once;

@@ -29,12 +27,11 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug)]
struct Graphviz {

@@ -225,21 +222,14 @@ impl RegexRule for GraphRule {
return reports;
}
Some(content) => {
let processed = util::escape_text(
'\\',
"[/graph]",
content.as_str(),
);
let processed = util::escape_text('\\', "[/graph]", content.as_str());

if processed.is_empty() {
report_err!(
&mut reports,
token.source(),
"Empty Graph Code".into(),
span(
content.range(),
"Graph code is empty".into()
)
span(content.range(), "Graph code is empty".into())
);
return reports;
}
@@ -273,10 +263,7 @@ impl RegexRule for GraphRule {
&mut reports,
token.source(),
"Invalid Graph Properties".into(),
span(
props.range(),
e
)
span(props.range(), e)
);
return reports;
}

@@ -299,10 +286,10 @@ impl RegexRule for GraphRule {
span(
token.range.clone(),
format!(
"Property `{}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
)
"Property `{}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
)
)
);
return reports;

@@ -312,10 +299,7 @@ impl RegexRule for GraphRule {
&mut reports,
token.source(),
"Invalid Graph Property".into(),
span(
token.start() + 1..token.end(),
err
)
span(token.start() + 1..token.end(), err)
);
return reports;
}

@@ -412,6 +396,7 @@ mod tests {
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use std::rc::Rc;

use super::*;
@@ -4,17 +4,15 @@ use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::SourceFile;
use crate::parser::source::Token;
use ariadne::Fmt;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::paragraph::Paragraph;

@@ -9,9 +9,9 @@ use crate::parser::layout::LayoutHolder;
use crate::parser::layout::LayoutType;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;

@@ -31,8 +31,6 @@ use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum LayoutToken {
@@ -252,11 +250,7 @@ struct LayoutState {
impl RuleState for LayoutState {
fn scope(&self) -> Scope { Scope::DOCUMENT }

fn on_remove(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report> {
fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];

let doc_borrow = document.content().borrow();

@@ -270,17 +264,13 @@ impl RuleState for LayoutState {
"Unterminated Layout".into(),
span(
start.source(),
start.range.start+1..start.range.end,
start.range.start + 1..start.range.end,
format!(
"Layout {} stars here",
layout_type.name().fg(state.parser.colors().info)
)
),
span(
at.source(),
at.range.clone(),
"Document ends here".into()
)
span(at.source(), at.range.clone(), "Document ends here".into())
);
}

@@ -363,15 +353,11 @@ impl LayoutRule {
match layout_type.parse_properties(content.as_str()) {
Ok(props) => Ok(props),
Err(err) => {

report_err!(
&mut reports,
token.source(),
"Invalid Layout Properties".into(),
span(
props.range(),
err
)
span(props.range(), err)
);
Err(())
}

@@ -473,7 +459,6 @@ impl RegexRule for LayoutRule {
trimmed.fg(state.parser.colors().highlight)
)
)

);
return reports;
}
@@ -514,9 +499,12 @@ impl RegexRule for LayoutRule {
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
let start = matches
.get(0)
.map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
})
.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_BEGIN".len(),

@@ -546,10 +534,7 @@ impl RegexRule for LayoutRule {
&mut reports,
token.source(),
"Invalid #+LAYOUT_NEXT".into(),
span(
token.range.clone(),
"No active layout found".into()
)
span(token.range.clone(), "No active layout found".into())
);
return reports;
}

@@ -583,15 +568,16 @@ impl RegexRule for LayoutRule {
matches.get(1),
) {
Ok(props) => props,
Err(rep) => return reports,
Err(()) => return reports,
};

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
let start = matches
.get(0)
.map(|m| m.start() + token.source().content()[m.start()..].find('#').unwrap())
.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_NEXT".len(),
@@ -611,7 +597,6 @@ impl RegexRule for LayoutRule {
layout_type.clone(),
properties,
)

} else {
// LAYOUT_END
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();

@@ -623,10 +608,7 @@ impl RegexRule for LayoutRule {
&mut reports,
token.source(),
"Invalid #+LAYOUT_NEXT".into(),
span(
token.range.clone(),
"No active layout found".into()
)
span(token.range.clone(), "No active layout found".into())
);
return reports;
}

@@ -660,7 +642,7 @@ impl RegexRule for LayoutRule {
matches.get(1),
) {
Ok(props) => props,
Err(rep) => return reports,
Err(()) => return reports,
};

let layout_type = layout_type.clone();

@@ -670,9 +652,10 @@ impl RegexRule for LayoutRule {
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
let start = matches
.get(0)
.map(|m| m.start() + token.source().content()[m.start()..].find('#').unwrap())
.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_END".len(),

@@ -920,7 +903,8 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::{validate_document, validate_semantics};
use crate::validate_document;
use crate::validate_semantics;

use super::*;
@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;

@@ -19,11 +20,8 @@ use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug)]
pub struct Link {

@@ -118,14 +116,17 @@ impl RegexRule for LinkRule {
&mut reports,
token.source(),
"Empty Link Display".into(),
span(
display.range(),
"Link display is empty".into()
)
span(display.range(), "Link display is empty".into())
);
return reports;
}
let display_source = util::escape_source(token.source(), display.range(), "Link Display".into(), '\\', "](");
let display_source = util::escape_source(
token.source(),
display.range(),
"Link Display".into(),
'\\',
"](",
);
if display_source.content().is_empty() {
report_err!(
&mut reports,

@@ -177,10 +178,7 @@ impl RegexRule for LinkRule {
&mut reports,
token.source(),
"Empty Link URL".into(),
span(
url.range(),
"Link url is empty".into()
)
span(url.range(), "Link url is empty".into())
);
return reports;
}
@@ -13,7 +13,9 @@ use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::Report;
use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Token;

@@ -23,8 +25,6 @@ use crate::parser::util::escape_text;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;
use regex::Match;
use regex::Regex;

@@ -310,7 +310,10 @@ impl Rule for ListRule {
if let Some(properties) = captures.get(2) {
match self.parse_properties(properties) {
Err(err) => {
report_err!(&mut reports, cursor.source.clone(), "Invalid List Entry Properties".into(),
report_err!(
&mut reports,
cursor.source.clone(),
"Invalid List Entry Properties".into(),
span(properties.range(), err)
);
return (cursor.at(captures.get(0).unwrap().end()), reports);

@@ -357,7 +360,7 @@ impl Rule for ListRule {
.map(|delim| {
captures.get(1).unwrap().as_str()[0..delim]
.chars()
.fold(true, |val, c| val && c.is_whitespace())
.all(|c| c.is_whitespace())
}) == Some(true)
{
break;

@@ -378,7 +381,10 @@ impl Rule for ListRule {
));
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
Err(err) => {
report_warn!(&mut reports, token.source(), "Unable to parse List Entry".into(),
report_warn!(
&mut reports,
token.source(),
"Unable to parse List Entry".into(),
span(token.range.clone(), err.into())
);
// Return an empty paragraph

@@ -428,7 +434,6 @@ impl Rule for ListRule {
#[cfg(test)]
mod tests {
use super::*;
use crate::parser::source::Source;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
@@ -1,5 +1,4 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;

@@ -20,7 +19,8 @@ use crate::document::element::ReferenceableElement;
use crate::document::references::validate_refname;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;

@@ -31,8 +31,6 @@ use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::paragraph::Paragraph;
use super::reference::InternalReference;

@@ -314,13 +312,10 @@ impl MediaRule {
&mut reports,
token.source(),
"Invalid Media Properties".into(),
span(
props.range(),
e
)
span(props.range(), e)
);
None
},
}
Ok(properties) => Some(properties),
}
}

@@ -373,10 +368,7 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media Refname".into(),
span(
m.range(),
err
)
span(m.range(), err)
);
return reports;
}
@@ -392,61 +384,56 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media URI".into(),
span(
m.range(),
err
)
span(m.range(), err)
);
return reports;
}
};

// Properties
let properties = match self.parse_properties(&mut reports, &token, &matches.get(3))
{
let properties = match self.parse_properties(&mut reports, &token, &matches.get(3)) {
Some(pm) => pm,
None => return reports,
};

let media_type =
match Self::detect_filetype(uri.as_str()) {
Some(media_type) => media_type,
None => match properties.get("type", |prop, value| {
MediaType::from_str(value.as_str()).map_err(|e| (prop, e))
}) {
Ok((_prop, kind)) => kind,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
report_err!(
&mut reports,
token.source(),
"Invalid Media Property".into(),
span(
token.start()+1..token.end(),
format!(
"Property `type: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
)
let media_type = match Self::detect_filetype(uri.as_str()) {
Some(media_type) => media_type,
None => match properties.get("type", |prop, value| {
MediaType::from_str(value.as_str()).map_err(|e| (prop, e))
}) {
Ok((_prop, kind)) => kind,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
report_err!(
&mut reports,
token.source(),
"Invalid Media Property".into(),
span(
token.start() + 1..token.end(),
format!(
"Property `type: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
)
);
return reports;
}
PropertyMapError::NotFoundError(err) => {
report_err!(
&mut reports,
token.source(),
"Invalid Media Property".into(),
span(
token.start()+1..token.end(),
format!("{err}. Required because mediatype could not be detected")
)
);
return reports;
}
},
)
);
return reports;
}
PropertyMapError::NotFoundError(err) => {
report_err!(
&mut reports,
token.source(),
"Invalid Media Property".into(),
span(
token.start() + 1..token.end(),
format!("{err}. Required because mediatype could not be detected")
)
);
return reports;
}
},
};
},
};

let width = properties
.get("width", |_, value| -> Result<String, ()> {

@@ -481,9 +468,7 @@ impl RegexRule for MediaRule {
"Invalid Media Description".into(),
span(
content.range(),
format!(
"Could not parse description: {err}"
)
format!("Could not parse description: {err}")
)
);
return reports;

@@ -522,11 +507,7 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media".into(),
span(
token.range.clone(),
err

)
span(token.range.clone(), err)
);
}
@@ -1,5 +1,8 @@
pub mod blockquote;
pub mod code;
pub mod comment;
pub mod customstyle;
pub mod elemstyle;
pub mod graphviz;
pub mod import;
pub mod layout;

@@ -15,6 +18,3 @@ pub mod style;
pub mod tex;
pub mod text;
pub mod variable;
pub mod elemstyle;
pub mod customstyle;
pub mod blockquote;

@@ -10,11 +10,10 @@ use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::reports::*;

// TODO: Full refactor
// Problem is that document parsed from other sources i.e by variables

@@ -151,14 +150,13 @@ impl Rule for ParagraphRule {
#[cfg(test)]
mod tests {
use super::*;
use std::rc::Rc;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;

use std::rc::Rc;

#[test]
fn parse() {
@@ -6,8 +6,8 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Token;
use crate::parser::util::Property;

@@ -272,8 +272,6 @@ impl RegexRule for RawRule {
#[cfg(test)]
mod tests {
use super::*;
use crate::parser::source::Source;
use std::rc::Rc;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;

@@ -281,6 +279,7 @@ mod tests {
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::validate_semantics;
use std::rc::Rc;

#[test]
fn parser() {
@@ -1,5 +1,4 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;

use reference_style::ExternalReferenceStyle;

@@ -20,17 +19,15 @@ use crate::document::references::validate_refname;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use crate::parser::util;
use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug)]
pub struct InternalReference {

@@ -149,7 +146,7 @@ impl Element for ExternalReference {
format!("Failed to format ExternalReference style `{format_string}`: {err}")
})?;

result += format!("\">{}</a>", args.to_string()).as_str();
result += format!("\">{}</a>", args).as_str();
}
// Add crossreference
compiler.insert_crossreference(crossreference_pos, self.reference.clone());

@@ -214,13 +211,10 @@ impl ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Properties".into(),
span(
props.range(),
e
)
span(props.range(), e)
);
None
},
}
Ok(properties) => Some(properties),
}
}

@@ -259,10 +253,7 @@ impl RegexRule for ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Refname".into(),
span(
refname_match.range(),
err
)
span(refname_match.range(), err)
);
return reports;
}

@@ -277,10 +268,7 @@ impl RegexRule for ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Refname".into(),
span(
refname_match.range(),
err
)
span(refname_match.range(), err)
);
return reports;
}

@@ -292,8 +280,7 @@ impl RegexRule for ReferenceRule {
};

// Properties
let properties = match self.parse_properties(&mut reports, &token, &matches.get(2))
{
let properties = match self.parse_properties(&mut reports, &token, &matches.get(2)) {
Some(pm) => pm,
None => return reports,
};
@@ -5,6 +5,8 @@ use crate::lua::kernel::KernelContext;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;

@@ -16,8 +18,6 @@ use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::rc::Rc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::text::Text;

@@ -106,10 +106,7 @@ impl RegexRule for ScriptRule {
&mut reports,
token.source(),
"Invalid Kernel Name".into(),
span(
name.range(),
e
)
span(name.range(), e)
);
return reports;
}

@@ -126,26 +123,27 @@ impl RegexRule for ScriptRule {
};

let script_range = matches.get(if index == 0 { 2 } else { 3 }).unwrap().range();
let source = escape_source(token.source(), script_range.clone(), format!(
let source = escape_source(
token.source(),
script_range.clone(),
format!(
":LUA:{kernel_name}#{}#{}",
token.source().name(),
matches.get(0).unwrap().start()
), '\\', ">@");
if source.content().is_empty()
{
),
'\\',
">@",
);
if source.content().is_empty() {
report_warn!(
&mut reports,
token.source(),
"Invalid Kernel Code".into(),
span(
script_range,
"Kernel code is empty".into()
)
span(script_range, "Kernel code is empty".into())
);
return reports;
}

let execute = |lua: &Lua| {
let chunk = lua.load(source.content()).set_name(kernel_name);

@@ -177,10 +175,7 @@ impl RegexRule for ScriptRule {
&mut reports,
token.source(),
"Invalid Kernel Code Kind".into(),
span(
kind.range(),
msg
)
span(kind.range(), msg)
);
return reports;
}
@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use ariadne::Fmt;

@@ -19,11 +20,8 @@ use mlua::Lua;
use regex::Regex;
use section_style::SectionLinkPos;
use section_style::SectionStyle;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::reference::InternalReference;

@@ -300,9 +298,13 @@ impl RegexRule for SectionRule {
"Missing Section Spacing".into(),
span(
name.range(),
"Sections require at least one whitespace before the section's name".into()
"Sections require at least one whitespace before the section's name"
.into()
),
help(format!("Add a space before `{}`", section_name.fg(state.parser.colors().highlight)))
help(format!(
"Add a space before `{}`",
section_name.fg(state.parser.colors().highlight)
))
);
return reports;
}
@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;

@@ -18,11 +19,8 @@ use mlua::Function;
use regex::Captures;
use regex::Regex;
use std::cell::RefCell;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

use super::paragraph::Paragraph;

@@ -86,11 +84,7 @@ impl StyleState {
impl RuleState for StyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }

fn on_remove(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report> {
fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];

self.toggled

@@ -119,15 +113,9 @@ impl RuleState for StyleState {
"Unterminated Style".into(),
span(
token.range.clone(),
format!(
"Style {} starts here",
name.fg(state.parser.colors().info)
)
),
span(
paragraph_end.1,
"Paragraph ends here".into()
format!("Style {} starts here", name.fg(state.parser.colors().info))
),
span(paragraph_end.1, "Paragraph ends here".into()),
note("Styles cannot span multiple documents (i.e @import)".into())
);
});
@@ -1,10 +1,8 @@
use std::collections::HashMap;
use std::io::Read;
use std::io::Write;
use std::ops::Range;
use std::process::Command;
use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use std::sync::Once;

@@ -29,17 +27,15 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util;
use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug, PartialEq, Eq)]
enum TexKind {

@@ -266,15 +262,13 @@ impl TexRule {
fn parse_properties(
&self,
mut reports: &mut Vec<Report>,
colors: &ReportColors,
token: &Token,
m: &Option<Match>,
) -> Option<PropertyMap> {
match m {
None => match self.properties.default() {
Ok(properties) => Some(properties),
Err(e) =>
{
Err(e) => {
report_err!(
&mut reports,
token.source(),

@@ -296,13 +290,10 @@ impl TexRule {
&mut reports,
token.source(),
"Invalid Tex Properties".into(),
span(
props.range(),
e
)
span(props.range(), e)
);
None
},
}
Ok(properties) => Some(properties),
}
}

@@ -359,10 +350,7 @@ impl RegexRule for TexRule {
&mut reports,
token.source(),
"Empty Tex Code".into(),
span(
content.range(),
"Tex code is empty".into()
)
span(content.range(), "Tex code is empty".into())
);
}
processed

@@ -370,8 +358,7 @@ impl RegexRule for TexRule {
};

// Properties
let properties = match self.parse_properties(&mut reports, state.parser.colors(), &token, &matches.get(1))
{
let properties = match self.parse_properties(&mut reports, &token, &matches.get(1)) {
Some(pm) => pm,
None => return reports,
};

@@ -383,7 +370,6 @@ impl RegexRule for TexRule {
Ok((_prop, kind)) => kind,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {

report_err!(
&mut reports,
token.source(),
@@ -442,14 +428,20 @@ impl RegexRule for TexRule {
Semantics::from_source(token.source(), &state.shared.semantics)
{
let range = token.range;
sems.add(range.start..range.start + if index == 0 { 2 } else { 1 }, tokens.tex_sep);
sems.add(
range.start..range.start + if index == 0 { 2 } else { 1 },
tokens.tex_sep,
);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.tex_props_sep);
sems.add(props.clone(), tokens.tex_props);
sems.add(props.end..props.end + 1, tokens.tex_props_sep);
}
sems.add(matches.get(2).unwrap().range(), tokens.tex_content);
sems.add(range.end - if index == 0 { 2 } else { 1 }..range.end, tokens.tex_sep);
sems.add(
range.end - if index == 0 { 2 } else { 1 }..range.end,
tokens.tex_sep,
);
}
reports
}

@@ -550,7 +542,9 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::{validate_document, validate_semantics};
use crate::validate_document;
use crate::validate_semantics;
use std::rc::Rc;

use super::*;

@@ -648,5 +642,4 @@ $[kind=inline]\LaTeX$
tex_sep { delta_line == 0, delta_start == 6, length == 1 };
);
}

}
@@ -1,6 +1,4 @@
use std::any::Any;
use std::ops::Range;
use std::rc::Rc;

use mlua::Function;
use mlua::Lua;

@@ -12,11 +10,10 @@ use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::reports::*;

#[derive(Debug)]
pub struct Text {

@@ -7,18 +7,16 @@ use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use ariadne::Fmt;
use mlua::Function;
use mlua::Lua;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum VariableKind {
@@ -159,18 +157,18 @@ impl RegexRule for VariableRule {
)
),
help(format!(
"Leave empty for regular variables. Available variable kinds:{}",
self.kinds.iter().skip(1).fold(
"".to_string(),
|acc, (char, name)| {
acc + format!(
"\n - `{}` : {}",
char.fg(state.parser.colors().highlight),
name.fg(state.parser.colors().info)
)
.as_str()
}
)
"Leave empty for regular variables. Available variable kinds:{}",
self.kinds
.iter()
.skip(1)
.fold("".to_string(), |acc, (char, name)| {
acc + format!(
"\n - `{}` : {}",
char.fg(state.parser.colors().highlight),
name.fg(state.parser.colors().info)
)
.as_str()
})
))
);
return reports;

@@ -243,11 +241,11 @@ impl RegexRule for VariableRule {
"Unable to Create Variable".into(),
span(
m.start() + 1..m.end(),
format!(
"Unable to create variable `{}`. {}",
var_name.fg(state.parser.colors().highlight),
msg
)
format!(
"Unable to create variable `{}`. {}",
var_name.fg(state.parser.colors().highlight),
msg
)
),
);

@@ -396,10 +394,7 @@ impl RegexRule for VariableSubstitutionRule {
&mut reports,
token.source(),
"Invalid Variable Name".into(),
span(
name.range(),
msg
)
span(name.range(), msg)
);

return reports;
@@ -279,7 +279,7 @@ impl<'a> Semantics<'a> {
.unwrap_or(None)
{
return Self::from_source_impl(location.source(), semantics, original_source);
} else if let Some(source) = source.clone().downcast_rc::<SourceFile>().ok() {
} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
return Ref::filter_map(
semantics.as_ref().unwrap().borrow(),
|semantics: &SemanticsHolder| {
|
@@ -301,7 +301,7 @@ impl<'a> Semantics<'a> {
)
});
}
return None;
None
}

pub fn from_source(
|
@@ -311,7 +311,7 @@ impl<'a> Semantics<'a> {
if semantics.is_none() {
return None;
}
return Self::from_source_impl(source.clone(), semantics, source);
Self::from_source_impl(source.clone(), semantics, source)
}

pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
|
@@ -381,7 +381,7 @@ pub mod tests {
.unwrap()
.borrow()
.sems
.get(&($source as Rc<dyn Source>))
.get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
.unwrap()
.tokens
.borrow()
|
@@ -414,7 +414,7 @@ pub mod tests {
.unwrap()
.borrow()
.sems
.get(&($source as Rc<dyn Source>))
.get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
.unwrap()
.tokens
.borrow()
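
A note on the first hunk above: replacing `downcast_rc::<SourceFile>().ok()` inside an `if let Some(...)` with `if let Ok(...)` is clippy's redundant-pattern-matching fix, matching the `Result` directly instead of converting it to an `Option` first. A minimal standalone sketch of the pattern (generic names, not this crate's types):

fn parse_number(input: &str) -> Result<i32, std::num::ParseIntError> {
    input.parse()
}

fn main() {
    // Before: if let Some(n) = parse_number("42").ok() { ... }
    // After, as clippy suggests, match the Result directly:
    if let Ok(n) = parse_number("42") {
        assert_eq!(n, 42);
    }
}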
|
|
|
@@ -1,13 +1,12 @@
use std::collections::HashMap;
use std::rc::Rc;
use std::ops::Deref;
use std::rc::Rc;

use crate::document::document::Document;
use crate::parser::source::Token;

use crate::parser::reports::*;


use super::parser::ParserState;

#[derive(Debug, PartialEq, Eq)]
|
@@ -43,8 +42,7 @@ pub struct CustomStyleHolder {

impl CustomStyleHolder {
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
self.custom_styles
.get(style_name).cloned()
self.custom_styles.get(style_name).cloned()
}

pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
|
@@ -53,9 +51,7 @@ impl CustomStyleHolder {
}

impl Deref for CustomStyleHolder {
type Target = HashMap<String, Rc<dyn CustomStyle>>;
type Target = HashMap<String, Rc<dyn CustomStyle>>;

fn deref(&self) -> &Self::Target {
&self.custom_styles
}
fn deref(&self) -> &Self::Target { &self.custom_styles }
}
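
The `Deref` impl kept in this file forwards to the inner map, so `HashMap` methods can be called on the holder itself. A rough standalone sketch of what that pattern buys callers (simplified types, not the crate's):

use std::collections::HashMap;
use std::ops::Deref;

struct Holder {
    styles: HashMap<String, u32>,
}

impl Deref for Holder {
    type Target = HashMap<String, u32>;

    fn deref(&self) -> &Self::Target { &self.styles }
}

fn main() {
    let mut styles = HashMap::new();
    styles.insert("bold".to_string(), 1);
    let holder = Holder { styles };
    // `contains_key` and `get` come from HashMap through Deref.
    assert!(holder.contains_key("bold"));
    assert_eq!(holder.get("bold"), Some(&1));
}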
|
|
|
@@ -1,10 +1,10 @@
pub mod customstyle;
pub mod langparser;
pub mod layout;
pub mod parser;
pub mod reports;
pub mod rule;
pub mod source;
pub mod state;
pub mod util;
pub mod style;
pub mod layout;
pub mod customstyle;
pub mod reports;
pub mod util;
|
|
|
@@ -1,7 +1,5 @@
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc;
use unicode_segmentation::UnicodeSegmentation;

|
@@ -22,8 +20,6 @@ use crate::elements::paragraph::Paragraph;
use crate::lsp::semantic::SemanticsHolder;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use ariadne::Color;

#[derive(Debug)]
|
@@ -217,7 +213,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
return;
}

(*matched_at, *match_data) = match rule.next_match(&mode, self, cursor) {
(*matched_at, *match_data) = match rule.next_match(mode, self, cursor) {
None => (usize::MAX, None),
Some((mut pos, mut data)) => {
// Check if escaped
|
@@ -238,7 +234,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
}

// Find next potential match
(pos, data) = match rule.next_match(&mode, self, &cursor.at(pos + 1)) {
(pos, data) = match rule.next_match(mode, self, &cursor.at(pos + 1)) {
Some((new_pos, new_data)) => (new_pos, new_data),
None => (usize::MAX, data), // Stop iterating
}
|
@@ -342,18 +338,11 @@ impl<'a, 'b> ParserState<'a, 'b> {
}
}

#[derive(Default)]
pub struct ParseMode {
pub paragraph_only: bool,
}

impl Default for ParseMode {
fn default() -> Self {
Self {
paragraph_only: false,
}
}
}

pub trait Parser {
/// Gets the colors for formatting errors
///
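
One hunk above replaces a hand-written `Default` impl for `ParseMode` with `#[derive(Default)]`; the two should be equivalent here, since `bool::default()` is `false`. A minimal standalone sketch of that equivalence (assuming only the struct shape shown in the hunk):

// Illustrative sketch, not part of the commit: the derived Default produces
// the same value the removed manual impl did.
#[derive(Default)]
pub struct ParseMode {
    pub paragraph_only: bool,
}

fn main() {
    let mode = ParseMode::default();
    assert!(!mode.paragraph_only); // same as the manual `paragraph_only: false`
}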
|
|
|
@@ -13,9 +13,9 @@ pub enum ReportKind {
Warning,
}

impl Into<ariadne::ReportKind<'static>> for &ReportKind {
fn into(self) -> ariadne::ReportKind<'static> {
match self {
impl From<&ReportKind> for ariadne::ReportKind<'static> {
fn from(val: &ReportKind) -> Self {
match val {
ReportKind::Error => ariadne::ReportKind::Error,
ReportKind::Warning => ariadne::ReportKind::Warning,
}
|
@@ -59,7 +59,7 @@ impl Report {
for span in &self.spans {
let (osource, opos) = span.token.source().original_position(span.token.start());

if &osource == &source && opos < start {
if osource == source.clone() && opos < start {
start = opos;
}
}
|
@@ -97,20 +97,20 @@ impl Report {
}

pub mod macros {
pub use super::*;

#[macro_export]
macro_rules! report_label {
($r:expr,) => {{ }};
($r:expr, span($source:expr, $range:expr, $message:expr) $(, $($tail:tt)*)?) => {{
$r.spans.push(ReportSpan {
token: crate::parser::source::Token::new($range, $source),
token: $crate::parser::source::Token::new($range, $source),
message: $message,
});
report_label!($r, $($($tail)*)?);
}};
($r:expr, span($range:expr, $message:expr) $(, $($tail:tt)*)?) => {{
$r.spans.push(ReportSpan {
token: crate::parser::source::Token::new($range, $r.source.clone()),
token: $crate::parser::source::Token::new($range, $r.source.clone()),
message: $message,
});
report_label!($r, $($($tail)*)?);
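
The last hunk above prefixes the `Token::new` paths with `$crate::`, which is the usual fix for `#[macro_export]` macros: a plain `crate::` path resolves in the caller's crate, while `$crate::` always points back at the crate that defines the macro. A small standalone sketch of the pattern (the `helper` function is hypothetical, not from this repository):

// Minimal sketch of the $crate pattern (hypothetical names).
pub fn helper() -> u32 { 42 }

#[macro_export]
macro_rules! call_helper {
    () => {
        // `$crate::` keeps working even when the macro is invoked
        // from a downstream crate, where `crate::helper` would not resolve.
        $crate::helper()
    };
}

fn main() {
    assert_eq!(call_helper!(), 42);
}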
|
|
|
@@ -3,7 +3,6 @@ use super::parser::ParseMode;
use super::parser::ParserState;
use super::reports::Report;
use super::source::Cursor;
use super::source::Source;
use super::source::Token;
use super::style::StyleHolder;
use crate::document::document::Document;
|
@@ -14,8 +13,6 @@ use mlua::Lua;

use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;

macro_rules! create_registry {
( $($construct:expr),+ $(,)? ) => {{
|
@@ -184,10 +181,10 @@ impl<T: RegexRule + 'static> Rule for T {
let token = Token::new(captures.get(0).unwrap().range(), cursor.source.clone());

let token_end = token.end();
return (
(
cursor.at(token_end),
self.on_regex_match(*index, state, document, token, captures),
);
)
}

fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
|
|
@@ -17,8 +17,7 @@ pub trait Source: Downcast + Debug {
}
impl_downcast!(Source);

pub trait SourcePosition
{
pub trait SourcePosition {
/// Transforms a position to it's position in the oldest parent source
fn original_position(&self, pos: usize) -> (Rc<dyn Source>, usize);

|
@@ -88,30 +87,23 @@ impl Source for SourceFile {
/// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent"
/// Then at position 3, an offset of 1 will be created to account for the removed '\'
#[derive(Debug)]
struct SourceOffset
{
struct SourceOffset {
/// Stores the total offsets
offsets: Vec<(usize, isize)>,
}

impl SourceOffset
{
impl SourceOffset {
/// Get the offset position
pub fn position(&self, pos: usize) -> usize
{
match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig)
{
pub fn position(&self, pos: usize) -> usize {
match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig) {
Ok(idx) => (pos as isize + self.offsets[idx].1) as usize,
Err(idx) => {
if idx == 0
{
if idx == 0 {
pos
}
else
{
} else {
(pos as isize + self.offsets[idx - 1].1) as usize
}
},
}
}
}
}
|
@@ -135,7 +127,12 @@ impl VirtualSource {
}
}

pub fn new_offsets(location: Token, name: String, content: String, offsets: Vec<(usize, isize)>) -> Self {
pub fn new_offsets(
location: Token,
name: String,
content: String,
offsets: Vec<(usize, isize)>,
) -> Self {
Self {
location,
name,
|
@@ -151,55 +148,52 @@ impl Source for VirtualSource {
fn content(&self) -> &String { &self.content }
}

impl SourcePosition for Rc<dyn Source>
{
fn original_position(&self, mut pos: usize) -> (Rc<dyn Source>, usize) {
impl SourcePosition for Rc<dyn Source> {
fn original_position(&self, mut pos: usize) -> (Rc<dyn Source>, usize) {
// Stop recursion
if self.downcast_ref::<SourceFile>().is_some()
{
if self.downcast_ref::<SourceFile>().is_some() {
return (self.clone(), pos);
}

// Apply offsets
if let Some(offsets) =
self.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
if let Some(offsets) = self
.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
{
pos = offsets.position(pos);
}

// Recurse to parent
if let Some(parent) = self.location()
{
if let Some(parent) = self.location() {
return parent.source().original_position(parent.range.start + pos);
}

return (self.clone(), pos);
}
(self.clone(), pos)
}

fn original_range(&self, mut range: Range<usize>) -> (Rc<dyn Source>, Range<usize>) {
fn original_range(&self, mut range: Range<usize>) -> (Rc<dyn Source>, Range<usize>) {
// Stop recursion
if self.downcast_ref::<SourceFile>().is_some()
{
if self.downcast_ref::<SourceFile>().is_some() {
return (self.clone(), range);
}

// Apply offsets
if let Some(offsets) =
self.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
if let Some(offsets) = self
.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
{
range = offsets.position(range.start) .. offsets.position(range.end);
range = offsets.position(range.start)..offsets.position(range.end);
}

// Recurse to parent
if let Some(parent) = self.location()
{
return parent.source.original_range(parent.range.start + range.start..parent.range.start + range.end);
if let Some(parent) = self.location() {
return parent
.source
.original_range(parent.range.start + range.start..parent.range.start + range.end);
}

return (self.clone(), range);
}
(self.clone(), range)
}
}

#[derive(Debug)]
|
@@ -264,7 +258,7 @@ impl LineCursor {
let start = self.pos;
let mut it = self.source.content().as_str()[start..].chars().peekable();

let mut prev = self.source.content().as_str()[..start].chars().rev().next();
let mut prev = self.source.content().as_str()[..start].chars().next_back();
while self.pos < pos {
let c = it.next().unwrap();
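
The `SourceOffset` doc comment above gives the "Con\]tent" -> "Con]tent" example; a standalone sketch of the same binary-search lookup (not the crate's API) shows how a recorded offset maps positions in the stripped content back to the original:

// For "Con\]tent" -> "Con]tent", one offset of +1 is recorded at position 3,
// so positions at or past the removed '\' shift one byte to the right.
fn position(offsets: &[(usize, isize)], pos: usize) -> usize {
    match offsets.binary_search_by_key(&pos, |&(orig, _)| orig) {
        Ok(idx) => (pos as isize + offsets[idx].1) as usize,
        Err(idx) => {
            if idx == 0 {
                pos
            } else {
                (pos as isize + offsets[idx - 1].1) as usize
            }
        }
    }
}

fn main() {
    let offsets = vec![(3usize, 1isize)];
    assert_eq!(position(&offsets, 2), 2); // before the escape: unchanged
    assert_eq!(position(&offsets, 3), 4); // ']' in "Con]tent" maps past the '\'
}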
|
|
|
@@ -28,11 +28,7 @@ pub trait RuleState: Downcast {
fn scope(&self) -> Scope;

/// Callback called when state goes out of scope
fn on_remove(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report>;
fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report>;
}
impl_downcast!(RuleState);

|
|
|
@@ -97,25 +97,28 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
/// # Notes
///
/// If you only need to escape content that won't be parsed, use [`process_escaped`] instead.
pub fn escape_source(source: Rc<dyn Source>, range: Range<usize>, name: String, escape: char, token: &'static str) -> Rc<dyn Source>
{
pub fn escape_source(
source: Rc<dyn Source>,
range: Range<usize>,
name: String,
escape: char,
token: &'static str,
) -> Rc<dyn Source> {
let content = &source.content()[range.clone()];

let mut processed = String::new();
let mut escaped = 0;
let mut token_it = token.chars().peekable();
let mut offset = 0isize;
let mut offsets : Vec<(usize, isize)> = vec!();
for (pos, c) in content.chars().enumerate()
{
let mut offsets: Vec<(usize, isize)> = vec![];
for (pos, c) in content.chars().enumerate() {
if c == escape {
escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next();
if token_it.peek().is_none() {
(0..(escaped / 2)).for_each(|_| processed.push(escape));
if ( escaped + 1) / 2 != 0
{
if (escaped + 1) / 2 != 0 {
offset += (escaped + 1) / 2;
offsets.push((pos - token.len() - escaped as usize / 2, offset));
}
|
@@ -140,7 +143,7 @@ pub fn escape_source(source: Rc<dyn Source>, range: Range<usize>, name: String,
Token::new(range, source),
name,
processed,
offsets
offsets,
))
}

|
@@ -205,7 +208,14 @@ pub fn parse_paragraph<'a>(
let parsed = state.with_state(|new_state| -> Box<dyn Document> {
new_state
.parser
.parse(new_state, source.clone(), Some(document), ParseMode { paragraph_only: true })
.parse(
new_state,
source.clone(),
Some(document),
ParseMode {
paragraph_only: true,
},
)
.0
});
if parsed.content().borrow().len() > 1 {
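
As a rough, single-character-token simplification of the escape handling in `escape_source` (a standalone sketch, not the crate's function): an odd run of escape characters before the token makes it literal, and escape pairs collapse to one.

// Simplified escape stripping; mirrors the idea of the hunk above.
fn strip_escapes(content: &str, escape: char, token: char) -> String {
    let mut processed = String::new();
    let mut escaped = 0usize;
    for c in content.chars() {
        if c == escape {
            escaped += 1;
        } else if c == token && escaped % 2 == 1 {
            // Escaped token: keep half of the escapes, then the literal token.
            (0..escaped / 2).for_each(|_| processed.push(escape));
            processed.push(token);
            escaped = 0;
        } else {
            (0..escaped).for_each(|_| processed.push(escape));
            processed.push(c);
            escaped = 0;
        }
    }
    (0..escaped).for_each(|_| processed.push(escape));
    processed
}

fn main() {
    assert_eq!(strip_escapes("Con\\]tent", '\\', ']'), "Con]tent");
}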
|
|
|
@@ -40,16 +40,25 @@ impl Backend {

// TODO: Create a custom parser for the lsp
// Which will require a dyn Document to work
let source = Rc::new(SourceFile::with_content(params.uri.to_string(), params.text.clone(), None));
let source = Rc::new(SourceFile::with_content(
params.uri.to_string(),
params.text.clone(),
None,
));
let parser = LangParser::default();
let (_doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None, ParseMode::default());
let (_doc, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);

if let Some(sems) = state.shared.semantics.as_ref()
{
if let Some(sems) = state.shared.semantics.as_ref() {
let borrow = sems.borrow();
for (source, sem) in &borrow.sems
{
if let Some(path) = source.clone().downcast_rc::<SourceFile>()
for (source, sem) in &borrow.sems {
if let Some(path) = source
.clone()
.downcast_rc::<SourceFile>()
.ok()
.map(|source| source.path().to_owned())
{
|
@@ -57,14 +66,16 @@ impl Backend {
.insert(path, sem.tokens.replace(vec![]));
}
}

}
}
}

#[tower_lsp::async_trait]
impl LanguageServer for Backend {
async fn initialize(&self, _params: InitializeParams) -> tower_lsp::jsonrpc::Result<InitializeResult> {
async fn initialize(
&self,
_params: InitializeParams,
) -> tower_lsp::jsonrpc::Result<InitializeResult> {
Ok(InitializeResult {
capabilities: ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Kind(
|
@@ -106,7 +117,7 @@ impl LanguageServer for Backend {
},
server_info: Some(ServerInfo {
name: "nmlls".into(),
version: Some("0.1".into())
version: Some("0.1".into()),
}),
})
}
|
@@ -138,7 +149,10 @@ impl LanguageServer for Backend {
.await
}

async fn completion(&self, _params: CompletionParams) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
async fn completion(
&self,
_params: CompletionParams,
) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
//let uri = params.text_document_position.text_document.uri;
//let position = params.text_document_position.position;
let completions = || -> Option<Vec<CompletionItem>> {