ef3d0c3e 2024-10-24 10:18:49 +02:00
parent ea29c4bb53
commit 634f8876ee
40 changed files with 461 additions and 546 deletions

@@ -1,4 +1,4 @@
pub mod compiler;
pub mod navigation;
-pub mod process;
pub mod postprocess;
+pub mod process;

@@ -239,7 +239,7 @@ pub fn create_navigation(
nav.entries
.sort_by(|l, r| NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str()));
-for (_, child) in &mut nav.children {
+for child in nav.children.values_mut() {
sort_entries(child);
}
}

@@ -67,7 +67,9 @@ impl PostProcess {
}
if let Some((found_ref, found_doc)) = &found_ref {
let found_borrow = found_doc.borrow();
-let found_path = found_borrow.get_variable("compiler.output").ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
+let found_path = found_borrow
+.get_variable("compiler.output")
+.ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let insert_content = format!("{found_path}#{found_ref}");
content.insert_str(pos + offset, insert_content.as_str());
offset += insert_content.len();

@@ -26,7 +26,12 @@ fn parse(
) -> Result<Box<dyn Document<'static>>, String> {
// Parse
//let source = SourceFile::new(input.to_string(), None).unwrap();
-let (doc, _) = parser.parse(ParserState::new(parser, None), source.clone(), None, ParseMode::default());
+let (doc, _) = parser.parse(
+ParserState::new(parser, None),
+source.clone(),
+None,
+ParseMode::default(),
+);
if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");
@@ -159,7 +164,10 @@ pub fn process(
/// Processes sources from in-memory strings
/// This function is indented for testing
#[cfg(test)]
-pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
+pub fn process_from_memory(
+target: Target,
+sources: Vec<String>,
+) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
let parser = LangParser::default();

@@ -33,8 +33,7 @@ pub enum CrossReference {
impl core::fmt::Display for CrossReference {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-match self
-{
+match self {
CrossReference::Unspecific(name) => write!(f, "#{name}"),
CrossReference::Specific(doc_name, name) => write!(f, "{doc_name}#{name}"),
}
@@ -74,8 +73,7 @@ impl Scope {
));
// Variables
-self.variables
-.extend(other.variables.drain());
+self.variables.extend(other.variables.drain());
}
false => {
// References
@@ -131,8 +129,7 @@ pub trait Document<'a>: core::fmt::Debug {
);
}
// Add contained references
-else if let Some(container) =
-self
+else if let Some(container) = self
.content()
.borrow()
.last()
@@ -168,9 +165,7 @@ pub trait Document<'a>: core::fmt::Debug {
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
match self.scope().borrow().variables.get(name) {
-Some(variable) => {
-Some(variable.clone())
-}
+Some(variable) => Some(variable.clone()),
// Continue search recursively
None => match self.parent() {
@@ -193,11 +188,13 @@ pub trait Document<'a>: core::fmt::Debug {
scope: &RefCell<Scope>,
merge_as: Option<&String>,
) {
-if let Some(merge_as) = merge_as { self.scope().borrow_mut().merge(
+if let Some(merge_as) = merge_as {
+self.scope().borrow_mut().merge(
&mut scope.borrow_mut(),
merge_as,
self.content().borrow().len(),
-) }
+)
+}
// Content
self.content()
@@ -206,10 +203,7 @@ pub trait Document<'a>: core::fmt::Debug {
}
fn get_reference(&self, refname: &str) -> Option<ElemReference> {
-self.scope()
-.borrow()
-.referenceable
-.get(refname).copied()
+self.scope().borrow().referenceable.get(refname).copied()
}
fn get_from_reference(

@@ -50,7 +50,12 @@ pub trait Element: Downcast + core::fmt::Debug {
fn as_container(&self) -> Option<&dyn ContainerElement> { None }
/// Compiles element
-fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String>;
+fn compile(
+&self,
+compiler: &Compiler,
+document: &dyn Document,
+cursor: usize,
+) -> Result<String, String>;
}
impl_downcast!(Element);
@@ -93,7 +98,12 @@ impl Element for DocumentEnd {
fn element_name(&self) -> &'static str { "Document End" }
-fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
+fn compile(
+&self,
+_compiler: &Compiler,
+_document: &dyn Document,
+_cursor: usize,
+) -> Result<String, String> {
Ok(String::new())
}
}

@@ -31,9 +31,7 @@ impl<'a> LangDocument<'a> {
impl<'a> Document<'a> for LangDocument<'a> {
fn source(&self) -> Rc<dyn Source> { self.source.clone() }
-fn parent(&self) -> Option<&'a dyn Document<'a>> {
-self.parent.map(|p| p as &dyn Document<'a>)
-}
+fn parent(&self) -> Option<&'a dyn Document<'a>> { self.parent.map(|p| p as &dyn Document<'a>) }
fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }

@@ -1,5 +1,5 @@
pub mod document;
-pub mod references;
-pub mod langdocument;
pub mod element;
+pub mod langdocument;
+pub mod references;
pub mod variable;

@@ -57,7 +57,9 @@ impl Variable for BaseVariable {
));
state.with_state(|new_state| {
-let _ = new_state.parser.parse_into(new_state, source, document, ParseMode::default());
+let _ = new_state
+.parser
+.parse_into(new_state, source, document, ParseMode::default());
});
}
}

@@ -23,17 +23,16 @@ use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::style::StyleHolder;
use crate::parser::util::escape_text;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug)]
pub struct Blockquote {
@@ -271,7 +270,10 @@ impl Rule for BlockquoteRule {
if let Some(properties) = captures.get(1) {
match self.parse_properties(properties) {
Err(err) => {
-report_err!(&mut reports, cursor.source.clone(), "Invalid Blockquote Properties".into(),
+report_err!(
+&mut reports,
+cursor.source.clone(),
+"Invalid Blockquote Properties".into(),
span(properties.range(), err)
);
return (end_cursor, reports);
@@ -329,8 +331,14 @@ impl Rule for BlockquoteRule {
} else if elem.downcast_ref::<Blockquote>().is_some() {
parsed_content.push(elem);
} else {
-report_err!(&mut reports, token.source(), "Unable to Parse Blockquote Entry".into(),
-span(token.range.clone(), "Blockquotes may only contain paragraphs and other blockquotes".into())
+report_err!(
+&mut reports,
+token.source(),
+"Unable to Parse Blockquote Entry".into(),
+span(
+token.range.clone(),
+"Blockquotes may only contain paragraphs and other blockquotes".into()
+)
);
return (end_cursor, reports);
}

@@ -1,6 +1,4 @@
use std::collections::HashMap;
-use std::ops::Range;
-use std::rc::Rc;
use std::sync::Once;
use ariadne::Fmt;
@@ -25,16 +23,15 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::util::{self};
use lazy_static::lazy_static;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum CodeKind {
@@ -323,9 +320,7 @@ impl RegexRule for CodeRule {
fn regexes(&self) -> &[regex::Regex] { &self.re }
-fn enabled(&self, mode: &ParseMode, id: usize) -> bool {
-return !mode.paragraph_only || id != 0;
-}
+fn enabled(&self, mode: &ParseMode, id: usize) -> bool { !mode.paragraph_only || id != 0 }
fn on_regex_match(
&self,
@@ -362,10 +357,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Invalid Code Properties".into(),
-span(
-props.range(),
-e
-)
+span(props.range(), e)
);
return reports;
}
@@ -383,10 +375,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Missing Code Language".into(),
-span(
-lang.range(),
-"No language specified".into()
-)
+span(lang.range(), "No language specified".into())
);
return reports;
@@ -431,10 +420,7 @@ impl RegexRule for CodeRule {
&mut reports,
token.source(),
"Empty Code Content".into(),
-span(
-token.range.clone(),
-"Code content cannot be empty".into()
-)
+span(token.range.clone(), "Code content cannot be empty".into())
);
return reports;
}
@@ -450,23 +436,23 @@ impl RegexRule for CodeRule {
let code_name = name.as_str().trim_end().trim_start().to_string();
(!code_name.is_empty()).then_some(code_name)
});
-let line_offset =
-match properties.get("line_offset", |prop, value| {
+let line_offset = match properties.get("line_offset", |prop, value| {
value.parse::<usize>().map_err(|e| (prop, e))
}) {
Ok((_prop, offset)) => offset,
-Err(e) => {
-match e {
+Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
report_err!(
&mut reports,
token.source(),
"Invalid Code Property".into(),
span(
-token.start()+1..token.end(),
-format!("Property `line_offset: {}` cannot be converted: {}",
+token.start() + 1..token.end(),
+format!(
+"Property `line_offset: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
-err.fg(state.parser.colors().error))
+err.fg(state.parser.colors().error)
+)
)
);
return reports;
@@ -477,7 +463,7 @@ impl RegexRule for CodeRule {
token.source(),
"Invalid Code Property".into(),
span(
-token.start()+1..token.end(),
+token.start() + 1..token.end(),
format!(
"Property `{}` doesn't exist",
err.fg(state.parser.colors().info)
@@ -486,8 +472,7 @@ impl RegexRule for CodeRule {
);
return reports;
}
-}
-}
+},
};
state.push(
@@ -680,6 +665,7 @@ mod tests {
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_semantics;
+use std::rc::Rc;
#[test]
fn code_block() {

@@ -5,15 +5,12 @@ use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use regex::Captures;
use regex::Regex;
-use std::ops::Range;
-use std::rc::Rc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug)]
pub struct Comment {
@@ -77,10 +74,7 @@ impl RegexRule for CommentRule {
&mut reports,
token.source(),
"Empty Comment".into(),
-span(
-comment.range(),
-"Comment is empty".into()
-)
+span(comment.range(), "Comment is empty".into())
);
}
@@ -117,6 +111,7 @@ mod tests {
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::validate_semantics;
+use std::rc::Rc;
use super::*;

@@ -4,7 +4,6 @@ use std::any::Any;
use std::cell::Ref;
use std::cell::RefCell;
use std::collections::HashMap;
-use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
@@ -20,14 +19,13 @@ use crate::lua::kernel::CTX;
use crate::parser::customstyle::CustomStyle;
use crate::parser::customstyle::CustomStyleToken;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::paragraph::Paragraph;
@@ -63,7 +61,10 @@ impl CustomStyle for LuaCustomStyle {
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.start.as_str());
if let Err(err) = chunk.eval::<()>() {
-report_err!(&mut reports, location.source(), "Lua execution failed".into(),
+report_err!(
+&mut reports,
+location.source(),
+"Lua execution failed".into(),
span(location.range.clone(), err.to_string()),
note(format!(
"When trying to start custom style {}",
@@ -94,7 +95,10 @@ impl CustomStyle for LuaCustomStyle {
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.end.as_str());
if let Err(err) = chunk.eval::<()>() {
-report_err!(&mut reports, location.source(), "Lua execution failed".into(),
+report_err!(
+&mut reports,
+location.source(),
+"Lua execution failed".into(),
span(location.range.clone(), err.to_string()),
note(format!(
"When trying to end custom style {}",
@@ -115,11 +119,7 @@ struct CustomStyleState {
impl RuleState for CustomStyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }
-fn on_remove(
-&self,
-state: &ParserState,
-document: &dyn Document,
-) -> Vec<Report> {
+fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];
self.toggled.iter().for_each(|(style, token)| {
@@ -135,11 +135,14 @@ impl RuleState for CustomStyleState {
})
.unwrap();
-report_err!(&mut reports, token.source(), "Unterminated Custom Style".into(),
-span(token.range.clone(), format!(
-"Style {} starts here",
-style.fg(state.parser.colors().info)
-)),
+report_err!(
+&mut reports,
+token.source(),
+"Unterminated Custom Style".into(),
+span(
+token.range.clone(),
+format!("Style {} starts here", style.fg(state.parser.colors().info))
+),
span(paragraph_end.1, "Paragraph ends here".into()),
note("Styles cannot span multiple documents (i.e @import)".into())
);
@@ -272,16 +275,19 @@ impl Rule for CustomStyleRule {
Token::new(cursor.pos..cursor.pos + s_end.len(), cursor.source.clone());
if style_state.toggled.get(style.name()).is_none() {
let mut reports = vec![];
-report_err!(&mut reports, token.source(), "Invalid End of Style".into(),
-span(token.range.clone(), format!(
-"Cannot end style {} here, it does not started anywhere",
-style.name().fg(state.parser.colors().info)
-))
-);
-return (
-cursor.at(cursor.pos + s_end.len()),
-reports
-);
+report_err!(
+&mut reports,
+token.source(),
+"Invalid End of Style".into(),
+span(
+token.range.clone(),
+format!(
+"Cannot end style {} here, it does not started anywhere",
+style.name().fg(state.parser.colors().info)
+)
+)
+);
+return (cursor.at(cursor.pos + s_end.len()), reports);
}
style_state.toggled.remove(style.name());
@@ -294,20 +300,26 @@ impl Rule for CustomStyleRule {
);
if let Some(start_token) = style_state.toggled.get(style.name()) {
let mut reports = vec![];
-report_err!(&mut reports, token.source(), "Invalid Start of Style".into(),
-span(token.range.clone(), format!(
-"When trying to start custom style {}",
-self.name().fg(state.parser.colors().info)
-)),
-span(start_token.range.clone(), format!(
-"Style {} previously starts here",
-self.name().fg(state.parser.colors().info)
-)),
-);
-return (
-cursor.at(cursor.pos + s_end.len()),
-reports
-);
+report_err!(
+&mut reports,
+token.source(),
+"Invalid Start of Style".into(),
+span(
+token.range.clone(),
+format!(
+"When trying to start custom style {}",
+self.name().fg(state.parser.colors().info)
+)
+),
+span(
+start_token.range.clone(),
+format!(
+"Style {} previously starts here",
+self.name().fg(state.parser.colors().info)
+)
+),
+);
+return (cursor.at(cursor.pos + s_end.len()), reports);
}
style_state

@@ -1,7 +1,6 @@
use crate::parser::parser::ParseMode;
use crate::parser::style::ElementStyle;
use std::any::Any;
-use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;

@@ -1,6 +1,4 @@
use std::collections::HashMap;
-use std::ops::Range;
-use std::rc::Rc;
use std::sync::Arc;
use std::sync::Once;
@@ -29,12 +27,11 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug)]
struct Graphviz {
@@ -225,21 +222,14 @@ impl RegexRule for GraphRule {
return reports;
}
Some(content) => {
-let processed = util::escape_text(
-'\\',
-"[/graph]",
-content.as_str(),
-);
+let processed = util::escape_text('\\', "[/graph]", content.as_str());
if processed.is_empty() {
report_err!(
&mut reports,
token.source(),
"Empty Graph Code".into(),
-span(
-content.range(),
-"Graph code is empty".into()
-)
+span(content.range(), "Graph code is empty".into())
);
return reports;
}
@@ -273,10 +263,7 @@ impl RegexRule for GraphRule {
&mut reports,
token.source(),
"Invalid Graph Properties".into(),
-span(
-props.range(),
-e
-)
+span(props.range(), e)
);
return reports;
}
@@ -312,10 +299,7 @@ impl RegexRule for GraphRule {
&mut reports,
token.source(),
"Invalid Graph Property".into(),
-span(
-token.start() + 1..token.end(),
-err
-)
+span(token.start() + 1..token.end(), err)
);
return reports;
}
@@ -412,6 +396,7 @@ mod tests {
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
+use std::rc::Rc;
use super::*;

@@ -4,17 +4,15 @@ use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::SourceFile;
use crate::parser::source::Token;
use ariadne::Fmt;
use regex::Captures;
use regex::Regex;
-use std::ops::Range;
use std::rc::Rc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::paragraph::Paragraph;

@@ -9,9 +9,9 @@ use crate::parser::layout::LayoutHolder;
use crate::parser::layout::LayoutType;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
-use crate::parser::parser::ReportColors;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
@@ -31,8 +31,6 @@ use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum LayoutToken {
@@ -252,11 +250,7 @@ struct LayoutState {
impl RuleState for LayoutState {
fn scope(&self) -> Scope { Scope::DOCUMENT }
-fn on_remove(
-&self,
-state: &ParserState,
-document: &dyn Document,
-) -> Vec<Report> {
+fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];
let doc_borrow = document.content().borrow();
@@ -270,17 +264,13 @@ impl RuleState for LayoutState {
"Unterminated Layout".into(),
span(
start.source(),
-start.range.start+1..start.range.end,
+start.range.start + 1..start.range.end,
format!(
"Layout {} stars here",
layout_type.name().fg(state.parser.colors().info)
)
),
-span(
-at.source(),
-at.range.clone(),
-"Document ends here".into()
-)
+span(at.source(), at.range.clone(), "Document ends here".into())
);
}
@@ -363,15 +353,11 @@ impl LayoutRule {
match layout_type.parse_properties(content.as_str()) {
Ok(props) => Ok(props),
Err(err) => {
report_err!(
&mut reports,
token.source(),
"Invalid Layout Properties".into(),
-span(
-props.range(),
-err
-)
+span(props.range(), err)
);
Err(())
}
@@ -473,7 +459,6 @@ impl RegexRule for LayoutRule {
trimmed.fg(state.parser.colors().highlight)
)
)
);
return reports;
}
@@ -514,9 +499,12 @@ impl RegexRule for LayoutRule {
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
-let start = matches.get(0).map(|m| {
+let start = matches
+.get(0)
+.map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
-}).unwrap();
+})
+.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_BEGIN".len(),
@@ -546,10 +534,7 @@ impl RegexRule for LayoutRule {
&mut reports,
token.source(),
"Invalid #+LAYOUT_NEXT".into(),
-span(
-token.range.clone(),
-"No active layout found".into()
-)
+span(token.range.clone(), "No active layout found".into())
);
return reports;
}
@@ -583,15 +568,16 @@ impl RegexRule for LayoutRule {
matches.get(1),
) {
Ok(props) => props,
-Err(rep) => return reports,
+Err(()) => return reports,
};
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
-let start = matches.get(0).map(|m| {
-m.start() + token.source().content()[m.start()..].find('#').unwrap()
-}).unwrap();
+let start = matches
+.get(0)
+.map(|m| m.start() + token.source().content()[m.start()..].find('#').unwrap())
+.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_NEXT".len(),
@@ -611,7 +597,6 @@ impl RegexRule for LayoutRule {
layout_type.clone(),
properties,
)
} else {
// LAYOUT_END
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();
@@ -623,10 +608,7 @@ impl RegexRule for LayoutRule {
&mut reports,
token.source(),
"Invalid #+LAYOUT_NEXT".into(),
-span(
-token.range.clone(),
-"No active layout found".into()
-)
+span(token.range.clone(), "No active layout found".into())
);
return reports;
}
@@ -660,7 +642,7 @@ impl RegexRule for LayoutRule {
matches.get(1),
) {
Ok(props) => props,
-Err(rep) => return reports,
+Err(()) => return reports,
};
let layout_type = layout_type.clone();
@@ -670,9 +652,10 @@ impl RegexRule for LayoutRule {
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
-let start = matches.get(0).map(|m| {
-m.start() + token.source().content()[m.start()..].find('#').unwrap()
-}).unwrap();
+let start = matches
+.get(0)
+.map(|m| m.start() + token.source().content()[m.start()..].find('#').unwrap())
+.unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_END".len(),
@@ -920,7 +903,8 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
-use crate::{validate_document, validate_semantics};
+use crate::validate_document;
+use crate::validate_semantics;
use super::*;

@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;
@@ -19,11 +20,8 @@ use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
-use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug)]
pub struct Link {
@@ -118,14 +116,17 @@ impl RegexRule for LinkRule {
&mut reports,
token.source(),
"Empty Link Display".into(),
-span(
-display.range(),
-"Link display is empty".into()
-)
+span(display.range(), "Link display is empty".into())
);
return reports;
}
-let display_source = util::escape_source(token.source(), display.range(), "Link Display".into(), '\\', "](");
+let display_source = util::escape_source(
+token.source(),
+display.range(),
+"Link Display".into(),
+'\\',
+"](",
+);
if display_source.content().is_empty() {
report_err!(
&mut reports,
@@ -177,10 +178,7 @@ impl RegexRule for LinkRule {
&mut reports,
token.source(),
"Empty Link URL".into(),
-span(
-url.range(),
-"Link url is empty".into()
-)
+span(url.range(), "Link url is empty".into())
);
return reports;
}

@@ -13,7 +13,9 @@ use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
use crate::parser::reports::Report;
+use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Token;
@@ -23,8 +25,6 @@ use crate::parser::util::escape_text;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use regex::Match;
use regex::Regex;
@@ -310,7 +310,10 @@ impl Rule for ListRule {
if let Some(properties) = captures.get(2) {
match self.parse_properties(properties) {
Err(err) => {
-report_err!(&mut reports, cursor.source.clone(), "Invalid List Entry Properties".into(),
+report_err!(
+&mut reports,
+cursor.source.clone(),
+"Invalid List Entry Properties".into(),
span(properties.range(), err)
);
return (cursor.at(captures.get(0).unwrap().end()), reports);
@@ -357,7 +360,7 @@ impl Rule for ListRule {
.map(|delim| {
captures.get(1).unwrap().as_str()[0..delim]
.chars()
-.fold(true, |val, c| val && c.is_whitespace())
+.all(|c| c.is_whitespace())
}) == Some(true)
{
break;
@@ -378,7 +381,10 @@ impl Rule for ListRule {
));
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
Err(err) => {
-report_warn!(&mut reports, token.source(), "Unable to parse List Entry".into(),
+report_warn!(
+&mut reports,
+token.source(),
+"Unable to parse List Entry".into(),
span(token.range.clone(), err.into())
);
// Return an empty paragraph
@@ -428,7 +434,6 @@ impl Rule for ListRule {
#[cfg(test)]
mod tests {
use super::*;
-use crate::parser::source::Source;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;

@@ -1,5 +1,4 @@
use std::collections::HashMap;
-use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
@@ -20,7 +19,8 @@ use crate::document::element::ReferenceableElement;
use crate::document::references::validate_refname;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
-use crate::parser::parser::ReportColors;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
@@ -31,8 +31,6 @@ use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::paragraph::Paragraph;
use super::reference::InternalReference;
@@ -314,13 +312,10 @@ impl MediaRule {
&mut reports,
token.source(),
"Invalid Media Properties".into(),
-span(
-props.range(),
-e
-)
+span(props.range(), e)
);
None
-},
+}
Ok(properties) => Some(properties),
}
}
@@ -373,10 +368,7 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media Refname".into(),
-span(
-m.range(),
-err
-)
+span(m.range(), err)
);
return reports;
}
@@ -392,24 +384,19 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media URI".into(),
-span(
-m.range(),
-err
-)
+span(m.range(), err)
);
return reports;
}
};
// Properties
-let properties = match self.parse_properties(&mut reports, &token, &matches.get(3))
-{
+let properties = match self.parse_properties(&mut reports, &token, &matches.get(3)) {
Some(pm) => pm,
None => return reports,
};
-let media_type =
-match Self::detect_filetype(uri.as_str()) {
+let media_type = match Self::detect_filetype(uri.as_str()) {
Some(media_type) => media_type,
None => match properties.get("type", |prop, value| {
MediaType::from_str(value.as_str()).map_err(|e| (prop, e))
@@ -422,7 +409,7 @@ impl RegexRule for MediaRule {
token.source(),
"Invalid Media Property".into(),
span(
-token.start()+1..token.end(),
+token.start() + 1..token.end(),
format!(
"Property `type: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
@@ -438,7 +425,7 @@ impl RegexRule for MediaRule {
token.source(),
"Invalid Media Property".into(),
span(
-token.start()+1..token.end(),
+token.start() + 1..token.end(),
format!("{err}. Required because mediatype could not be detected")
)
);
@@ -481,9 +468,7 @@ impl RegexRule for MediaRule {
"Invalid Media Description".into(),
span(
content.range(),
-format!(
-"Could not parse description: {err}"
-)
+format!("Could not parse description: {err}")
)
);
return reports;
@@ -522,11 +507,7 @@ impl RegexRule for MediaRule {
&mut reports,
token.source(),
"Invalid Media".into(),
-span(
-token.range.clone(),
-err
-)
+span(token.range.clone(), err)
);
}

@@ -1,5 +1,8 @@
+pub mod blockquote;
pub mod code;
pub mod comment;
+pub mod customstyle;
+pub mod elemstyle;
pub mod graphviz;
pub mod import;
pub mod layout;
@@ -15,6 +18,3 @@ pub mod style;
pub mod tex;
pub mod text;
pub mod variable;
-pub mod elemstyle;
-pub mod customstyle;
-pub mod blockquote;

@@ -10,11 +10,10 @@ use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::*;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
-use crate::parser::source::Source;
use crate::parser::source::Token;
-use crate::parser::reports::*;
// TODO: Full refactor
// Problem is that document parsed from other sources i.e by variables
@@ -151,14 +150,13 @@ impl Rule for ParagraphRule {
#[cfg(test)]
mod tests {
use super::*;
-use std::rc::Rc;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
+use std::rc::Rc;
#[test]
fn parse() {

@@ -6,8 +6,8 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
-use crate::parser::reports::*;
use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Token;
use crate::parser::util::Property;
@@ -272,8 +272,6 @@ impl RegexRule for RawRule {
#[cfg(test)]
mod tests {
use super::*;
-use crate::parser::source::Source;
-use std::rc::Rc;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
@@ -281,6 +279,7 @@ mod tests {
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::validate_semantics;
+use std::rc::Rc;
#[test]
fn parser() {

@@ -1,5 +1,4 @@
use std::collections::HashMap;
-use std::ops::Range;
use std::rc::Rc;
use reference_style::ExternalReferenceStyle;
@@ -20,17 +19,15 @@ use crate::document::references::validate_refname;
use crate::lsp::semantic::Semantics;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
-use crate::parser::parser::ReportColors;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use crate::parser::util;
use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyParser;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
#[derive(Debug)]
pub struct InternalReference {
@@ -149,7 +146,7 @@ impl Element for ExternalReference {
format!("Failed to format ExternalReference style `{format_string}`: {err}")
})?;
-result += format!("\">{}</a>", args.to_string()).as_str();
+result += format!("\">{}</a>", args).as_str();
}
// Add crossreference
compiler.insert_crossreference(crossreference_pos, self.reference.clone());
@@ -214,13 +211,10 @@ impl ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Properties".into(),
-span(
-props.range(),
-e
-)
+span(props.range(), e)
);
None
-},
+}
Ok(properties) => Some(properties),
}
}
@@ -259,10 +253,7 @@ impl RegexRule for ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Refname".into(),
-span(
-refname_match.range(),
-err
-)
+span(refname_match.range(), err)
);
return reports;
}
@@ -277,10 +268,7 @@ impl RegexRule for ReferenceRule {
&mut reports,
token.source(),
"Invalid Reference Refname".into(),
-span(
-refname_match.range(),
-err
-)
+span(refname_match.range(), err)
);
return reports;
}
@@ -292,8 +280,7 @@ impl RegexRule for ReferenceRule {
};
// Properties
-let properties = match self.parse_properties(&mut reports, &token, &matches.get(2))
-{
+let properties = match self.parse_properties(&mut reports, &token, &matches.get(2)) {
Some(pm) => pm,
None => return reports,
};

@@ -5,6 +5,8 @@ use crate::lua::kernel::KernelContext;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
@@ -16,8 +18,6 @@ use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::rc::Rc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::text::Text;
@@ -106,10 +106,7 @@ impl RegexRule for ScriptRule {
&mut reports,
token.source(),
"Invalid Kernel Name".into(),
-span(
-name.range(),
-e
-)
+span(name.range(), e)
);
return reports;
}
@@ -126,26 +123,27 @@ impl RegexRule for ScriptRule {
};
let script_range = matches.get(if index == 0 { 2 } else { 3 }).unwrap().range();
-let source = escape_source(token.source(), script_range.clone(), format!(
+let source = escape_source(
+token.source(),
+script_range.clone(),
+format!(
":LUA:{kernel_name}#{}#{}",
token.source().name(),
matches.get(0).unwrap().start()
-), '\\', ">@");
-if source.content().is_empty()
-{
+),
+'\\',
+">@",
+);
+if source.content().is_empty() {
report_warn!(
&mut reports,
token.source(),
"Invalid Kernel Code".into(),
-span(
-script_range,
-"Kernel code is empty".into()
-)
+span(script_range, "Kernel code is empty".into())
);
return reports;
}
let execute = |lua: &Lua| {
let chunk = lua.load(source.content()).set_name(kernel_name);
@@ -177,10 +175,7 @@ impl RegexRule for ScriptRule {
&mut reports,
token.source(),
"Invalid Kernel Code Kind".into(),
-span(
-kind.range(),
-msg
-)
+span(kind.range(), msg)
);
return reports;
}

@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use ariadne::Fmt;
@@ -19,11 +20,8 @@ use mlua::Lua;
use regex::Regex;
use section_style::SectionLinkPos;
use section_style::SectionStyle;
-use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::reference::InternalReference;
@@ -300,9 +298,13 @@ impl RegexRule for SectionRule {
"Missing Section Spacing".into(),
span(
name.range(),
-"Sections require at least one whitespace before the section's name".into()
+"Sections require at least one whitespace before the section's name"
+.into()
),
-help(format!("Add a space before `{}`", section_name.fg(state.parser.colors().highlight)))
+help(format!(
+"Add a space before `{}`",
+section_name.fg(state.parser.colors().highlight)
+))
);
return reports;
}

@@ -8,8 +8,9 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
+use crate::parser::reports::macros::*;
+use crate::parser::reports::*;
use crate::parser::rule::RegexRule;
-use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
@@ -18,11 +19,8 @@ use mlua::Function;
use regex::Captures;
use regex::Regex;
use std::cell::RefCell;
-use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
-use crate::parser::reports::*;
-use crate::parser::reports::macros::*;
use super::paragraph::Paragraph;
@@ -86,11 +84,7 @@ impl StyleState {
impl RuleState for StyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }
-fn on_remove(
-&self,
-state: &ParserState,
-document: &dyn Document,
-) -> Vec<Report> {
+fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report> {
let mut reports = vec![];
self.toggled
@@ -119,15 +113,9 @@ impl RuleState for StyleState {
"Unterminated Style".into(),
span(
token.range.clone(),
-format!(
-"Style {} starts here",
-name.fg(state.parser.colors().info)
-)
+format!("Style {} starts here", name.fg(state.parser.colors().info))
),
-span(
-paragraph_end.1,
-"Paragraph ends here".into()
-),
+span(paragraph_end.1, "Paragraph ends here".into()),
note("Styles cannot span multiple documents (i.e @import)".into())
);
});

@ -1,10 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Read; use std::io::Read;
use std::io::Write; use std::io::Write;
use std::ops::Range;
use std::process::Command; use std::process::Command;
use std::process::Stdio; use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Once; use std::sync::Once;
@ -29,17 +27,15 @@ use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode; use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState; use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors; use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::util; use crate::parser::util;
use crate::parser::util::Property; use crate::parser::util::Property;
use crate::parser::util::PropertyMap; use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyMapError; use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser; use crate::parser::util::PropertyParser;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
enum TexKind { enum TexKind {
@ -266,15 +262,13 @@ impl TexRule {
fn parse_properties( fn parse_properties(
&self, &self,
mut reports: &mut Vec<Report>, mut reports: &mut Vec<Report>,
colors: &ReportColors,
token: &Token, token: &Token,
m: &Option<Match>, m: &Option<Match>,
) -> Option<PropertyMap> { ) -> Option<PropertyMap> {
match m { match m {
None => match self.properties.default() { None => match self.properties.default() {
Ok(properties) => Some(properties), Ok(properties) => Some(properties),
Err(e) => Err(e) => {
{
report_err!( report_err!(
&mut reports, &mut reports,
token.source(), token.source(),
@ -296,13 +290,10 @@ impl TexRule {
&mut reports, &mut reports,
token.source(), token.source(),
"Invalid Tex Properties".into(), "Invalid Tex Properties".into(),
span( span(props.range(), e)
props.range(),
e
)
); );
None None
}, }
Ok(properties) => Some(properties), Ok(properties) => Some(properties),
} }
} }
@ -359,10 +350,7 @@ impl RegexRule for TexRule {
&mut reports, &mut reports,
token.source(), token.source(),
"Empty Tex Code".into(), "Empty Tex Code".into(),
span( span(content.range(), "Tex code is empty".into())
content.range(),
"Tex code is empty".into()
)
); );
} }
processed processed
@ -370,8 +358,7 @@ impl RegexRule for TexRule {
}; };
// Properties // Properties
let properties = match self.parse_properties(&mut reports, state.parser.colors(), &token, &matches.get(1)) let properties = match self.parse_properties(&mut reports, &token, &matches.get(1)) {
{
Some(pm) => pm, Some(pm) => pm,
None => return reports, None => return reports,
}; };
@ -383,7 +370,6 @@ impl RegexRule for TexRule {
Ok((_prop, kind)) => kind, Ok((_prop, kind)) => kind,
Err(e) => match e { Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => { PropertyMapError::ParseError((prop, err)) => {
report_err!( report_err!(
&mut reports, &mut reports,
token.source(), token.source(),
@ -442,14 +428,20 @@ impl RegexRule for TexRule {
Semantics::from_source(token.source(), &state.shared.semantics) Semantics::from_source(token.source(), &state.shared.semantics)
{ {
let range = token.range; let range = token.range;
sems.add(range.start..range.start + if index == 0 { 2 } else { 1 }, tokens.tex_sep); sems.add(
range.start..range.start + if index == 0 { 2 } else { 1 },
tokens.tex_sep,
);
if let Some(props) = matches.get(1).map(|m| m.range()) { if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.tex_props_sep); sems.add(props.start - 1..props.start, tokens.tex_props_sep);
sems.add(props.clone(), tokens.tex_props); sems.add(props.clone(), tokens.tex_props);
sems.add(props.end..props.end + 1, tokens.tex_props_sep); sems.add(props.end..props.end + 1, tokens.tex_props_sep);
} }
sems.add(matches.get(2).unwrap().range(), tokens.tex_content); sems.add(matches.get(2).unwrap().range(), tokens.tex_content);
sems.add(range.end - if index == 0 { 2 } else { 1 }..range.end, tokens.tex_sep); sems.add(
range.end - if index == 0 { 2 } else { 1 }..range.end,
tokens.tex_sep,
);
} }
reports reports
} }
@ -550,7 +542,9 @@ mod tests {
use crate::parser::langparser::LangParser; use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser; use crate::parser::parser::Parser;
use crate::parser::source::SourceFile; use crate::parser::source::SourceFile;
use crate::{validate_document, validate_semantics}; use crate::validate_document;
use crate::validate_semantics;
use std::rc::Rc;
use super::*; use super::*;
@ -648,5 +642,4 @@ $[kind=inline]\LaTeX$
tex_sep { delta_line == 0, delta_start == 6, length == 1 }; tex_sep { delta_line == 0, delta_start == 6, length == 1 };
); );
} }
} }
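In parse_properties, the `colors: &ReportColors` parameter goes away: with the report_err!/span macros, diagnostics are built as plain Report values and only colorized when they are rendered, so presumably the color handle is no longer needed at this call site. A rough sketch of the remaining fallback-to-defaults shape, using stand-in types rather than this crate's API:

// Sketch of the fallback pattern in parse_properties: no property block means
// "use the rule's defaults"; a present-but-invalid block becomes a report.
// `Props` and the error type are illustrative stand-ins, not the real types.
#[derive(Debug, Default)]
struct Props {
    kind: String,
}

fn parse_props(block: Option<&str>) -> Result<Props, String> {
    match block {
        None => Ok(Props::default()), // no `[...]` written: fall back to defaults
        Some("") => Err("empty property block".to_string()),
        Some(s) => Ok(Props { kind: s.to_string() }),
    }
}

fn main() {
    assert!(parse_props(None).is_ok()); // defaults accepted silently
    assert!(parse_props(Some("")).is_err()); // would be reported via report_err!
    println!("{:?}", parse_props(Some("kind=inline")).unwrap());
}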

View file

@ -1,6 +1,4 @@
use std::any::Any; use std::any::Any;
use std::ops::Range;
use std::rc::Rc;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
@ -12,11 +10,10 @@ use crate::document::element::Element;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode; use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState; use crate::parser::parser::ParserState;
use crate::parser::reports::*;
use crate::parser::rule::Rule; use crate::parser::rule::Rule;
use crate::parser::source::Cursor; use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::reports::*;
#[derive(Debug)] #[derive(Debug)]
pub struct Text { pub struct Text {

View file

@ -7,18 +7,16 @@ use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode; use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState; use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors; use crate::parser::parser::ReportColors;
use crate::parser::reports::macros::*;
use crate::parser::reports::*;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use ariadne::Fmt; use ariadne::Fmt;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
use regex::Regex; use regex::Regex;
use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use std::str::FromStr; use std::str::FromStr;
use crate::parser::reports::*;
use crate::parser::reports::macros::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum VariableKind { enum VariableKind {
@ -160,17 +158,17 @@ impl RegexRule for VariableRule {
), ),
help(format!( help(format!(
"Leave empty for regular variables. Available variable kinds:{}", "Leave empty for regular variables. Available variable kinds:{}",
self.kinds.iter().skip(1).fold( self.kinds
"".to_string(), .iter()
|acc, (char, name)| { .skip(1)
.fold("".to_string(), |acc, (char, name)| {
acc + format!( acc + format!(
"\n - `{}` : {}", "\n - `{}` : {}",
char.fg(state.parser.colors().highlight), char.fg(state.parser.colors().highlight),
name.fg(state.parser.colors().info) name.fg(state.parser.colors().info)
) )
.as_str() .as_str()
} })
)
)) ))
); );
return reports; return reports;
@ -396,10 +394,7 @@ impl RegexRule for VariableSubstitutionRule {
&mut reports, &mut reports,
token.source(), token.source(),
"Invalid Variable Name".into(), "Invalid Variable Name".into(),
span( span(name.range(), msg)
name.range(),
msg
)
); );
return reports; return reports;
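The help text for unknown variable kinds is now built with a plain iterator chain: skip(1) drops the default kind and fold appends one bullet per remaining kind. A stand-alone sketch of that pattern, with placeholder kind characters and names (the real list and its coloring come from self.kinds and the parser's ReportColors):

// Sketch of the help-text fold in the variable rule. The kind characters and
// names below are placeholders, not the crate's actual variable kinds.
fn main() {
    let kinds: [(char, &str); 3] = [('\0', "default"), ('a', "kind-a"), ('b', "kind-b")];
    let help = kinds
        .iter()
        .skip(1) // the first entry is the default kind, not listed
        .fold(String::new(), |acc, (ch, name)| {
            acc + &format!("\n - `{ch}` : {name}")
        });
    println!("Available variable kinds:{help}");
}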

View file

@ -279,7 +279,7 @@ impl<'a> Semantics<'a> {
.unwrap_or(None) .unwrap_or(None)
{ {
return Self::from_source_impl(location.source(), semantics, original_source); return Self::from_source_impl(location.source(), semantics, original_source);
} else if let Some(source) = source.clone().downcast_rc::<SourceFile>().ok() { } else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
return Ref::filter_map( return Ref::filter_map(
semantics.as_ref().unwrap().borrow(), semantics.as_ref().unwrap().borrow(),
|semantics: &SemanticsHolder| { |semantics: &SemanticsHolder| {
@ -301,7 +301,7 @@ impl<'a> Semantics<'a> {
) )
}); });
} }
return None; None
} }
pub fn from_source( pub fn from_source(
@ -311,7 +311,7 @@ impl<'a> Semantics<'a> {
if semantics.is_none() { if semantics.is_none() {
return None; return None;
} }
return Self::from_source_impl(source.clone(), semantics, source); Self::from_source_impl(source.clone(), semantics, source)
} }
pub fn add(&self, range: Range<usize>, token: (u32, u32)) { pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
@ -381,7 +381,7 @@ pub mod tests {
.unwrap() .unwrap()
.borrow() .borrow()
.sems .sems
.get(&($source as Rc<dyn Source>)) .get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
.unwrap() .unwrap()
.tokens .tokens
.borrow() .borrow()
@ -414,7 +414,7 @@ pub mod tests {
.unwrap() .unwrap()
.borrow() .borrow()
.sems .sems
.get(&($source as Rc<dyn Source>)) .get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
.unwrap() .unwrap()
.tokens .tokens
.borrow() .borrow()
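Two small things happen in this file: `downcast_rc::<SourceFile>().ok()` inside an `if let Some(...)` becomes a direct `if let Ok(...)`, as clippy suggests, and the test macro now spells out `std::rc::Rc` and `crate::parser::source::Source`, so `validate_semantics!` no longer requires those imports to be in scope at the call site. A minimal, self-contained illustration of the Result-matching point (not this crate's types):

// Sketch: match on the Result directly instead of converting it to an Option
// first, which is the pattern clippy's lint nudges toward.
fn first_byte(s: &str) -> Result<u8, &'static str> {
    s.bytes().next().ok_or("empty input")
}

fn main() {
    let input = "nml";
    // Before: if let Some(b) = first_byte(input).ok() { ... }
    if let Ok(b) = first_byte(input) {
        println!("first byte: {b}");
    }
}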

View file

@ -1,13 +1,12 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::rc::Rc;
use std::ops::Deref; use std::ops::Deref;
use std::rc::Rc;
use crate::document::document::Document; use crate::document::document::Document;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::reports::*; use crate::parser::reports::*;
use super::parser::ParserState; use super::parser::ParserState;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
@ -43,8 +42,7 @@ pub struct CustomStyleHolder {
impl CustomStyleHolder { impl CustomStyleHolder {
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> { pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
self.custom_styles self.custom_styles.get(style_name).cloned()
.get(style_name).cloned()
} }
pub fn insert(&mut self, style: Rc<dyn CustomStyle>) { pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
@ -55,7 +53,5 @@ impl CustomStyleHolder {
impl Deref for CustomStyleHolder { impl Deref for CustomStyleHolder {
type Target = HashMap<String, Rc<dyn CustomStyle>>; type Target = HashMap<String, Rc<dyn CustomStyle>>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target { &self.custom_styles }
&self.custom_styles
}
} }

View file

@ -1,10 +1,10 @@
pub mod customstyle;
pub mod langparser; pub mod langparser;
pub mod layout;
pub mod parser; pub mod parser;
pub mod reports;
pub mod rule; pub mod rule;
pub mod source; pub mod source;
pub mod state; pub mod state;
pub mod util;
pub mod style; pub mod style;
pub mod layout; pub mod util;
pub mod customstyle;
pub mod reports;

View file

@ -1,7 +1,5 @@
use std::any::Any; use std::any::Any;
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
@ -22,8 +20,6 @@ use crate::elements::paragraph::Paragraph;
use crate::lsp::semantic::SemanticsHolder; use crate::lsp::semantic::SemanticsHolder;
use crate::lua::kernel::Kernel; use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder; use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use ariadne::Color; use ariadne::Color;
#[derive(Debug)] #[derive(Debug)]
@ -217,7 +213,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
return; return;
} }
(*matched_at, *match_data) = match rule.next_match(&mode, self, cursor) { (*matched_at, *match_data) = match rule.next_match(mode, self, cursor) {
None => (usize::MAX, None), None => (usize::MAX, None),
Some((mut pos, mut data)) => { Some((mut pos, mut data)) => {
// Check if escaped // Check if escaped
@ -238,7 +234,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
} }
// Find next potential match // Find next potential match
(pos, data) = match rule.next_match(&mode, self, &cursor.at(pos + 1)) { (pos, data) = match rule.next_match(mode, self, &cursor.at(pos + 1)) {
Some((new_pos, new_data)) => (new_pos, new_data), Some((new_pos, new_data)) => (new_pos, new_data),
None => (usize::MAX, data), // Stop iterating None => (usize::MAX, data), // Stop iterating
} }
@ -342,18 +338,11 @@ impl<'a, 'b> ParserState<'a, 'b> {
} }
} }
#[derive(Default)]
pub struct ParseMode { pub struct ParseMode {
pub paragraph_only: bool, pub paragraph_only: bool,
} }
impl Default for ParseMode {
fn default() -> Self {
Self {
paragraph_only: false,
}
}
}
pub trait Parser { pub trait Parser {
/// Gets the colors for formatting errors /// Gets the colors for formatting errors
/// ///
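Deriving Default for ParseMode is behavior-preserving here: the only field is a bool, and bool::default() is false, which is exactly what the removed manual impl produced. A tiny sketch of that equivalence, using a stand-in struct:

// Sketch: when every field's Default value matches the hand-written impl,
// #[derive(Default)] is equivalent and shorter.
#[derive(Debug, Default, PartialEq)]
struct Mode {
    paragraph_only: bool, // bool::default() == false
}

fn main() {
    // Equivalent to the removed manual impl { paragraph_only: false }.
    assert_eq!(Mode::default(), Mode { paragraph_only: false });
    println!("{:?}", Mode::default());
}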

View file

@ -13,9 +13,9 @@ pub enum ReportKind {
Warning, Warning,
} }
impl Into<ariadne::ReportKind<'static>> for &ReportKind { impl From<&ReportKind> for ariadne::ReportKind<'static> {
fn into(self) -> ariadne::ReportKind<'static> { fn from(val: &ReportKind) -> Self {
match self { match val {
ReportKind::Error => ariadne::ReportKind::Error, ReportKind::Error => ariadne::ReportKind::Error,
ReportKind::Warning => ariadne::ReportKind::Warning, ReportKind::Warning => ariadne::ReportKind::Warning,
} }
@ -59,7 +59,7 @@ impl Report {
for span in &self.spans { for span in &self.spans {
let (osource, opos) = span.token.source().original_position(span.token.start()); let (osource, opos) = span.token.source().original_position(span.token.start());
if &osource == &source && opos < start { if osource == source.clone() && opos < start {
start = opos; start = opos;
} }
} }
@ -97,20 +97,20 @@ impl Report {
} }
pub mod macros { pub mod macros {
pub use super::*;
#[macro_export] #[macro_export]
macro_rules! report_label { macro_rules! report_label {
($r:expr,) => {{ }}; ($r:expr,) => {{ }};
($r:expr, span($source:expr, $range:expr, $message:expr) $(, $($tail:tt)*)?) => {{ ($r:expr, span($source:expr, $range:expr, $message:expr) $(, $($tail:tt)*)?) => {{
$r.spans.push(ReportSpan { $r.spans.push(ReportSpan {
token: crate::parser::source::Token::new($range, $source), token: $crate::parser::source::Token::new($range, $source),
message: $message, message: $message,
}); });
report_label!($r, $($($tail)*)?); report_label!($r, $($($tail)*)?);
}}; }};
($r:expr, span($range:expr, $message:expr) $(, $($tail:tt)*)?) => {{ ($r:expr, span($range:expr, $message:expr) $(, $($tail:tt)*)?) => {{
$r.spans.push(ReportSpan { $r.spans.push(ReportSpan {
token: crate::parser::source::Token::new($range, $r.source.clone()), token: $crate::parser::source::Token::new($range, $r.source.clone()),
message: $message, message: $message,
}); });
report_label!($r, $($($tail)*)?); report_label!($r, $($($tail)*)?);
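Two changes worth calling out in this file: the `Into<ariadne::ReportKind>` impl is flipped into `From<&ReportKind>`, in line with clippy's from_over_into (the standard library's blanket `impl<T, U> Into<U> for T where U: From<T>` provides the Into direction for free, while the reverse does not hold), and `report_label!` now uses `$crate::parser::source::Token`, so the macro resolves its paths correctly when invoked from other modules. A self-contained sketch of the From/Into point, with local stand-in enums:

// Sketch of clippy's from_over_into: implement From and get Into for free
// through the std blanket impl `impl<T, U> Into<U> for T where U: From<T>`.
#[derive(Debug)]
enum Kind {
    Error,
    Warning,
}

#[derive(Debug)]
enum Severity {
    High,
    Low,
}

impl From<&Kind> for Severity {
    fn from(val: &Kind) -> Self {
        match val {
            Kind::Error => Severity::High,
            Kind::Warning => Severity::Low,
        }
    }
}

fn main() {
    // Both directions of the conversion now work:
    let a = Severity::from(&Kind::Error);
    let b: Severity = (&Kind::Warning).into(); // provided by the blanket impl
    println!("{a:?} {b:?}");
}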

View file

@ -3,7 +3,6 @@ use super::parser::ParseMode;
use super::parser::ParserState; use super::parser::ParserState;
use super::reports::Report; use super::reports::Report;
use super::source::Cursor; use super::source::Cursor;
use super::source::Source;
use super::source::Token; use super::source::Token;
use super::style::StyleHolder; use super::style::StyleHolder;
use crate::document::document::Document; use crate::document::document::Document;
@ -14,8 +13,6 @@ use mlua::Lua;
use std::any::Any; use std::any::Any;
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
macro_rules! create_registry { macro_rules! create_registry {
( $($construct:expr),+ $(,)? ) => {{ ( $($construct:expr),+ $(,)? ) => {{
@ -184,10 +181,10 @@ impl<T: RegexRule + 'static> Rule for T {
let token = Token::new(captures.get(0).unwrap().range(), cursor.source.clone()); let token = Token::new(captures.get(0).unwrap().range(), cursor.source.clone());
let token_end = token.end(); let token_end = token.end();
return ( (
cursor.at(token_end), cursor.at(token_end),
self.on_regex_match(*index, state, document, token, captures), self.on_regex_match(*index, state, document, token, captures),
); )
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {

View file

@ -17,8 +17,7 @@ pub trait Source: Downcast + Debug {
} }
impl_downcast!(Source); impl_downcast!(Source);
pub trait SourcePosition pub trait SourcePosition {
{
/// Transforms a position to it's position in the oldest parent source /// Transforms a position to it's position in the oldest parent source
fn original_position(&self, pos: usize) -> (Rc<dyn Source>, usize); fn original_position(&self, pos: usize) -> (Rc<dyn Source>, usize);
@ -88,30 +87,23 @@ impl Source for SourceFile {
/// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent" /// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent"
/// Then at position 3, an offset of 1 will be created to account for the removed '\' /// Then at position 3, an offset of 1 will be created to account for the removed '\'
#[derive(Debug)] #[derive(Debug)]
struct SourceOffset struct SourceOffset {
{
/// Stores the total offsets /// Stores the total offsets
offsets: Vec<(usize, isize)>, offsets: Vec<(usize, isize)>,
} }
impl SourceOffset impl SourceOffset {
{
/// Get the offset position /// Get the offset position
pub fn position(&self, pos: usize) -> usize pub fn position(&self, pos: usize) -> usize {
{ match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig) {
match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig)
{
Ok(idx) => (pos as isize + self.offsets[idx].1) as usize, Ok(idx) => (pos as isize + self.offsets[idx].1) as usize,
Err(idx) => { Err(idx) => {
if idx == 0 if idx == 0 {
{
pos pos
} } else {
else
{
(pos as isize + self.offsets[idx - 1].1) as usize (pos as isize + self.offsets[idx - 1].1) as usize
} }
}, }
} }
} }
} }
@ -135,7 +127,12 @@ impl VirtualSource {
} }
} }
pub fn new_offsets(location: Token, name: String, content: String, offsets: Vec<(usize, isize)>) -> Self { pub fn new_offsets(
location: Token,
name: String,
content: String,
offsets: Vec<(usize, isize)>,
) -> Self {
Self { Self {
location, location,
name, name,
@ -151,54 +148,51 @@ impl Source for VirtualSource {
fn content(&self) -> &String { &self.content } fn content(&self) -> &String { &self.content }
} }
impl SourcePosition for Rc<dyn Source> impl SourcePosition for Rc<dyn Source> {
{
fn original_position(&self, mut pos: usize) -> (Rc<dyn Source>, usize) { fn original_position(&self, mut pos: usize) -> (Rc<dyn Source>, usize) {
// Stop recursion // Stop recursion
if self.downcast_ref::<SourceFile>().is_some() if self.downcast_ref::<SourceFile>().is_some() {
{
return (self.clone(), pos); return (self.clone(), pos);
} }
// Apply offsets // Apply offsets
if let Some(offsets) = if let Some(offsets) = self
self.downcast_ref::<VirtualSource>() .downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref()) .and_then(|source| source.offsets.as_ref())
{ {
pos = offsets.position(pos); pos = offsets.position(pos);
} }
// Recurse to parent // Recurse to parent
if let Some(parent) = self.location() if let Some(parent) = self.location() {
{
return parent.source().original_position(parent.range.start + pos); return parent.source().original_position(parent.range.start + pos);
} }
return (self.clone(), pos); (self.clone(), pos)
} }
fn original_range(&self, mut range: Range<usize>) -> (Rc<dyn Source>, Range<usize>) { fn original_range(&self, mut range: Range<usize>) -> (Rc<dyn Source>, Range<usize>) {
// Stop recursion // Stop recursion
if self.downcast_ref::<SourceFile>().is_some() if self.downcast_ref::<SourceFile>().is_some() {
{
return (self.clone(), range); return (self.clone(), range);
} }
// Apply offsets // Apply offsets
if let Some(offsets) = if let Some(offsets) = self
self.downcast_ref::<VirtualSource>() .downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref()) .and_then(|source| source.offsets.as_ref())
{ {
range = offsets.position(range.start) .. offsets.position(range.end); range = offsets.position(range.start)..offsets.position(range.end);
} }
// Recurse to parent // Recurse to parent
if let Some(parent) = self.location() if let Some(parent) = self.location() {
{ return parent
return parent.source.original_range(parent.range.start + range.start..parent.range.start + range.end); .source
.original_range(parent.range.start + range.start..parent.range.start + range.end);
} }
return (self.clone(), range); (self.clone(), range)
} }
} }
@ -264,7 +258,7 @@ impl LineCursor {
let start = self.pos; let start = self.pos;
let mut it = self.source.content().as_str()[start..].chars().peekable(); let mut it = self.source.content().as_str()[start..].chars().peekable();
let mut prev = self.source.content().as_str()[..start].chars().rev().next(); let mut prev = self.source.content().as_str()[..start].chars().next_back();
while self.pos < pos { while self.pos < pos {
let c = it.next().unwrap(); let c = it.next().unwrap();
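SourceOffset::position keeps the offsets sorted by original position and binary-searches them, so a position inside a VirtualSource maps back across every removed escape; original_position and original_range then recurse through location() until they reach the root SourceFile. Using the "Con\]tent" -> "Con]tent" example from the doc comment above, a simplified stand-alone version of that lookup behaves as follows:

// Simplified stand-in for SourceOffset: map a position in the processed
// (escape-stripped) text back to a position in the original text.
struct Offsets {
    // (position in processed text, cumulative offset to add)
    offsets: Vec<(usize, isize)>,
}

impl Offsets {
    fn position(&self, pos: usize) -> usize {
        match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig) {
            Ok(idx) => (pos as isize + self.offsets[idx].1) as usize,
            Err(idx) if idx == 0 => pos, // before the first recorded offset
            Err(idx) => (pos as isize + self.offsets[idx - 1].1) as usize,
        }
    }
}

fn main() {
    // "Con\]tent" was turned into "Con]tent": one byte removed at position 3.
    let offs = Offsets { offsets: vec![(3, 1)] };
    assert_eq!(offs.position(2), 2); // 'n' is unaffected
    assert_eq!(offs.position(3), 4); // ']' maps past the removed '\'
    assert_eq!(offs.position(5), 6); // everything after shifts by one
    println!("offset mapping ok");
}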

View file

@ -28,11 +28,7 @@ pub trait RuleState: Downcast {
fn scope(&self) -> Scope; fn scope(&self) -> Scope;
/// Callback called when state goes out of scope /// Callback called when state goes out of scope
fn on_remove( fn on_remove(&self, state: &ParserState, document: &dyn Document) -> Vec<Report>;
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report>;
} }
impl_downcast!(RuleState); impl_downcast!(RuleState);

View file

@ -97,25 +97,28 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
/// # Notes /// # Notes
/// ///
/// If you only need to escape content that won't be parsed, use [`process_escaped`] instead. /// If you only need to escape content that won't be parsed, use [`process_escaped`] instead.
pub fn escape_source(source: Rc<dyn Source>, range: Range<usize>, name: String, escape: char, token: &'static str) -> Rc<dyn Source> pub fn escape_source(
{ source: Rc<dyn Source>,
range: Range<usize>,
name: String,
escape: char,
token: &'static str,
) -> Rc<dyn Source> {
let content = &source.content()[range.clone()]; let content = &source.content()[range.clone()];
let mut processed = String::new(); let mut processed = String::new();
let mut escaped = 0; let mut escaped = 0;
let mut token_it = token.chars().peekable(); let mut token_it = token.chars().peekable();
let mut offset = 0isize; let mut offset = 0isize;
let mut offsets : Vec<(usize, isize)> = vec!(); let mut offsets: Vec<(usize, isize)> = vec![];
for (pos, c) in content.chars().enumerate() for (pos, c) in content.chars().enumerate() {
{
if c == escape { if c == escape {
escaped += 1; escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) { } else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next(); let _ = token_it.next();
if token_it.peek().is_none() { if token_it.peek().is_none() {
(0..(escaped / 2)).for_each(|_| processed.push(escape)); (0..(escaped / 2)).for_each(|_| processed.push(escape));
if ( escaped + 1) / 2 != 0 if (escaped + 1) / 2 != 0 {
{
offset += (escaped + 1) / 2; offset += (escaped + 1) / 2;
offsets.push((pos - token.len() - escaped as usize / 2, offset)); offsets.push((pos - token.len() - escaped as usize / 2, offset));
} }
@ -140,7 +143,7 @@ pub fn escape_source(source: Rc<dyn Source>, range: Range<usize>, name: String,
Token::new(range, source), Token::new(range, source),
name, name,
processed, processed,
offsets offsets,
)) ))
} }
@ -205,7 +208,14 @@ pub fn parse_paragraph<'a>(
let parsed = state.with_state(|new_state| -> Box<dyn Document> { let parsed = state.with_state(|new_state| -> Box<dyn Document> {
new_state new_state
.parser .parser
.parse(new_state, source.clone(), Some(document), ParseMode { paragraph_only: true }) .parse(
new_state,
source.clone(),
Some(document),
ParseMode {
paragraph_only: true,
},
)
.0 .0
}); });
if parsed.content().borrow().len() > 1 { if parsed.content().borrow().len() > 1 {
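escape_source is the producer side of those offsets: it strips escape characters in front of the closing token, records where bytes were dropped and by how much, and wraps the result in a VirtualSource built with new_offsets so later diagnostics can be mapped back. A rough sketch of the idea (deliberately simpler than the real algorithm, which also handles doubled escapes and multi-character tokens):

// Sketch (not the crate's exact algorithm): strip a single escape character in
// front of a token character and record (position_in_output, bytes_removed_so_far).
fn strip_escapes(input: &str, escape: char, token: char) -> (String, Vec<(usize, isize)>) {
    let mut out = String::new();
    let mut offsets = Vec::new();
    let mut removed = 0isize;
    let mut chars = input.chars().peekable();
    while let Some(c) = chars.next() {
        if c == escape && chars.peek() == Some(&token) {
            // Drop the escape; everything after it shifts by one more position.
            removed += 1;
            offsets.push((out.chars().count(), removed));
        } else {
            out.push(c);
        }
    }
    (out, offsets)
}

fn main() {
    let (out, offsets) = strip_escapes("Con\\]tent", '\\', ']');
    assert_eq!(out, "Con]tent");
    assert_eq!(offsets, vec![(3, 1)]); // matches the SourceOffset doc example
    println!("{out} {offsets:?}");
}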

View file

@ -40,16 +40,25 @@ impl Backend {
// TODO: Create a custom parser for the lsp // TODO: Create a custom parser for the lsp
// Which will require a dyn Document to work // Which will require a dyn Document to work
let source = Rc::new(SourceFile::with_content(params.uri.to_string(), params.text.clone(), None)); let source = Rc::new(SourceFile::with_content(
params.uri.to_string(),
params.text.clone(),
None,
));
let parser = LangParser::default(); let parser = LangParser::default();
let (_doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None, ParseMode::default()); let (_doc, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);
if let Some(sems) = state.shared.semantics.as_ref() if let Some(sems) = state.shared.semantics.as_ref() {
{
let borrow = sems.borrow(); let borrow = sems.borrow();
for (source, sem) in &borrow.sems for (source, sem) in &borrow.sems {
{ if let Some(path) = source
if let Some(path) = source.clone().downcast_rc::<SourceFile>() .clone()
.downcast_rc::<SourceFile>()
.ok() .ok()
.map(|source| source.path().to_owned()) .map(|source| source.path().to_owned())
{ {
@ -57,14 +66,16 @@ impl Backend {
.insert(path, sem.tokens.replace(vec![])); .insert(path, sem.tokens.replace(vec![]));
} }
} }
} }
} }
} }
#[tower_lsp::async_trait] #[tower_lsp::async_trait]
impl LanguageServer for Backend { impl LanguageServer for Backend {
async fn initialize(&self, _params: InitializeParams) -> tower_lsp::jsonrpc::Result<InitializeResult> { async fn initialize(
&self,
_params: InitializeParams,
) -> tower_lsp::jsonrpc::Result<InitializeResult> {
Ok(InitializeResult { Ok(InitializeResult {
capabilities: ServerCapabilities { capabilities: ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Kind( text_document_sync: Some(TextDocumentSyncCapability::Kind(
@ -106,7 +117,7 @@ impl LanguageServer for Backend {
}, },
server_info: Some(ServerInfo { server_info: Some(ServerInfo {
name: "nmlls".into(), name: "nmlls".into(),
version: Some("0.1".into()) version: Some("0.1".into()),
}), }),
}) })
} }
@ -138,7 +149,10 @@ impl LanguageServer for Backend {
.await .await
} }
async fn completion(&self, _params: CompletionParams) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> { async fn completion(
&self,
_params: CompletionParams,
) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
//let uri = params.text_document_position.text_document.uri; //let uri = params.text_document_position.text_document.uri;
//let position = params.text_document_position.position; //let position = params.text_document_position.position;
let completions = || -> Option<Vec<CompletionItem>> { let completions = || -> Option<Vec<CompletionItem>> {