From e4ce3edc4df0227ad7878386ab47aa84d010d9c3 Mon Sep 17 00:00:00 2001 From: ef3d0c3e Date: Thu, 1 Aug 2024 16:15:10 +0200 Subject: [PATCH] Fix links & tests --- docs/external/graphviz.nml | 16 ++-- docs/styles/basic.nml | 2 +- src/document/document.rs | 85 +++++++++++------- src/elements/comment.rs | 134 +++++++++++++++++++--------- src/elements/layout.rs | 18 ++-- src/elements/link.rs | 178 ++++++++++++++++++++++++++----------- src/elements/paragraph.rs | 47 ++++++++++ src/elements/style.rs | 51 +++++++++++ src/elements/tex.rs | 50 ++++------- src/parser/util.rs | 2 + 10 files changed, 405 insertions(+), 178 deletions(-) diff --git a/docs/external/graphviz.nml b/docs/external/graphviz.nml index 659aa2c..d0dabf0 100644 --- a/docs/external/graphviz.nml +++ b/docs/external/graphviz.nml @@ -40,14 +40,14 @@ Graphs blocks are delimited by `` [graph]...[/graph]`` # Properties * ``layout`` The layout engine, defaults to `dot` see [Graphviz's documentation](https://graphviz.org/docs/layouts/), allowed values: - *- `[dot](https://graphviz.org/docs/layouts/dot/)` - *- `[neato](https://graphviz.org/docs/layouts/neato/)` - *- `[fdp](https://graphviz.org/docs/layouts/fdp/)` - *- `[sfdp](https://graphviz.org/docs/layouts/sfdp/)` - *- `[circo](https://graphviz.org/docs/layouts/circo/)` - *- `[twopi](https://graphviz.org/docs/layouts/twopi/)` - *- `[osage](https://graphviz.org/docs/layouts/osage/)` - *- `[patchwork](https://graphviz.org/docs/layouts/patchwork/)` + *- [`dot`](https://graphviz.org/docs/layouts/dot/) + *- [`neato`](https://graphviz.org/docs/layouts/neato/) + *- [`fdp`](https://graphviz.org/docs/layouts/fdp/) + *- [`sfdp`](https://graphviz.org/docs/layouts/sfdp/) + *- [`circo`](https://graphviz.org/docs/layouts/circo/) + *- [`twopi`](https://graphviz.org/docs/layouts/twopi/) + *- [`osage`](https://graphviz.org/docs/layouts/osage/) + *- [`patchwork`](https://graphviz.org/docs/layouts/patchwork/) * ``width`` The resulting svg's width property, defaults to `100%` # Examples diff --git a/docs/styles/basic.nml b/docs/styles/basic.nml index 5c4a812..7a7d97e 100644 --- a/docs/styles/basic.nml +++ b/docs/styles/basic.nml @@ -6,7 +6,7 @@ Enclose text between two ``**`` to render it **bold**! * ``**Bold text**`` → **Bold text** - * ``**Bold [link](#)**`` → **Bold [link](#)** + * ``Bold [**link**](#)`` → Bold [**link**](#) ## Italic diff --git a/src/document/document.rs b/src/document/document.rs index a932721..44470e5 100644 --- a/src/document/document.rs +++ b/src/document/document.rs @@ -232,43 +232,60 @@ impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_ { } } - #[cfg(test)] -pub mod tests -{ -#[macro_export] -macro_rules! validate_document { - ($container:expr, $idx:expr,) => {}; - ($container:expr, $idx:expr, $t:ty; $($tail:tt)*) => {{ - let elem = &$container[$idx]; - assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}", $idx, stringify!($t)); - - validate_document!($container, ($idx+1), $($tail)*); - }}; - ($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{ - let elem = &$container[$idx]; - assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}", $idx, stringify!($t)); - - $( - let val = &elem.downcast_ref::<$t>().unwrap().$field; - assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}", - stringify!($field), - stringify!($t), - $idx, - $value, - val); - )* +pub mod tests { + #[macro_export] + macro_rules! 
validate_document { + ($container:expr, $idx:expr,) => {}; + ($container:expr, $idx:expr, $t:ty; $($tail:tt)*) => {{ + let elem = &$container[$idx]; + assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t)); validate_document!($container, ($idx+1), $($tail)*); - }}; - ($container:expr, $idx:expr, $t:ty { $($ts:tt)* }; $($tail:tt)*) => {{ - let elem = &$container[$idx]; - assert!(elem.downcast_ref::<$t>().is_some(), "Invalid container element at index {}, expected {}", $idx, stringify!($t)); + }}; + ($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{ + let elem = &$container[$idx]; + assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t)); - let contained = elem.as_container().unwrap().contained(); - validate_document!(contained, 0, $($ts)*); + $( + let val = &elem.downcast_ref::<$t>().unwrap().$field; + assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}", + stringify!($field), + stringify!($t), + $idx, + $value, + val); + )* - validate_document!($container, ($idx+1), $($tail)*); - }}; -} + validate_document!($container, ($idx+1), $($tail)*); + }}; + ($container:expr, $idx:expr, $t:ty { $($ts:tt)* }; $($tail:tt)*) => {{ + let elem = &$container[$idx]; + assert!(elem.downcast_ref::<$t>().is_some(), "Invalid container element at index {}, expected {}", $idx, stringify!($t)); + + let contained = elem.as_container().unwrap().contained(); + validate_document!(contained, 0, $($ts)*); + + validate_document!($container, ($idx+1), $($tail)*); + }}; + ($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* } { $($ts:tt)* }; $($tail:tt)*) => {{ + let elem = &$container[$idx]; + assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t)); + + $( + let val = &elem.downcast_ref::<$t>().unwrap().$field; + assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}", + stringify!($field), + stringify!($t), + $idx, + $value, + val); + )* + + let contained = elem.as_container().unwrap().contained(); + validate_document!(contained, 0, $($ts)*); + + validate_document!($container, ($idx+1), $($tail)*); + }}; + } } diff --git a/src/elements/comment.rs b/src/elements/comment.rs index 3a59862..1fbb65b 100644 --- a/src/elements/comment.rs +++ b/src/elements/comment.rs @@ -1,33 +1,44 @@ -use mlua::{Function, Lua}; -use regex::{Captures, Regex}; -use crate::{document::document::Document, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}}; -use ariadne::{Report, Label, ReportKind}; -use crate::{compiler::compiler::Compiler, document::element::{ElemKind, Element}}; -use std::{ops::Range, rc::Rc}; +use crate::compiler::compiler::Compiler; +use crate::document::document::Document; +use crate::document::element::ElemKind; +use crate::document::element::Element; +use crate::parser::parser::Parser; +use crate::parser::rule::RegexRule; +use crate::parser::source::Source; +use crate::parser::source::Token; +use ariadne::Label; +use ariadne::Report; +use ariadne::ReportKind; +use mlua::Function; +use mlua::Lua; +use regex::Captures; +use regex::Regex; +use std::ops::Range; +use std::rc::Rc; #[derive(Debug)] pub struct Comment { - location: Token, + location: Token, content: String, } -impl Comment -{ - pub fn new(location: Token, content: String ) -> Self { - Self { location: 
location, content } - } +impl Comment { + pub fn new(location: Token, content: String) -> Self { + Self { + location: location, + content, + } + } } -impl Element for Comment -{ - fn location(&self) -> &Token { &self.location } - fn kind(&self) -> ElemKind { ElemKind::Invisible } - fn element_name(&self) -> &'static str { "Comment" } - fn to_string(&self) -> String { format!("{self:#?}") } - fn compile(&self, _compiler: &Compiler, _document: &dyn Document) - -> Result { +impl Element for Comment { + fn location(&self) -> &Token { &self.location } + fn kind(&self) -> ElemKind { ElemKind::Invisible } + fn element_name(&self) -> &'static str { "Comment" } + fn to_string(&self) -> String { format!("{self:#?}") } + fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result { Ok("".to_string()) - } + } } pub struct CommentRule { @@ -36,7 +47,9 @@ pub struct CommentRule { impl CommentRule { pub fn new() -> Self { - Self { re: [Regex::new(r"\s*::(.*)").unwrap()] } + Self { + re: [Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap()], + } } } @@ -45,40 +58,77 @@ impl RegexRule for CommentRule { fn regexes(&self) -> &[Regex] { &self.re } - fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: Captures) - -> Vec, Range)>> { + fn on_regex_match<'a>( + &self, + _: usize, + parser: &dyn Parser, + document: &'a dyn Document, + token: Token, + matches: Captures, + ) -> Vec, Range)>> { let mut reports = vec![]; - let content = match matches.get(1) - { + let content = match matches.get(1) { None => panic!("Unknown error"), Some(comment) => { let trimmed = comment.as_str().trim_start().trim_end().to_string(); - if trimmed.is_empty() - { + if trimmed.is_empty() { reports.push( Report::build(ReportKind::Warning, token.source(), comment.start()) - .with_message("Empty comment") - .with_label( - Label::new((token.source(), comment.range())) - .with_message("Comment is empty") - .with_color(parser.colors().warning)) - .finish()); + .with_message("Empty comment") + .with_label( + Label::new((token.source(), comment.range())) + .with_message("Comment is empty") + .with_color(parser.colors().warning), + ) + .finish(), + ); } trimmed } }; - - parser.push(document, Box::new( - Comment::new( - token.clone(), - content - ) - )); - return reports; + parser.push(document, Box::new(Comment::new(token.clone(), content))); + + return reports; } fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option)>> { None } } + +#[cfg(test)] +mod tests { + use crate::elements::paragraph::Paragraph; +use crate::elements::style::Style; +use crate::elements::text::Text; +use crate::parser::langparser::LangParser; + use crate::parser::source::SourceFile; + use crate::validate_document; + + use super::*; + + #[test] + fn parser() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + r#" +NOT COMMENT: `std::cmp` +:: Commented line +COMMENT ::Test +"# + .to_string(), + None, + )); + let parser = LangParser::default(); + let doc = parser.parse(source, None); + + validate_document!(doc.content().borrow(), 0, + Paragraph { + Text; Style; Text; Style; + Comment { content == "Commented line" }; + Text; Comment { content == "Test" }; + }; + ); + } +} diff --git a/src/elements/layout.rs b/src/elements/layout.rs index d338180..7b3d310 100644 --- a/src/elements/layout.rs +++ b/src/elements/layout.rs @@ -673,7 +673,7 @@ mod tests { use crate::elements::text::Text; use crate::parser::langparser::LangParser; use crate::parser::source::SourceFile; -use 
crate::validate_document; + use crate::validate_document; use super::*; @@ -703,29 +703,29 @@ use crate::validate_document; let doc = parser.parse(source, None); validate_document!(doc.content().borrow(), 0, - Layout { token == LayoutToken::Begin }; + Layout { token == LayoutToken::Begin, id == 0 }; Paragraph { Text { content == "A" }; }; - Layout { token == LayoutToken::Begin }; + Layout { token == LayoutToken::Begin, id == 0 }; Paragraph { Text { content == "B" }; }; - Layout { token == LayoutToken::End }; - Layout { token == LayoutToken::Next }; + Layout { token == LayoutToken::End, id == 1 }; + Layout { token == LayoutToken::Next, id == 1 }; Paragraph { Text { content == "C" }; }; - Layout { token == LayoutToken::Begin }; + Layout { token == LayoutToken::Begin, id == 0 }; Paragraph { Text { content == "D" }; }; - Layout { token == LayoutToken::Next }; + Layout { token == LayoutToken::Next, id == 1 }; Paragraph { Text { content == "E" }; }; - Layout { token == LayoutToken::End }; - Layout { token == LayoutToken::End }; + Layout { token == LayoutToken::End, id == 2 }; + Layout { token == LayoutToken::End, id == 2 }; ); } } diff --git a/src/elements/link.rs b/src/elements/link.rs index 691e7bd..cac621c 100644 --- a/src/elements/link.rs +++ b/src/elements/link.rs @@ -1,12 +1,14 @@ use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Target; use crate::document::document::Document; +use crate::document::element::ContainerElement; use crate::document::element::ElemKind; use crate::document::element::Element; use crate::parser::parser::Parser; use crate::parser::rule::RegexRule; use crate::parser::source::Source; use crate::parser::source::Token; +use crate::parser::source::VirtualSource; use crate::parser::util; use ariadne::Fmt; use ariadne::Label; @@ -19,21 +21,15 @@ use regex::Regex; use std::ops::Range; use std::rc::Rc; +use super::paragraph::Paragraph; + #[derive(Debug)] pub struct Link { - location: Token, - name: String, // Link name - url: String, // Link url -} - -impl Link { - pub fn new(location: Token, name: String, url: String) -> Self { - Self { - location: location, - name, - url, - } - } + pub(self) location: Token, + /// Display content of link + pub(self) display: Paragraph, + /// Url of link + pub(self) url: String, } impl Element for Link { @@ -41,20 +37,39 @@ impl Element for Link { fn kind(&self) -> ElemKind { ElemKind::Inline } fn element_name(&self) -> &'static str { "Link" } fn to_string(&self) -> String { format!("{self:#?}") } - fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result { + fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result { match compiler.target() { - Target::HTML => Ok(format!( - "{}", - Compiler::sanitize(compiler.target(), self.url.as_str()), - Compiler::sanitize(compiler.target(), self.name.as_str()), - )), - Target::LATEX => Ok(format!( - "\\href{{{}}}{{{}}}", - Compiler::sanitize(compiler.target(), self.url.as_str()), - Compiler::sanitize(compiler.target(), self.name.as_str()), - )), + Target::HTML => { + let mut result = format!( + "", + Compiler::sanitize(compiler.target(), self.url.as_str()) + ); + + result += self + .display + .compile(compiler, document) + .as_ref() + .map(|r| r.as_str())?; + + result += ""; + Ok(result) + } + _ => todo!(""), } } + + fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) } +} + +impl ContainerElement for Link { + fn contained(&self) -> &Vec> { &self.display.content } + + fn push(&mut self, elem: Box) -> Result<(), String> { + if 
elem.downcast_ref::().is_some() { + return Err("Tried to push a link inside of a link".to_string()); + } + self.display.push(elem) + } } pub struct LinkRule { @@ -78,47 +93,67 @@ impl RegexRule for LinkRule { &self, _: usize, parser: &dyn Parser, - document: &'a dyn Document, + document: &'a (dyn Document<'a> + 'a), token: Token, matches: Captures, ) -> Vec, Range)>> { - let mut result = vec![]; - let link_name = match matches.get(1) { - Some(name) => { - if name.as_str().is_empty() { - result.push( - Report::build(ReportKind::Error, token.source(), name.start()) + let mut reports = vec![]; + + let link_display = match matches.get(1) { + Some(display) => { + if display.as_str().is_empty() { + reports.push( + Report::build(ReportKind::Error, token.source(), display.start()) .with_message("Empty link name") .with_label( - Label::new((token.source().clone(), name.range())) + Label::new((token.source().clone(), display.range())) .with_message("Link name is empty") .with_color(parser.colors().error), ) .finish(), ); - return result; + return reports; } - // TODO: process into separate document... - let text_content = util::process_text(document, name.as_str()); - - if text_content.as_str().is_empty() { - result.push( - Report::build(ReportKind::Error, token.source(), name.start()) + let processed = util::process_escaped('\\', "]", display.as_str()); + if processed.is_empty() { + reports.push( + Report::build(ReportKind::Error, token.source(), display.start()) .with_message("Empty link name") .with_label( - Label::new((token.source(), name.range())) + Label::new((token.source(), display.range())) .with_message(format!( "Link name is empty. Once processed, `{}` yields `{}`", - name.as_str().fg(parser.colors().highlight), - text_content.as_str().fg(parser.colors().highlight), + display.as_str().fg(parser.colors().highlight), + processed.fg(parser.colors().highlight), )) .with_color(parser.colors().error), ) .finish(), ); - return result; + return reports; + } + + let source = Rc::new(VirtualSource::new( + Token::new(display.range(), token.source()), + "Link Display".to_string(), + processed, + )); + match util::parse_paragraph(parser, source, document) { + Err(err) => { + reports.push( + Report::build(ReportKind::Error, token.source(), display.start()) + .with_message("Failed to parse link display") + .with_label( + Label::new((token.source(), display.range())) + .with_message(err.to_string()) + .with_color(parser.colors().error), + ) + .finish(), + ); + return reports; + } + Ok(paragraph) => *paragraph, } - text_content } _ => panic!("Empty link name"), }; @@ -126,7 +161,7 @@ impl RegexRule for LinkRule { let link_url = match matches.get(2) { Some(url) => { if url.as_str().is_empty() { - result.push( + reports.push( Report::build(ReportKind::Error, token.source(), url.start()) .with_message("Empty link url") .with_label( @@ -136,12 +171,12 @@ impl RegexRule for LinkRule { ) .finish(), ); - return result; + return reports; } let text_content = util::process_text(document, url.as_str()); if text_content.as_str().is_empty() { - result.push( + reports.push( Report::build(ReportKind::Error, token.source(), url.start()) .with_message("Empty link url") .with_label( @@ -155,7 +190,7 @@ impl RegexRule for LinkRule { ) .finish(), ); - return result; + return reports; } text_content } @@ -164,12 +199,55 @@ impl RegexRule for LinkRule { parser.push( document, - Box::new(Link::new(token.clone(), link_name, link_url)), + Box::new(Link { + location: token, + display: link_display, + url: link_url, + }), ); - 
return result; + return reports; } // TODO fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option)>> { None } } + +#[cfg(test)] +mod tests { + use crate::elements::style::Style; +use crate::elements::text::Text; + use crate::parser::langparser::LangParser; + use crate::parser::source::SourceFile; + use crate::validate_document; + + use super::*; + + #[test] + fn parser() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + r#" +Some [link](url). +[**BOLD link**](another url) + "# + .to_string(), + None, + )); + let parser = LangParser::default(); + let doc = parser.parse(source, None); + + validate_document!(doc.content().borrow(), 0, + Paragraph { + Text { content == "Some " }; + Link { url == "url" } { Text { content == "link" }; }; + Text { content == "." }; + Link { url == "another url" } { + Style; + Text { content == "BOLD link" }; + Style; + }; + }; + ); + } +} diff --git a/src/elements/paragraph.rs b/src/elements/paragraph.rs index 3285f2f..583f7c0 100644 --- a/src/elements/paragraph.rs +++ b/src/elements/paragraph.rs @@ -100,6 +100,9 @@ impl ContainerElement for Paragraph { if elem.location().source() == self.location().source() { self.location.range = self.location.start()..elem.location().end(); } + if elem.kind() == ElemKind::Block { + return Err("Attempted to push block element inside a paragraph".to_string()); + } self.content.push(elem); Ok(()) } @@ -152,3 +155,47 @@ impl Rule for ParagraphRule { // TODO fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option)>> { None } } + +#[cfg(test)] +mod tests { + use crate::elements::paragraph::Paragraph; + use crate::elements::text::Text; + use crate::parser::langparser::LangParser; + use crate::parser::source::SourceFile; + use crate::validate_document; + + use super::*; + + #[test] + fn parse() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + r#" +First paragraph +Second line + +Second paragraph\ +<- literal \\n + + +Last paragraph + "# + .to_string(), + None, + )); + let parser = LangParser::default(); + let doc = parser.parse(source, None); + + validate_document!(doc.content().borrow(), 0, + Paragraph { + Text { content == "First paragraph Second line" }; + }; + Paragraph { + Text { content == "Second paragraph\n<- literal \\n" }; + }; + Paragraph { + Text { content == "Last paragraph " }; + }; + ); + } +} diff --git a/src/elements/style.rs b/src/elements/style.rs index 47508ab..b841788 100644 --- a/src/elements/style.rs +++ b/src/elements/style.rs @@ -222,3 +222,54 @@ impl RegexRule for StyleRule { // TODO fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option)>> { None } } + +#[cfg(test)] +mod tests { + use crate::elements::text::Text; + use crate::parser::langparser::LangParser; + use crate::parser::source::SourceFile; + use crate::validate_document; + + use super::*; + + #[test] + fn parser() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + r#" +Some *style +terminated here* + +**BOLD + *italic*** +__`UNDERLINE+EM`__ +"# + .to_string(), + None, + )); + let parser = LangParser::default(); + let doc = parser.parse(source, None); + + validate_document!(doc.content().borrow(), 0, + Paragraph { + Text; + Style { kind == 1, close == false }; + Text; + Style { kind == 1, close == true }; + }; + Paragraph { + Style { kind == 0, close == false }; // ** + Text; + Style { kind == 1, close == false }; // * + Text; + Style { kind == 0, close == true }; // ** + Style { kind == 1, close == true }; // * + + Style { kind == 2, close == false }; // __ + Style { kind == 3, close == 
false }; // ` + Text; + Style { kind == 3, close == true }; // ` + Style { kind == 2, close == true }; // __ + }; + ); + } +} diff --git a/src/elements/tex.rs b/src/elements/tex.rs index 58f58ae..d2deea1 100644 --- a/src/elements/tex.rs +++ b/src/elements/tex.rs @@ -253,7 +253,7 @@ impl TexRule { .unwrap(), Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(), ], - properties: PropertyParser{ properties: props }, + properties: PropertyParser { properties: props }, } } @@ -435,8 +435,10 @@ impl RegexRule for TexRule { #[cfg(test)] mod tests { + use crate::elements::paragraph::Paragraph; use crate::parser::langparser::LangParser; use crate::parser::source::SourceFile; + use crate::validate_document; use super::*; @@ -446,7 +448,7 @@ mod tests { "".to_string(), r#" $[kind=block, caption=Some\, text\\] 1+1=2 $ -$|[env=another] Non Math \LaTeX|$ +$|[env=another] Non Math \LaTeX |$ $[kind=block,env=another] e^{i\pi}=-1$ "# .to_string(), @@ -455,19 +457,11 @@ $[kind=block,env=another] e^{i\pi}=-1$ let parser = LangParser::default(); let doc = parser.parse(source, None); - let borrow = doc.content().borrow(); - let found = borrow - .iter() - .filter_map(|e| e.downcast_ref::()) - .collect::>(); - - assert_eq!(found[0].tex, "1+1=2"); - assert_eq!(found[0].env, "main"); - assert_eq!(found[0].caption, Some("Some, text\\".to_string())); - assert_eq!(found[1].tex, "Non Math \\LaTeX"); - assert_eq!(found[1].env, "another"); - assert_eq!(found[2].tex, "e^{i\\pi}=-1"); - assert_eq!(found[2].env, "another"); + validate_document!(doc.content().borrow(), 0, + Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; + Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" }; + Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; + ); } #[test] @@ -485,24 +479,12 @@ $[env=another] e^{i\pi}=-1$ let parser = LangParser::default(); let doc = parser.parse(source, None); - let borrow = doc.content().borrow(); - let found = borrow - .first() - .unwrap() - .as_container() - .unwrap() - .contained() - .iter() - .filter_map(|e| e.downcast_ref::()) - .collect::>(); - - assert_eq!(found[0].tex, "1+1=2"); - assert_eq!(found[0].env, "main"); - assert_eq!(found[0].caption, Some("Some, text\\".to_string())); - assert_eq!(found[1].tex, "Non Math \\LaTeX"); - assert_eq!(found[1].env, "another"); - assert_eq!(found[1].caption, Some("Enclosed ].".to_string())); - assert_eq!(found[2].tex, "e^{i\\pi}=-1"); - assert_eq!(found[2].env, "another"); + validate_document!(doc.content().borrow(), 0, + Paragraph { + Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; + Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" }; + Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; + }; + ); } } diff --git a/src/parser/util.rs b/src/parser/util.rs index 845013b..c63406e 100644 --- a/src/parser/util.rs +++ b/src/parser/util.rs @@ -147,6 +147,8 @@ pub fn parse_paragraph<'a>( return Err("Parsed document is empty"); } else if parsed.last_element::().is_none() { return Err("Parsed element is not a paragraph"); + } else if parser.has_error() { + return Err("Parser error"); } let paragraph = parsed.content().borrow_mut().pop().unwrap();
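
A quick standalone illustration of the reworked CommentRule pattern: `::` now only opens a comment at the start of a line or when preceded by whitespace, so inline paths such as `std::cmp` are no longer swallowed, which is exactly what the added comment.rs test exercises. The sketch below is illustrative only; it assumes nothing beyond the `regex` crate the rule already depends on, `fn main` is just a harness, and the three input strings are copied from that test.

use regex::Regex;

fn main() {
    // Same pattern as the new CommentRule.
    let re = Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap();

    // `::` at the start of a line opens a comment; capture group 1 holds the body.
    assert_eq!(
        re.captures(":: Commented line").unwrap()[1].trim(),
        "Commented line"
    );

    // `::` preceded by whitespace also opens a comment.
    assert_eq!(re.captures("COMMENT ::Test").unwrap()[1].trim(), "Test");

    // A `::` glued to identifiers (a path separator) is left alone.
    assert!(re.captures("NOT COMMENT: `std::cmp`").is_none());
}

The first two inputs therefore produce Comment elements with contents "Commented line" and "Test", while the `std::cmp` line stays ordinary paragraph content, matching the structure asserted by validate_document! in the new test.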