Fix links & tests
parent bdb1c9d1c9
commit e4ce3edc4d
10 changed files with 405 additions and 178 deletions
docs/external/graphviz.nml (vendored): 16 changes
@@ -40,14 +40,14 @@ Graphs blocks are delimited by `` [graph]...[/graph]``
 # Properties
 * ``layout`` The layout engine, defaults to `dot`
 see [Graphviz's documentation](https://graphviz.org/docs/layouts/), allowed values:
-*- `[dot](https://graphviz.org/docs/layouts/dot/)`
+*- [`dot`](https://graphviz.org/docs/layouts/dot/)
-*- `[neato](https://graphviz.org/docs/layouts/neato/)`
+*- [`neato`](https://graphviz.org/docs/layouts/neato/)
-*- `[fdp](https://graphviz.org/docs/layouts/fdp/)`
+*- [`fdp`](https://graphviz.org/docs/layouts/fdp/)
-*- `[sfdp](https://graphviz.org/docs/layouts/sfdp/)`
+*- [`sfdp`](https://graphviz.org/docs/layouts/sfdp/)
-*- `[circo](https://graphviz.org/docs/layouts/circo/)`
+*- [`circo`](https://graphviz.org/docs/layouts/circo/)
-*- `[twopi](https://graphviz.org/docs/layouts/twopi/)`
+*- [`twopi`](https://graphviz.org/docs/layouts/twopi/)
-*- `[osage](https://graphviz.org/docs/layouts/osage/)`
+*- [`osage`](https://graphviz.org/docs/layouts/osage/)
-*- `[patchwork](https://graphviz.org/docs/layouts/patchwork/)`
+*- [`patchwork`](https://graphviz.org/docs/layouts/patchwork/)
 * ``width`` The resulting svg's width property, defaults to `100%`

 # Examples

@@ -6,7 +6,7 @@

 Enclose text between two ``**`` to render it **bold**!
 * ``**Bold text**`` → **Bold text**
-* ``**Bold [link](#)**`` → **Bold [link](#)**
+* ``Bold [**link**](#)`` → Bold [**link**](#)

 ## Italic

@@ -232,43 +232,60 @@ impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_ {
 }
 }


 #[cfg(test)]
-pub mod tests
-{
+pub mod tests {
 #[macro_export]
 macro_rules! validate_document {
 ($container:expr, $idx:expr,) => {};
 ($container:expr, $idx:expr, $t:ty; $($tail:tt)*) => {{
 let elem = &$container[$idx];
-assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}", $idx, stringify!($t));
+assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));

 validate_document!($container, ($idx+1), $($tail)*);
 }};
 ($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
 let elem = &$container[$idx];
-assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}", $idx, stringify!($t));
+assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));

 $(
 let val = &elem.downcast_ref::<$t>().unwrap().$field;
 assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
 stringify!($field),
 stringify!($t),
 $idx,
 $value,
 val);
 )*

 validate_document!($container, ($idx+1), $($tail)*);
 }};
 ($container:expr, $idx:expr, $t:ty { $($ts:tt)* }; $($tail:tt)*) => {{
 let elem = &$container[$idx];
 assert!(elem.downcast_ref::<$t>().is_some(), "Invalid container element at index {}, expected {}", $idx, stringify!($t));

 let contained = elem.as_container().unwrap().contained();
 validate_document!(contained, 0, $($ts)*);

 validate_document!($container, ($idx+1), $($tail)*);
 }};
+($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* } { $($ts:tt)* }; $($tail:tt)*) => {{
+let elem = &$container[$idx];
+assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
+
+$(
+let val = &elem.downcast_ref::<$t>().unwrap().$field;
+assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
+stringify!($field),
+stringify!($t),
+$idx,
+$value,
+val);
+)*
+
+let contained = elem.as_container().unwrap().contained();
+validate_document!(contained, 0, $($ts)*);
+
+validate_document!($container, ($idx+1), $($tail)*);
+}};
 }
 }
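For context, a rough sketch of what a single `validate_document!` arm boils down to once expanded. This is illustrative only and uses `std::any::Any` in place of the crate's `Element` downcasting; the `Comment`/`Text` structs here are stand-ins, not the project's types.

```rust
use std::any::Any;

#[derive(Debug)]
struct Comment { content: String }
#[derive(Debug)]
struct Text { content: String }

fn main() {
    // Stand-in for the document's element container.
    let container: Vec<Box<dyn Any>> = vec![
        Box::new(Text { content: "COMMENT ".into() }),
        Box::new(Comment { content: "Test".into() }),
    ];

    // Roughly what `validate_document!(container, 1, Comment { content == "Test" };)` expands to:
    let idx = 1;
    let elem = &container[idx];
    assert!(
        elem.downcast_ref::<Comment>().is_some(),
        "Invalid element at index {}, expected {}", idx, stringify!(Comment)
    );
    let val = &elem.downcast_ref::<Comment>().unwrap().content;
    assert!(*val == "Test", "Invalid field content for Comment at index {}", idx);
}
```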
@@ -1,33 +1,44 @@
-use mlua::{Function, Lua};
-use regex::{Captures, Regex};
-use crate::{document::document::Document, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}};
-use ariadne::{Report, Label, ReportKind};
-use crate::{compiler::compiler::Compiler, document::element::{ElemKind, Element}};
-use std::{ops::Range, rc::Rc};
+use crate::compiler::compiler::Compiler;
+use crate::document::document::Document;
+use crate::document::element::ElemKind;
+use crate::document::element::Element;
+use crate::parser::parser::Parser;
+use crate::parser::rule::RegexRule;
+use crate::parser::source::Source;
+use crate::parser::source::Token;
+use ariadne::Label;
+use ariadne::Report;
+use ariadne::ReportKind;
+use mlua::Function;
+use mlua::Lua;
+use regex::Captures;
+use regex::Regex;
+use std::ops::Range;
+use std::rc::Rc;

 #[derive(Debug)]
 pub struct Comment {
 location: Token,
 content: String,
 }

-impl Comment
-{
-pub fn new(location: Token, content: String ) -> Self {
-Self { location: location, content }
-}
+impl Comment {
+pub fn new(location: Token, content: String) -> Self {
+Self {
+location: location,
+content,
+}
+}
 }

-impl Element for Comment
-{
+impl Element for Comment {
 fn location(&self) -> &Token { &self.location }
 fn kind(&self) -> ElemKind { ElemKind::Invisible }
 fn element_name(&self) -> &'static str { "Comment" }
 fn to_string(&self) -> String { format!("{self:#?}") }
-fn compile(&self, _compiler: &Compiler, _document: &dyn Document)
--> Result<String, String> {
+fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
 Ok("".to_string())
 }
 }

 pub struct CommentRule {
@@ -36,7 +47,9 @@ pub struct CommentRule {

 impl CommentRule {
 pub fn new() -> Self {
-Self { re: [Regex::new(r"\s*::(.*)").unwrap()] }
+Self {
+re: [Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap()],
+}
 }
 }

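A quick illustration (not part of the commit) of how the updated comment pattern behaves, assuming the `regex` crate; the inputs mirror the test source added further down.

```rust
use regex::Regex;

fn main() {
    // The pattern from CommentRule::new() above: `::` must sit at the start of a
    // line or be preceded by non-newline whitespace.
    let re = Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap();

    // `::` at the start of a line opens a comment,
    assert_eq!(re.captures(":: Commented line").unwrap()[1].trim(), "Commented line");
    // ...and so does `::` preceded by whitespace,
    assert_eq!(re.captures("COMMENT ::Test").unwrap()[1].trim(), "Test");
    // ...while a path separator such as `std::cmp` no longer matches,
    // which the old `\s*::(.*)` pattern wrongly treated as a comment.
    assert!(re.captures("NOT COMMENT: `std::cmp`").is_none());
}
```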
@@ -45,40 +58,77 @@ impl RegexRule for CommentRule {

 fn regexes(&self) -> &[Regex] { &self.re }

-fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: Captures)
--> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
+fn on_regex_match<'a>(
+&self,
+_: usize,
+parser: &dyn Parser,
+document: &'a dyn Document,
+token: Token,
+matches: Captures,
+) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
 let mut reports = vec![];

-let content = match matches.get(1)
-{
+let content = match matches.get(1) {
 None => panic!("Unknown error"),
 Some(comment) => {
 let trimmed = comment.as_str().trim_start().trim_end().to_string();
-if trimmed.is_empty()
-{
+if trimmed.is_empty() {
 reports.push(
 Report::build(ReportKind::Warning, token.source(), comment.start())
 .with_message("Empty comment")
 .with_label(
 Label::new((token.source(), comment.range()))
 .with_message("Comment is empty")
-.with_color(parser.colors().warning))
-.finish());
+.with_color(parser.colors().warning),
+)
+.finish(),
+);
 }

 trimmed
 }
 };

-parser.push(document, Box::new(
-Comment::new(
-token.clone(),
-content
-)
-));
+parser.push(document, Box::new(Comment::new(token.clone(), content)));

 return reports;
 }

 fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
 }

+#[cfg(test)]
+mod tests {
+use crate::elements::paragraph::Paragraph;
+use crate::elements::style::Style;
+use crate::elements::text::Text;
+use crate::parser::langparser::LangParser;
+use crate::parser::source::SourceFile;
+use crate::validate_document;
+
+use super::*;
+
+#[test]
+fn parser() {
+let source = Rc::new(SourceFile::with_content(
+"".to_string(),
+r#"
+NOT COMMENT: `std::cmp`
+:: Commented line
+COMMENT ::Test
+"#
+.to_string(),
+None,
+));
+let parser = LangParser::default();
+let doc = parser.parse(source, None);
+
+validate_document!(doc.content().borrow(), 0,
+Paragraph {
+Text; Style; Text; Style;
+Comment { content == "Commented line" };
+Text; Comment { content == "Test" };
+};
+);
+}
+}
@@ -673,7 +673,7 @@ mod tests {
 use crate::elements::text::Text;
 use crate::parser::langparser::LangParser;
 use crate::parser::source::SourceFile;
 use crate::validate_document;

 use super::*;

@@ -703,29 +703,29 @@ use crate::validate_document;
 let doc = parser.parse(source, None);

 validate_document!(doc.content().borrow(), 0,
-Layout { token == LayoutToken::Begin };
+Layout { token == LayoutToken::Begin, id == 0 };
 Paragraph {
 Text { content == "A" };
 };
-Layout { token == LayoutToken::Begin };
+Layout { token == LayoutToken::Begin, id == 0 };
 Paragraph {
 Text { content == "B" };
 };
-Layout { token == LayoutToken::End };
+Layout { token == LayoutToken::End, id == 1 };
-Layout { token == LayoutToken::Next };
+Layout { token == LayoutToken::Next, id == 1 };
 Paragraph {
 Text { content == "C" };
 };
-Layout { token == LayoutToken::Begin };
+Layout { token == LayoutToken::Begin, id == 0 };
 Paragraph {
 Text { content == "D" };
 };
-Layout { token == LayoutToken::Next };
+Layout { token == LayoutToken::Next, id == 1 };
 Paragraph {
 Text { content == "E" };
 };
-Layout { token == LayoutToken::End };
+Layout { token == LayoutToken::End, id == 2 };
-Layout { token == LayoutToken::End };
+Layout { token == LayoutToken::End, id == 2 };
 );
 }
 }

@@ -1,12 +1,14 @@
 use crate::compiler::compiler::Compiler;
 use crate::compiler::compiler::Target;
 use crate::document::document::Document;
+use crate::document::element::ContainerElement;
 use crate::document::element::ElemKind;
 use crate::document::element::Element;
 use crate::parser::parser::Parser;
 use crate::parser::rule::RegexRule;
 use crate::parser::source::Source;
 use crate::parser::source::Token;
+use crate::parser::source::VirtualSource;
 use crate::parser::util;
 use ariadne::Fmt;
 use ariadne::Label;
@@ -19,21 +21,15 @@ use regex::Regex;
 use std::ops::Range;
 use std::rc::Rc;

+use super::paragraph::Paragraph;
+
 #[derive(Debug)]
 pub struct Link {
-location: Token,
-name: String, // Link name
-url: String, // Link url
-}
-
-impl Link {
-pub fn new(location: Token, name: String, url: String) -> Self {
-Self {
-location: location,
-name,
-url,
-}
-}
+pub(self) location: Token,
+/// Display content of link
+pub(self) display: Paragraph,
+/// Url of link
+pub(self) url: String,
 }

 impl Element for Link {
@@ -41,20 +37,39 @@ impl Element for Link {
 fn kind(&self) -> ElemKind { ElemKind::Inline }
 fn element_name(&self) -> &'static str { "Link" }
 fn to_string(&self) -> String { format!("{self:#?}") }
-fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
+fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
 match compiler.target() {
-Target::HTML => Ok(format!(
-"<a href=\"{}\">{}</a>",
-Compiler::sanitize(compiler.target(), self.url.as_str()),
-Compiler::sanitize(compiler.target(), self.name.as_str()),
-)),
-Target::LATEX => Ok(format!(
-"\\href{{{}}}{{{}}}",
-Compiler::sanitize(compiler.target(), self.url.as_str()),
-Compiler::sanitize(compiler.target(), self.name.as_str()),
-)),
+Target::HTML => {
+let mut result = format!(
+"<a href=\"{}\">",
+Compiler::sanitize(compiler.target(), self.url.as_str())
+);
+
+result += self
+.display
+.compile(compiler, document)
+.as_ref()
+.map(|r| r.as_str())?;
+
+result += "</a>";
+Ok(result)
+}
+_ => todo!(""),
 }
 }

+fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
+}
+
+impl ContainerElement for Link {
+fn contained(&self) -> &Vec<Box<dyn Element>> { &self.display.content }
+
+fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
+if elem.downcast_ref::<Link>().is_some() {
+return Err("Tried to push a link inside of a link".to_string());
+}
+self.display.push(elem)
+}
 }

 pub struct LinkRule {
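As a rough sketch of the effect of the new `compile` path (illustrative only: `display_html` stands in for `self.display.compile(...)`, and the `<b>` markup for the bold style is an assumption, not taken from the diff):

```rust
fn main() {
    // The link's display paragraph is compiled first, then wrapped in the anchor tag,
    // so `[**BOLD link**](another url)` can carry styled content inside the link.
    let url = "another url";
    let display_html = "<b>BOLD link</b>"; // assumed placeholder for self.display.compile(compiler, document)
    let mut result = format!("<a href=\"{}\">", url);
    result += display_html;
    result += "</a>";
    assert_eq!(result, "<a href=\"another url\"><b>BOLD link</b></a>");
}
```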
@@ -78,47 +93,67 @@ impl RegexRule for LinkRule {
 &self,
 _: usize,
 parser: &dyn Parser,
-document: &'a dyn Document,
+document: &'a (dyn Document<'a> + 'a),
 token: Token,
 matches: Captures,
 ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
-let mut result = vec![];
-let link_name = match matches.get(1) {
-Some(name) => {
-if name.as_str().is_empty() {
-result.push(
-Report::build(ReportKind::Error, token.source(), name.start())
+let mut reports = vec![];
+
+let link_display = match matches.get(1) {
+Some(display) => {
+if display.as_str().is_empty() {
+reports.push(
+Report::build(ReportKind::Error, token.source(), display.start())
 .with_message("Empty link name")
 .with_label(
-Label::new((token.source().clone(), name.range()))
+Label::new((token.source().clone(), display.range()))
 .with_message("Link name is empty")
 .with_color(parser.colors().error),
 )
 .finish(),
 );
-return result;
+return reports;
 }
-// TODO: process into separate document...
-let text_content = util::process_text(document, name.as_str());
-
-if text_content.as_str().is_empty() {
-result.push(
-Report::build(ReportKind::Error, token.source(), name.start())
+let processed = util::process_escaped('\\', "]", display.as_str());
+if processed.is_empty() {
+reports.push(
+Report::build(ReportKind::Error, token.source(), display.start())
 .with_message("Empty link name")
 .with_label(
-Label::new((token.source(), name.range()))
+Label::new((token.source(), display.range()))
 .with_message(format!(
 "Link name is empty. Once processed, `{}` yields `{}`",
-name.as_str().fg(parser.colors().highlight),
-text_content.as_str().fg(parser.colors().highlight),
+display.as_str().fg(parser.colors().highlight),
+processed.fg(parser.colors().highlight),
 ))
 .with_color(parser.colors().error),
 )
 .finish(),
 );
-return result;
+return reports;
 }
-text_content
+
+let source = Rc::new(VirtualSource::new(
+Token::new(display.range(), token.source()),
+"Link Display".to_string(),
+processed,
+));
+match util::parse_paragraph(parser, source, document) {
+Err(err) => {
+reports.push(
+Report::build(ReportKind::Error, token.source(), display.start())
+.with_message("Failed to parse link display")
+.with_label(
+Label::new((token.source(), display.range()))
+.with_message(err.to_string())
+.with_color(parser.colors().error),
+)
+.finish(),
+);
+return reports;
+}
+Ok(paragraph) => *paragraph,
+}
 }
 _ => panic!("Empty link name"),
 };
@@ -126,7 +161,7 @@ impl RegexRule for LinkRule {
 let link_url = match matches.get(2) {
 Some(url) => {
 if url.as_str().is_empty() {
-result.push(
+reports.push(
 Report::build(ReportKind::Error, token.source(), url.start())
 .with_message("Empty link url")
 .with_label(
@@ -136,12 +171,12 @@ impl RegexRule for LinkRule {
 )
 .finish(),
 );
-return result;
+return reports;
 }
 let text_content = util::process_text(document, url.as_str());

 if text_content.as_str().is_empty() {
-result.push(
+reports.push(
 Report::build(ReportKind::Error, token.source(), url.start())
 .with_message("Empty link url")
 .with_label(
@@ -155,7 +190,7 @@ impl RegexRule for LinkRule {
 )
 .finish(),
 );
-return result;
+return reports;
 }
 text_content
 }
@@ -164,12 +199,55 @@ impl RegexRule for LinkRule {

 parser.push(
 document,
-Box::new(Link::new(token.clone(), link_name, link_url)),
+Box::new(Link {
+location: token,
+display: link_display,
+url: link_url,
+}),
 );

-return result;
+return reports;
 }

 // TODO
 fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
 }

+#[cfg(test)]
+mod tests {
+use crate::elements::style::Style;
+use crate::elements::text::Text;
+use crate::parser::langparser::LangParser;
+use crate::parser::source::SourceFile;
+use crate::validate_document;
+
+use super::*;
+
+#[test]
+fn parser() {
+let source = Rc::new(SourceFile::with_content(
+"".to_string(),
+r#"
+Some [link](url).
+[**BOLD link**](another url)
+"#
+.to_string(),
+None,
+));
+let parser = LangParser::default();
+let doc = parser.parse(source, None);
+
+validate_document!(doc.content().borrow(), 0,
+Paragraph {
+Text { content == "Some " };
+Link { url == "url" } { Text { content == "link" }; };
+Text { content == "." };
+Link { url == "another url" } {
+Style;
+Text { content == "BOLD link" };
+Style;
+};
+};
+);
+}
+}
@@ -100,6 +100,9 @@ impl ContainerElement for Paragraph {
 if elem.location().source() == self.location().source() {
 self.location.range = self.location.start()..elem.location().end();
 }
+if elem.kind() == ElemKind::Block {
+return Err("Attempted to push block element inside a paragraph".to_string());
+}
 self.content.push(elem);
 Ok(())
 }
@@ -152,3 +155,47 @@ impl Rule for ParagraphRule {
 // TODO
 fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
 }

+#[cfg(test)]
+mod tests {
+use crate::elements::paragraph::Paragraph;
+use crate::elements::text::Text;
+use crate::parser::langparser::LangParser;
+use crate::parser::source::SourceFile;
+use crate::validate_document;
+
+use super::*;
+
+#[test]
+fn parse() {
+let source = Rc::new(SourceFile::with_content(
+"".to_string(),
+r#"
+First paragraph
+Second line
+
+Second paragraph\
+<- literal \\n
+
+
+Last paragraph
+"#
+.to_string(),
+None,
+));
+let parser = LangParser::default();
+let doc = parser.parse(source, None);
+
+validate_document!(doc.content().borrow(), 0,
+Paragraph {
+Text { content == "First paragraph Second line" };
+};
+Paragraph {
+Text { content == "Second paragraph\n<- literal \\n" };
+};
+Paragraph {
+Text { content == "Last paragraph " };
+};
+);
+}
+}
@@ -222,3 +222,54 @@ impl RegexRule for StyleRule {
 // TODO
 fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
 }

+#[cfg(test)]
+mod tests {
+use crate::elements::text::Text;
+use crate::parser::langparser::LangParser;
+use crate::parser::source::SourceFile;
+use crate::validate_document;
+
+use super::*;
+
+#[test]
+fn parser() {
+let source = Rc::new(SourceFile::with_content(
+"".to_string(),
+r#"
+Some *style
+terminated here*
+
+**BOLD + *italic***
+__`UNDERLINE+EM`__
+"#
+.to_string(),
+None,
+));
+let parser = LangParser::default();
+let doc = parser.parse(source, None);
+
+validate_document!(doc.content().borrow(), 0,
+Paragraph {
+Text;
+Style { kind == 1, close == false };
+Text;
+Style { kind == 1, close == true };
+};
+Paragraph {
+Style { kind == 0, close == false }; // **
+Text;
+Style { kind == 1, close == false }; // *
+Text;
+Style { kind == 0, close == true }; // **
+Style { kind == 1, close == true }; // *
+
+Style { kind == 2, close == false }; // __
+Style { kind == 3, close == false }; // `
+Text;
+Style { kind == 3, close == true }; // `
+Style { kind == 2, close == true }; // __
+};
+);
+}
+}
@@ -253,7 +253,7 @@ impl TexRule {
 .unwrap(),
 Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
 ],
-properties: PropertyParser{ properties: props },
+properties: PropertyParser { properties: props },
 }
 }

@@ -435,8 +435,10 @@ impl RegexRule for TexRule {

 #[cfg(test)]
 mod tests {
+use crate::elements::paragraph::Paragraph;
 use crate::parser::langparser::LangParser;
 use crate::parser::source::SourceFile;
+use crate::validate_document;

 use super::*;

@@ -446,7 +448,7 @@ mod tests {
 "".to_string(),
 r#"
 $[kind=block, caption=Some\, text\\] 1+1=2 $
-$|[env=another] Non Math \LaTeX|$
+$|[env=another] Non Math \LaTeX |$
 $[kind=block,env=another] e^{i\pi}=-1$
 "#
 .to_string(),
@@ -455,19 +457,11 @@ $[kind=block,env=another] e^{i\pi}=-1$
 let parser = LangParser::default();
 let doc = parser.parse(source, None);

-let borrow = doc.content().borrow();
-let found = borrow
-.iter()
-.filter_map(|e| e.downcast_ref::<Tex>())
-.collect::<Vec<_>>();
-
-assert_eq!(found[0].tex, "1+1=2");
-assert_eq!(found[0].env, "main");
-assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
-assert_eq!(found[1].tex, "Non Math \\LaTeX");
-assert_eq!(found[1].env, "another");
-assert_eq!(found[2].tex, "e^{i\\pi}=-1");
-assert_eq!(found[2].env, "another");
+validate_document!(doc.content().borrow(), 0,
+Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
+Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" };
+Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
+);
 }

 #[test]
@@ -485,24 +479,12 @@ $[env=another] e^{i\pi}=-1$
 let parser = LangParser::default();
 let doc = parser.parse(source, None);

-let borrow = doc.content().borrow();
-let found = borrow
-.first()
-.unwrap()
-.as_container()
-.unwrap()
-.contained()
-.iter()
-.filter_map(|e| e.downcast_ref::<Tex>())
-.collect::<Vec<_>>();
-
-assert_eq!(found[0].tex, "1+1=2");
-assert_eq!(found[0].env, "main");
-assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
-assert_eq!(found[1].tex, "Non Math \\LaTeX");
-assert_eq!(found[1].env, "another");
-assert_eq!(found[1].caption, Some("Enclosed ].".to_string()));
-assert_eq!(found[2].tex, "e^{i\\pi}=-1");
-assert_eq!(found[2].env, "another");
+validate_document!(doc.content().borrow(), 0,
+Paragraph {
+Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
+Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" };
+Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
+};
+);
 }
 }
@@ -147,6 +147,8 @@ pub fn parse_paragraph<'a>(
 return Err("Parsed document is empty");
 } else if parsed.last_element::<Paragraph>().is_none() {
 return Err("Parsed element is not a paragraph");
+} else if parser.has_error() {
+return Err("Parser error");
 }

 let paragraph = parsed.content().borrow_mut().pop().unwrap();