This commit is contained in:
ef3d0c3e 2024-07-26 20:01:10 +02:00
parent d0343cee6b
commit 8c71258212
7 changed files with 426 additions and 222 deletions

View file

@@ -21,6 +21,7 @@ ariadne = "0.4.1"
 dashmap = "6.0.1"
 downcast-rs = "1.2.1"
 getopts = "0.2.21"
+graphviz-rust = "0.9.0"
 lazy_static = "1.5.0"
 lsp-server = "0.7.6"
 lsp-types = "0.97.0"

View file

@@ -11,12 +11,12 @@ Some features requires external dependencies to work.
 We ship a modified version of `latex2svg` by Matthias C. Hormann.
 The modified program can be found in [third/latex2svg](third/latex2svg) and is licensed under MIT.
-The installation instructions specified on [latex2svg's repository](https://github.com/Moonbase59/latex2svg).
+The installation instructions can be found on [latex2svg's repository](https://github.com/Moonbase59/latex2svg).
 
 ## Graphviz rendering
 
-To render Graphviz graph `[graph]...[/graph]`
-You need to install the `dot` program from [Graphviz](https://graphviz.org/) in order to render graphs.
+To render a Graphviz graph (i.e. `[graph]...[/graph]`),
+you need to install the `dot` program from [Graphviz](https://graphviz.org/).
 
 ## Lua kernels
@@ -36,6 +36,8 @@ cargo build --release --bin nml
 - [x] Graphviz rendering
 - [x] Media
 - [ ] References
+- [ ] Navigation
+- [ ] Cross-Document references
 - [ ] Complete Lua api
 - [ ] Documentation
 - [ ] Table
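Rendering a `[graph]` block ultimately means handing DOT source to the external `dot` binary mentioned above. Below is a minimal sketch of such an invocation; it assumes nothing about NML's actual Graphviz code path, and `render_dot_to_svg` is a made-up helper name.

```rust
use std::io::Write;
use std::process::{Command, Stdio};

/// Sketch only: pipe a DOT description to the external `dot` program and
/// collect the SVG it prints on stdout. Caching and error reporting omitted.
fn render_dot_to_svg(dot_source: &str) -> std::io::Result<String> {
    let mut child = Command::new("dot")
        .arg("-Tsvg")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    // Feed the graph description (e.g. "digraph { a -> b }") on stdin.
    child
        .stdin
        .as_mut()
        .expect("stdin was piped")
        .write_all(dot_source.as_bytes())?;

    let output = child.wait_with_output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() {
    match render_dot_to_svg("digraph { a -> b }") {
        Ok(svg) => println!("{svg}"),
        Err(err) => eprintln!("running `dot` failed: {err}"),
    }
}
```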

View file

@@ -1,8 +1,21 @@
-use mlua::{Function, Lua};
-use regex::{Captures, Regex};
-use crate::{document::document::{DocumentAccessors, Document}, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, SourceFile, Token}}};
-use ariadne::{Report, Fmt, Label, ReportKind};
-use std::{ops::Range, rc::Rc};
+use crate::document::document::Document;
+use crate::document::document::DocumentAccessors;
+use crate::parser::parser::Parser;
+use crate::parser::parser::ReportColors;
+use crate::parser::rule::RegexRule;
+use crate::parser::source::Source;
+use crate::parser::source::SourceFile;
+use crate::parser::source::Token;
+use ariadne::Fmt;
+use ariadne::Label;
+use ariadne::Report;
+use ariadne::ReportKind;
+use mlua::Function;
+use mlua::Lua;
+use regex::Captures;
+use regex::Regex;
+use std::ops::Range;
+use std::rc::Rc;
 
 use super::paragraph::Paragraph;
@@ -13,20 +26,18 @@ pub struct ImportRule {
 impl ImportRule {
 	pub fn new() -> Self {
 		Self {
 			re: [Regex::new(r"(?:^|\n)@import(?:\[(.*)\])?[^\S\r\n]+(.*)").unwrap()],
 		}
 	}
 
-	pub fn validate_name(_colors: &ReportColors, name: &str) -> Result<String, String>
-	{
-		Ok(name.to_string())
-	}
+	pub fn validate_name(_colors: &ReportColors, name: &str) -> Result<String, String> {
+		Ok(name.to_string())
+	}
 
-	pub fn validate_as(_colors: &ReportColors, as_name: &str) -> Result<String, String>
-	{
-		// TODO: Use variable name validation rules
-		Ok(as_name.to_string())
-	}
+	pub fn validate_as(_colors: &ReportColors, as_name: &str) -> Result<String, String> {
+		// TODO: Use variable name validation rules
+		Ok(as_name.to_string())
+	}
 }
 
 impl RegexRule for ImportRule {
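The `@import` pattern from `ImportRule::new` can be exercised on its own with the `regex` crate; the document paths below are invented for illustration only.

```rust
use regex::Regex;

fn main() {
    // Same pattern as ImportRule::new() above, shown standalone.
    let re = Regex::new(r"(?:^|\n)@import(?:\[(.*)\])?[^\S\r\n]+(.*)").unwrap();

    // `@import[alias] path`: group 1 is the optional `as` name, group 2 the path.
    let caps = re.captures("\n@import[intro] chapters/intro.nml").unwrap();
    assert_eq!(caps.get(1).map(|m| m.as_str()), Some("intro"));
    assert_eq!(caps.get(2).map(|m| m.as_str()), Some("chapters/intro.nml"));

    // Without `[...]`, group 1 is absent and the rule falls back to an empty `as` name.
    let caps = re.captures("\n@import chapters/outro.nml").unwrap();
    assert!(caps.get(1).is_none());
    assert_eq!(caps.get(2).map(|m| m.as_str()), Some("chapters/outro.nml"));
}
```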
@@ -34,105 +45,118 @@ impl RegexRule for ImportRule {
 	fn regexes(&self) -> &[Regex] { &self.re }
 
-	fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document<'a>, token: Token, matches: Captures)
-		-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
+	fn on_regex_match<'a>(
+		&self,
+		_: usize,
+		parser: &dyn Parser,
+		document: &'a dyn Document<'a>,
+		token: Token,
+		matches: Captures,
+	) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
 		let mut result = vec![];
// Path // Path
let import_file = match matches.get(2) let import_file = match matches.get(2) {
{ Some(name) => match ImportRule::validate_name(parser.colors(), name.as_str()) {
Some(name) => { Err(msg) => {
match ImportRule::validate_name(parser.colors(), name.as_str()) result.push(
{ Report::build(ReportKind::Error, token.source(), name.start())
Err(msg) => { .with_message("Invalid name for import")
result.push( .with_label(
Report::build(ReportKind::Error, token.source(), name.start()) Label::new((token.source(), name.range()))
.with_message("Invalid name for import") .with_message(format!(
.with_label( "Import name `{}` is invalid. {msg}",
Label::new((token.source(), name.range())) name.as_str().fg(parser.colors().highlight)
.with_message(format!("Import name `{}` is invalid. {msg}", ))
name.as_str().fg(parser.colors().highlight))) .with_color(parser.colors().error),
.with_color(parser.colors().error)) )
.finish()); .finish(),
);
return result; return result;
}, }
Ok(filename) => { Ok(filename) => {
let meta = match std::fs::metadata(filename.as_str()) let meta = match std::fs::metadata(filename.as_str()) {
{ Err(_) => {
Err(_) => { result.push(
result.push( Report::build(ReportKind::Error, token.source(), name.start())
Report::build(ReportKind::Error, token.source(), name.start()) .with_message("Invalid import path")
.with_message("Invalid import path") .with_label(
.with_label( Label::new((token.source(), name.range()))
Label::new((token.source(), name.range())) .with_message(format!(
.with_message(format!("Unable to access file `{}`", "Unable to access file `{}`",
filename.fg(parser.colors().highlight))) filename.fg(parser.colors().highlight)
.with_color(parser.colors().error)) ))
.finish()); .with_color(parser.colors().error),
return result; )
}, .finish(),
Ok(meta) => meta );
}; return result;
}
Ok(meta) => meta,
};
if !meta.is_file() if !meta.is_file() {
{ result.push(
result.push( Report::build(ReportKind::Error, token.source(), name.start())
Report::build(ReportKind::Error, token.source(), name.start()) .with_message("Invalid import path")
.with_message("Invalid import path") .with_label(
.with_label( Label::new((token.source(), name.range()))
Label::new((token.source(), name.range())) .with_message(format!(
.with_message(format!("Path `{}` is not a file!", "Path `{}` is not a file!",
filename.fg(parser.colors().highlight))) filename.fg(parser.colors().highlight)
.with_color(parser.colors().error)) ))
.finish()); .with_color(parser.colors().error),
return result; )
} .finish(),
);
return result;
}
filename filename
}, }
} },
} _ => panic!("Invalid name for import"),
_ => panic!("Invalid name for import") };
};
// [Optional] import as // [Optional] import as
let import_as = match matches.get(1) let import_as = match matches.get(1) {
{ Some(as_name) => match ImportRule::validate_as(parser.colors(), as_name.as_str()) {
Some(as_name) => { Ok(as_name) => as_name,
match ImportRule::validate_as(parser.colors(), as_name.as_str()) Err(msg) => {
{ result.push(
Ok(as_name) => as_name, Report::build(ReportKind::Error, token.source(), as_name.start())
Err(msg) => { .with_message("Invalid name for import as")
result.push( .with_label(
Report::build(ReportKind::Error, token.source(), as_name.start()) Label::new((token.source(), as_name.range()))
.with_message("Invalid name for import as") .with_message(format!(
.with_label( "Cannot import `{import_file}` as `{}`. {msg}",
Label::new((token.source(), as_name.range())) as_name.as_str().fg(parser.colors().highlight)
.with_message(format!("Cannot import `{import_file}` as `{}`. {msg}", ))
as_name.as_str().fg(parser.colors().highlight))) .with_color(parser.colors().error),
.with_color(parser.colors().error)) )
.finish()); .finish(),
);
return result; return result;
}, }
} },
} _ => "".to_string(),
_ => "".to_string() };
};
let import = match SourceFile::new(import_file, Some(token.clone())) let import = match SourceFile::new(import_file, Some(token.clone())) {
{
Ok(import) => Rc::new(import), Ok(import) => Rc::new(import),
Err(path) => { Err(path) => {
result.push( result.push(
Report::build(ReportKind::Error, token.source(), token.start()) Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unable to read file content") .with_message("Unable to read file content")
.with_label( .with_label(
Label::new((token.source(), token.range)) Label::new((token.source(), token.range))
.with_message(format!("Failed to read content from path `{path}`")) .with_message(format!("Failed to read content from path `{path}`"))
.with_color(parser.colors().error)) .with_color(parser.colors().error),
.finish()); )
.finish(),
);
return result; return result;
} }
}; };
@@ -141,14 +165,17 @@ impl RegexRule for ImportRule {
 		document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
 
 		// Close paragraph
-		if document.last_element::<Paragraph>().is_some()
-		{
-			parser.push(document, Box::new(Paragraph::new(
-				Token::new(token.end()..token.end(), token.source())
-			)));
+		if document.last_element::<Paragraph>().is_some() {
+			parser.push(
+				document,
+				Box::new(Paragraph {
+					location: Token::new(token.end()..token.end(), token.source()),
+					content: Vec::new(),
+				}),
+			);
 		}
 
 		return result;
 	}
 	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
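The path handling in `on_regex_match` above boils down to a metadata check before the imported file is read. A standalone sketch of just that check follows; the helper name and the sample paths are made up.

```rust
use std::fs;

/// Hypothetical helper mirroring the validation shown above: the path must be
/// accessible and must point to a regular file before it is parsed as a document.
fn check_import_path(path: &str) -> Result<(), String> {
    let meta = fs::metadata(path).map_err(|_| format!("Unable to access file `{path}`"))?;
    if !meta.is_file() {
        return Err(format!("Path `{path}` is not a file!"));
    }
    Ok(())
}

fn main() {
    // A directory is accessible but not a regular file, so it is rejected;
    // an existing regular file would return Ok(()).
    assert!(check_import_path(".").is_err());
    // A path that cannot be accessed at all is rejected as well.
    assert!(check_import_path("no/such/file.nml").is_err());
}
```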

View file

@@ -10,6 +10,7 @@ use regex::Regex;
 use crate::compiler::compiler::Compiler;
 use crate::compiler::compiler::Target;
 use crate::document::document::Document;
+use crate::document::element::ContainerElement;
 use crate::document::element::ElemKind;
 use crate::document::element::Element;
 use crate::parser::parser::Parser;
@@ -27,27 +28,13 @@ use crate::parser::source::Token
 // Which would need to be reworked
 #[derive(Debug)]
 pub struct Paragraph {
-	location: Token,
+	pub location: Token,
 	pub content: Vec<Box<dyn Element>>,
 }
 
 impl Paragraph {
-	pub fn new(location: Token) -> Self {
-		Self {
-			location,
-			content: Vec::new(),
-		}
-	}
-
 	pub fn is_empty(&self) -> bool { self.content.is_empty() }
 
-	pub fn push(&mut self, elem: Box<dyn Element>) {
-		if elem.location().source() == self.location().source() {
-			self.location.range = self.location.start()..elem.location().end();
-		}
-		self.content.push(elem);
-	}
-
 	pub fn find_back<P: FnMut(&&Box<dyn Element + 'static>) -> bool>(
 		&self,
 		predicate: P,
@@ -102,6 +89,20 @@ impl Element for Paragraph {
 			Target::LATEX => todo!("Unimplemented compiler"),
 		}
 	}
+
+	fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
+}
+
+impl ContainerElement for Paragraph {
+	fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }
+
+	fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
+		if elem.location().source() == self.location().source() {
+			self.location.range = self.location.start()..elem.location().end();
+		}
+		self.content.push(elem);
+		Ok(())
+	}
 }
 
 pub struct ParagraphRule {
 
@@ -139,10 +140,10 @@ impl Rule for ParagraphRule {
 		parser.push(
 			document,
-			Box::new(Paragraph::new(Token::new(
-				cursor.pos..end_cursor.pos,
-				cursor.source.clone(),
-			))),
+			Box::new(Paragraph {
+				location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
+				content: Vec::new(),
+			}),
 		);
 
 		(end_cursor, Vec::new())
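Moving `push` behind the new `ContainerElement` trait lets callers walk a paragraph's children without downcasting to `Paragraph`. The snippet below is a self-contained mock-up of that shape, not the crate's real traits (which carry more methods); `count_leaves` is a hypothetical helper.

```rust
// Stub traits mirroring the accessors shown in this diff: `as_container()` on
// Element and `contained()` on ContainerElement.
trait Element {
    fn as_container(&self) -> Option<&dyn ContainerElement> { None }
}

trait ContainerElement: Element {
    fn contained(&self) -> &Vec<Box<dyn Element>>;
}

struct Text;
impl Element for Text {}

struct Paragraph {
    content: Vec<Box<dyn Element>>,
}

impl Element for Paragraph {
    fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}

impl ContainerElement for Paragraph {
    fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }
}

/// Count leaf (non-container) elements reachable from `elem`.
fn count_leaves(elem: &dyn Element) -> usize {
    match elem.as_container() {
        Some(container) => container
            .contained()
            .iter()
            .map(|child| count_leaves(child.as_ref()))
            .sum(),
        None => 1,
    }
}

fn main() {
    let children: Vec<Box<dyn Element>> = vec![Box::new(Text), Box::new(Text)];
    let paragraph = Paragraph { content: children };
    assert_eq!(count_leaves(&paragraph), 2);
}
```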

View file

@@ -1,9 +1,11 @@
+use std::collections::HashMap;
 use std::io::Read;
 use std::io::Write;
 use std::ops::Range;
 use std::process::Command;
 use std::process::Stdio;
 use std::rc::Rc;
+use std::str::FromStr;
 use std::sync::Once;
 
 use ariadne::Fmt;
@@ -15,6 +17,7 @@ use crypto::sha2::Sha512;
 use mlua::Function;
 use mlua::Lua;
 use regex::Captures;
+use regex::Match;
 use regex::Regex;
 
 use crate::cache::cache::Cached;
@@ -25,10 +28,15 @@ use crate::document::document::Document;
 use crate::document::element::ElemKind;
 use crate::document::element::Element;
 use crate::parser::parser::Parser;
+use crate::parser::parser::ReportColors;
 use crate::parser::rule::RegexRule;
 use crate::parser::source::Source;
 use crate::parser::source::Token;
 use crate::parser::util;
+use crate::parser::util::Property;
+use crate::parser::util::PropertyMap;
+use crate::parser::util::PropertyMapError;
+use crate::parser::util::PropertyParser;
 
 #[derive(Debug, PartialEq, Eq)]
 enum TexKind {
@@ -36,6 +44,18 @@ enum TexKind {
 	Inline,
 }
 
+impl FromStr for TexKind {
+	type Err = String;
+
+	fn from_str(s: &str) -> Result<Self, Self::Err> {
+		match s {
+			"inline" => Ok(TexKind::Inline),
+			"block" => Ok(TexKind::Block),
+			_ => Err(format!("Unknown kind: {s}")),
+		}
+	}
+}
+
 impl From<&TexKind> for ElemKind {
 	fn from(value: &TexKind) -> Self {
 		match value {
@@ -47,30 +67,14 @@ impl From<&TexKind> for ElemKind {
 #[derive(Debug)]
 struct Tex {
-	location: Token,
-	block: TexKind,
-	env: String,
-	tex: String,
-	caption: Option<String>,
+	pub(self) location: Token,
+	pub(self) kind: TexKind,
+	pub(self) env: String,
+	pub(self) tex: String,
+	pub(self) caption: Option<String>,
 }
 
 impl Tex {
-	fn new(
-		location: Token,
-		block: TexKind,
-		env: String,
-		tex: String,
-		caption: Option<String>,
-	) -> Self {
-		Self {
-			location,
-			block,
-			env,
-			tex,
-			caption,
-		}
-	}
-
 	fn format_latex(fontsize: &String, preamble: &String, tex: &String) -> FormattedTex {
 		FormattedTex(format!(
 			r"\documentclass[{}pt,preview]{{standalone}}
@@ -127,13 +131,9 @@ impl Cached for FormattedTex {
 			svg BLOB NOT NULL);"
 	}
 
-	fn sql_get_query() -> &'static str {
-		"SELECT svg FROM cached_tex WHERE digest = (?1)"
-	}
+	fn sql_get_query() -> &'static str { "SELECT svg FROM cached_tex WHERE digest = (?1)" }
 
-	fn sql_insert_query() -> &'static str {
-		"INSERT INTO cached_tex (digest, svg) VALUES (?1, ?2)"
-	}
+	fn sql_insert_query() -> &'static str { "INSERT INTO cached_tex (digest, svg) VALUES (?1, ?2)" }
 
 	fn key(&self) -> <Self as Cached>::Key {
 		let mut hasher = Sha512::new();
@@ -144,21 +144,13 @@ impl Cached for FormattedTex {
 }
 
 impl Element for Tex {
-	fn location(&self) -> &Token {
-		&self.location
-	}
+	fn location(&self) -> &Token { &self.location }
 
-	fn kind(&self) -> ElemKind {
-		(&self.block).into()
-	}
+	fn kind(&self) -> ElemKind { (&self.kind).into() }
 
-	fn element_name(&self) -> &'static str {
-		"LaTeX"
-	}
+	fn element_name(&self) -> &'static str { "LaTeX" }
 
-	fn to_string(&self) -> String {
-		format!("{self:#?}")
-	}
+	fn to_string(&self) -> String { format!("{self:#?}") }
 
 	fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
 		match compiler.target() {
@@ -172,6 +164,8 @@ impl Element for Tex {
 			}
 		});
 
+		// TODO: Do something with the caption
+
 		let exec = document
 			.get_variable(format!("tex.{}.exec", self.env).as_str())
 			.map_or("latex2svg".to_string(), |var| var.to_string());
@@ -182,7 +176,7 @@ impl Element for Tex {
 		let preamble = document
 			.get_variable(format!("tex.{}.preamble", self.env).as_str())
 			.map_or("".to_string(), |var| var.to_string());
-		let prepend = if self.block == TexKind::Inline {
+		let prepend = if self.kind == TexKind::Inline {
 			"".to_string()
 		} else {
 			document
@@ -190,7 +184,7 @@ impl Element for Tex {
 			.map_or("".to_string(), |var| var.to_string() + "\n")
 		};
 
-		let latex = match self.block {
+		let latex = match self.kind {
 			TexKind::Inline => {
 				Tex::format_latex(&fontsize, &preamble, &format!("${{{}}}$", self.tex))
 			}
@@ -218,27 +212,82 @@ impl Element for Tex {
 pub struct TexRule {
 	re: [Regex; 2],
+	properties: PropertyParser,
 }
 
 impl TexRule {
 	pub fn new() -> Self {
+		let mut props = HashMap::new();
+		props.insert(
+			"env".to_string(),
+			Property::new(
+				true,
+				"Tex environment".to_string(),
+				Some("main".to_string()),
+			),
+		);
+		props.insert(
+			"kind".to_string(),
+			Property::new(false, "Element display kind".to_string(), None),
+		);
+		props.insert(
+			"caption".to_string(),
+			Property::new(false, "Latex caption".to_string(), None),
+		);
 		Self {
 			re: [
-				Regex::new(r"\$\|(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\|\$)?").unwrap(),
-				Regex::new(r"\$(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
+				Regex::new(r"\$\|(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\|\$)?").unwrap(),
+				Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
 			],
+			properties: PropertyParser::new(props),
 		}
 	}
+
+	fn parse_properties(
+		&self,
+		colors: &ReportColors,
+		token: &Token,
+		m: &Option<Match>,
+	) -> Result<PropertyMap, Report<'_, (Rc<dyn Source>, Range<usize>)>> {
+		match m {
+			None => match self.properties.default() {
+				Ok(properties) => Ok(properties),
+				Err(e) => Err(
+					Report::build(ReportKind::Error, token.source(), token.start())
+						.with_message("Invalid Tex Properties")
+						.with_label(
+							Label::new((token.source().clone(), token.range.clone()))
+								.with_message(format!("Tex is missing required property: {e}"))
+								.with_color(colors.error),
+						)
+						.finish(),
+				),
+			},
+			Some(props) => {
+				let processed =
+					util::process_escaped('\\', "]", props.as_str().trim_start().trim_end());
+				match self.properties.parse(processed.as_str()) {
+					Err(e) => Err(
+						Report::build(ReportKind::Error, token.source(), props.start())
+							.with_message("Invalid Tex Properties")
+							.with_label(
+								Label::new((token.source().clone(), props.range()))
+									.with_message(e)
+									.with_color(colors.error),
+							)
+							.finish(),
+					),
+					Ok(properties) => Ok(properties),
+				}
+			}
+		}
+	}
 }
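The `$[key=value, ...]` block consumed by `parse_properties` feeds, among others, the `kind` property, which is converted through the `FromStr` impl added earlier in this file. Below is a standalone copy of that conversion so it compiles outside the crate; `TexKind` here is a stub mirroring the private enum.

```rust
use std::str::FromStr;

// Stub mirroring the private `TexKind` enum and its FromStr impl from this diff.
#[derive(Debug, PartialEq)]
enum TexKind {
    Block,
    Inline,
}

impl FromStr for TexKind {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "inline" => Ok(TexKind::Inline),
            "block" => Ok(TexKind::Block),
            _ => Err(format!("Unknown kind: {s}")),
        }
    }
}

fn main() {
    // `$[kind=block] ...$` forces a block element even with the inline `$ ... $` syntax;
    // unknown values are rejected and end up reported as an invalid property.
    assert_eq!("block".parse::<TexKind>(), Ok(TexKind::Block));
    assert!("figure".parse::<TexKind>().is_err());
}
```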
 impl RegexRule for TexRule {
-	fn name(&self) -> &'static str {
-		"Tex"
-	}
+	fn name(&self) -> &'static str { "Tex" }
 
-	fn regexes(&self) -> &[regex::Regex] {
-		&self.re
-	}
+	fn regexes(&self) -> &[regex::Regex] { &self.re }
 
 	fn on_regex_match(
 		&self,
@@ -250,12 +299,6 @@ impl RegexRule for TexRule {
 	) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
 		let mut reports = vec![];
 
-		let tex_env = matches
-			.get(1)
-			.and_then(|env| Some(env.as_str().trim_start().trim_end()))
-			.and_then(|env| (!env.is_empty()).then_some(env))
-			.unwrap_or("main");
-
 		let tex_content = match matches.get(2) {
 			// Unterminated `$`
 			None => {
@ -298,28 +341,155 @@ impl RegexRule for TexRule {
} }
}; };
// TODO: Caption // Properties
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(1)) {
Ok(pm) => pm,
Err(report) => {
reports.push(report);
return reports;
}
};
// Tex kind
let tex_kind = match properties.get("kind", |prop, value| {
TexKind::from_str(value.as_str()).map_err(|e| (prop, e))
}) {
Ok((_prop, kind)) => kind,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid Tex Property")
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `kind: {}` cannot be converted: {}",
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(parser.colors().warning),
)
.finish(),
);
return reports;
}
PropertyMapError::NotFoundError(err) => {
if index == 1 {
TexKind::Inline
} else {
TexKind::Block
}
}
},
};
// Caption
let caption = properties
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok()
.and_then(|(_, value)| Some(value));
// Environ
let tex_env = properties
.get("env", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok()
.and_then(|(_, value)| Some(value))
.unwrap();
 		parser.push(
 			document,
-			Box::new(Tex::new(
-				token,
-				if index == 1 {
-					TexKind::Inline
-				} else {
-					TexKind::Block
-				},
-				tex_env.to_string(),
-				tex_content,
-				None,
-			)),
+			Box::new(Tex {
+				location: token,
+				kind: tex_kind,
+				env: tex_env.to_string(),
+				tex: tex_content,
+				caption,
+			}),
 		);
 
 		reports
 	}
 
 	// TODO
-	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
-		vec![]
-	}
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::source::SourceFile;
use super::*;
#[test]
fn tex_block() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
$[kind=block, caption=Some\, text\\] 1+1=2 $
$|[env=another] Non Math \LaTeX|$
$[kind=block,env=another] e^{i\pi}=-1$
"#
.to_string(),
None,
));
let parser = LangParser::default();
let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow();
let found = borrow
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();
assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
}
#[test]
fn tex_inline() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
$[ caption=Some\, text\\] 1+1=2 $
$|[env=another, kind=inline , caption = Enclosed \]. ] Non Math \LaTeX|$
$[env=another] e^{i\pi}=-1$
"#
.to_string(),
None,
));
let parser = LangParser::default();
let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow();
let found = borrow
.first()
.unwrap()
.as_container()
.unwrap()
.contained()
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();
assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[1].caption, Some("Enclosed ].".to_string()));
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
 	}
 }
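The tests above exercise the reworked capture groups; the same behaviour can be reproduced with the `regex` crate alone. The input string below is the first line of the `tex_block` test; everything else is illustrative.

```rust
use regex::Regex;

fn main() {
    // Second pattern from TexRule::new(): inline math `$ ... $` with an optional
    // `[...]` property block that now honours `\.`-style escapes such as `\]` and `\\`.
    let re = Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap();

    let caps = re
        .captures(r"$[kind=block, caption=Some\, text\\] 1+1=2 $")
        .unwrap();
    // Group 1 holds the raw property string, escapes still in place...
    assert_eq!(caps.get(1).unwrap().as_str(), r"kind=block, caption=Some\, text\\");
    // ...and group 2 the TeX source itself.
    assert_eq!(caps.get(2).unwrap().as_str().trim(), "1+1=2");
}
```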

View file

@@ -10,6 +10,7 @@ use ariadne::Report;
 use crate::document::document::Document;
 use crate::document::document::DocumentAccessors;
+use crate::document::element::ContainerElement;
 use crate::document::element::DocumentEnd;
 use crate::document::element::ElemKind;
 use crate::document::element::Element;
@@ -119,23 +120,13 @@ impl LangParser {
 }
 
 impl Parser for LangParser {
-	fn colors(&self) -> &ReportColors {
-		&self.colors
-	}
+	fn colors(&self) -> &ReportColors { &self.colors }
 
-	fn rules(&self) -> &Vec<Box<dyn Rule>> {
-		&self.rules
-	}
-	fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> {
-		&mut self.rules
-	}
+	fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
+	fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
 
-	fn state(&self) -> std::cell::Ref<'_, StateHolder> {
-		self.state.borrow()
-	}
-	fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> {
-		self.state.borrow_mut()
-	}
+	fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
+	fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
 
 	fn has_error(&self) -> bool { *self.err_flag.borrow() }
@@ -145,7 +136,10 @@ impl Parser for LangParser {
 		let mut paragraph = doc
 			.last_element_mut::<Paragraph>()
 			.or_else(|| {
-				doc.push(Box::new(Paragraph::new(elem.location().clone())));
+				doc.push(Box::new(Paragraph {
+					location: elem.location().clone(),
+					content: Vec::new(),
+				}));
 				doc.last_element_mut::<Paragraph>()
 			})
 			.unwrap();
@@ -228,9 +222,13 @@ impl Parser for LangParser {
 				.on_scope_end(self, &doc, super::state::Scope::DOCUMENT),
 		);
 
-		self.push(&doc, Box::new(DocumentEnd(
-			Token::new(doc.source().content().len()..doc.source().content().len(), doc.source())
-		)));
+		self.push(
+			&doc,
+			Box::new(DocumentEnd(Token::new(
+				doc.source().content().len()..doc.source().content().len(),
+				doc.source(),
+			))),
+		);
 
 		return Box::new(doc);
 	}

View file

@@ -144,7 +144,7 @@ pub fn parse_paragraph<'a>(
 	if parsed.content().borrow().len() > 1 {
 		return Err("Parsed document contains more than a single paragraph");
 	} else if parsed.content().borrow().len() == 0 {
-		return Err("Parser document is empty");
+		return Err("Parsed document is empty");
 	} else if parsed.last_element::<Paragraph>().is_none() {
 		return Err("Parsed element is not a paragraph");
 	}
@@ -347,6 +347,7 @@ impl PropertyParser {
 				escaped = 0;
 			}
 		}
+		(0..escaped).for_each(|_| value.push('\\'));
 		if !in_name && value.trim_end().trim_start().is_empty() {
 			return Err("Expected a value after last `=`".to_string());
 		} else if name.is_empty() || value.is_empty() {
@@ -379,6 +380,7 @@ impl PropertyParser {
 #[cfg(test)]
 mod tests {
 	use super::*;
+	use crate::document::element::ContainerElement;
 	use crate::document::langdocument::LangDocument;
 	use crate::elements::comment::Comment;
 	use crate::elements::style::Style;
@@ -405,7 +407,10 @@ mod tests {
 		assert_eq!(process_text(&doc, "\na"), "a");
 
 		let tok = Token::new(0..0, source);
-		doc.push(Box::new(Paragraph::new(tok.clone())));
+		doc.push(Box::new(Paragraph {
+			location: tok.clone(),
+			content: Vec::new(),
+		}));
 
 		// Comments are ignored (kind => Invisible)
 		(&doc as &dyn Document)