Update
parent d0343cee6b
commit 8c71258212
7 changed files with 426 additions and 222 deletions
@@ -21,6 +21,7 @@ ariadne = "0.4.1"
dashmap = "6.0.1"
downcast-rs = "1.2.1"
getopts = "0.2.21"
graphviz-rust = "0.9.0"
lazy_static = "1.5.0"
lsp-server = "0.7.6"
lsp-types = "0.97.0"
@@ -11,12 +11,12 @@ Some features requires external dependencies to work.
We ship a modified version of `latex2svg` by Matthias C. Hormann.
The modified program can be found in [third/latex2svg](third/latex2svg) and is licensed under MIT.

The installation instructions specified on [latex2svg's repository](https://github.com/Moonbase59/latex2svg).
The installation instructions can be found on [latex2svg's repository](https://github.com/Moonbase59/latex2svg).

## Graphviz rendering

To render Graphviz graph `[graph]...[/graph]`
You need to install the `dot` program from [Graphviz](https://graphviz.org/) in order to render graphs.
To render Graphviz graph (i.e `[graph]...[/graph]`),
you need to install the `dot` program from [Graphviz](https://graphviz.org/).

## Lua kernels
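As an illustrative aside (not part of the committed README), a minimal sketch of what a Graphviz block in a document might look like, assuming the `[graph]...[/graph]` tags simply wrap ordinary DOT source that is handed to the external `dot` program; the node names are placeholders:

```
[graph]
digraph example {
    // plain DOT source; the `dot` binary renders this to SVG
    parser -> document;
    document -> compiler;
}
[/graph]
```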
@@ -36,6 +36,8 @@ cargo build --release --bin nml
- [x] Graphviz rendering
- [x] Media
- [ ] References
- [ ] Navigation
- [ ] Cross-Document references
- [ ] Complete Lua api
- [ ] Documentation
- [ ] Table
@@ -1,8 +1,21 @@
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use crate::{document::document::{DocumentAccessors, Document}, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, SourceFile, Token}}};
use ariadne::{Report, Fmt, Label, ReportKind};
use std::{ops::Range, rc::Rc};
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::SourceFile;
use crate::parser::source::Token;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;

use super::paragraph::Paragraph;

@@ -17,13 +30,11 @@ impl ImportRule {
}
}

pub fn validate_name(_colors: &ReportColors, name: &str) -> Result<String, String>
{
pub fn validate_name(_colors: &ReportColors, name: &str) -> Result<String, String> {
Ok(name.to_string())
}

pub fn validate_as(_colors: &ReportColors, as_name: &str) -> Result<String, String>
{
pub fn validate_as(_colors: &ReportColors, as_name: &str) -> Result<String, String> {
// TODO: Use variable name validation rules
Ok(as_name.to_string())
}
@@ -34,74 +45,83 @@ impl RegexRule for ImportRule {

fn regexes(&self) -> &[Regex] { &self.re }

fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document<'a>, token: Token, matches: Captures)
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
fn on_regex_match<'a>(
&self,
_: usize,
parser: &dyn Parser,
document: &'a dyn Document<'a>,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut result = vec![];

// Path
let import_file = match matches.get(2)
{
Some(name) => {
match ImportRule::validate_name(parser.colors(), name.as_str())
{
let import_file = match matches.get(2) {
Some(name) => match ImportRule::validate_name(parser.colors(), name.as_str()) {
Err(msg) => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid name for import")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Import name `{}` is invalid. {msg}",
name.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
.with_message(format!(
"Import name `{}` is invalid. {msg}",
name.as_str().fg(parser.colors().highlight)
))
.with_color(parser.colors().error),
)
.finish(),
);

return result;
},
}
Ok(filename) => {
let meta = match std::fs::metadata(filename.as_str())
{
let meta = match std::fs::metadata(filename.as_str()) {
Err(_) => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid import path")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Unable to access file `{}`",
filename.fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
.with_message(format!(
"Unable to access file `{}`",
filename.fg(parser.colors().highlight)
))
.with_color(parser.colors().error),
)
.finish(),
);
return result;
},
Ok(meta) => meta
}
Ok(meta) => meta,
};

if !meta.is_file()
{
if !meta.is_file() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid import path")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Path `{}` is not a file!",
filename.fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
.with_message(format!(
"Path `{}` is not a file!",
filename.fg(parser.colors().highlight)
))
.with_color(parser.colors().error),
)
.finish(),
);
return result;
}

filename
}
},
}
}
_ => panic!("Invalid name for import")
_ => panic!("Invalid name for import"),
};

// [Optional] import as
let import_as = match matches.get(1)
{
Some(as_name) => {
match ImportRule::validate_as(parser.colors(), as_name.as_str())
{
let import_as = match matches.get(1) {
Some(as_name) => match ImportRule::validate_as(parser.colors(), as_name.as_str()) {
Ok(as_name) => as_name,
Err(msg) => {
result.push(
@@ -109,20 +129,22 @@ impl RegexRule for ImportRule {
.with_message("Invalid name for import as")
.with_label(
Label::new((token.source(), as_name.range()))
.with_message(format!("Canot import `{import_file}` as `{}`. {msg}",
as_name.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
.with_message(format!(
"Canot import `{import_file}` as `{}`. {msg}",
as_name.as_str().fg(parser.colors().highlight)
))
.with_color(parser.colors().error),
)
.finish(),
);

return result;
}
},
}
}
_ => "".to_string()
_ => "".to_string(),
};

let import = match SourceFile::new(import_file, Some(token.clone()))
{
let import = match SourceFile::new(import_file, Some(token.clone())) {
Ok(import) => Rc::new(import),
Err(path) => {
result.push(
@@ -131,8 +153,10 @@ impl RegexRule for ImportRule {
.with_label(
Label::new((token.source(), token.range))
.with_message(format!("Failed to read content from path `{path}`"))
.with_color(parser.colors().error))
.finish());
.with_color(parser.colors().error),
)
.finish(),
);
return result;
}
};
@@ -141,11 +165,14 @@ impl RegexRule for ImportRule {
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));

// Close paragraph
if document.last_element::<Paragraph>().is_some()
{
parser.push(document, Box::new(Paragraph::new(
Token::new(token.end()..token.end(), token.source())
)));
if document.last_element::<Paragraph>().is_some() {
parser.push(
document,
Box::new(Paragraph {
location: Token::new(token.end()..token.end(), token.source()),
content: Vec::new(),
}),
);
}

return result;
@@ -10,6 +10,7 @@ use regex::Regex;
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::Parser;
@@ -27,27 +28,13 @@ use crate::parser::source::Token;
// Which would need to be reworked
#[derive(Debug)]
pub struct Paragraph {
location: Token,
pub location: Token,
pub content: Vec<Box<dyn Element>>,
}

impl Paragraph {
pub fn new(location: Token) -> Self {
Self {
location,
content: Vec::new(),
}
}

pub fn is_empty(&self) -> bool { self.content.is_empty() }

pub fn push(&mut self, elem: Box<dyn Element>) {
if elem.location().source() == self.location().source() {
self.location.range = self.location.start()..elem.location().end();
}
self.content.push(elem);
}

pub fn find_back<P: FnMut(&&Box<dyn Element + 'static>) -> bool>(
&self,
predicate: P,
@@ -102,6 +89,20 @@ impl Element for Paragraph {
Target::LATEX => todo!("Unimplemented compiler"),
}
}

fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}

impl ContainerElement for Paragraph {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }

fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.location().source() == self.location().source() {
self.location.range = self.location.start()..elem.location().end();
}
self.content.push(elem);
Ok(())
}
}

pub struct ParagraphRule {
@@ -139,10 +140,10 @@ impl Rule for ParagraphRule {

parser.push(
document,
Box::new(Paragraph::new(Token::new(
cursor.pos..end_cursor.pos,
cursor.source.clone(),
))),
Box::new(Paragraph {
location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
content: Vec::new(),
}),
);

(end_cursor, Vec::new())
@@ -1,9 +1,11 @@
use std::collections::HashMap;
use std::io::Read;
use std::io::Write;
use std::ops::Range;
use std::process::Command;
use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Once;

use ariadne::Fmt;
@@ -15,6 +17,7 @@ use crypto::sha2::Sha512;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Match;
use regex::Regex;

use crate::cache::cache::Cached;
@@ -25,10 +28,15 @@ use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util;
use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;

#[derive(Debug, PartialEq, Eq)]
enum TexKind {
@@ -36,6 +44,18 @@ enum TexKind {
Inline,
}

impl FromStr for TexKind {
type Err = String;

fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"inline" => Ok(TexKind::Inline),
"block" => Ok(TexKind::Block),
_ => Err(format!("Unknown kind: {s}")),
}
}
}

impl From<&TexKind> for ElemKind {
fn from(value: &TexKind) -> Self {
match value {
@@ -47,30 +67,14 @@ impl From<&TexKind> for ElemKind {

#[derive(Debug)]
struct Tex {
location: Token,
block: TexKind,
env: String,
tex: String,
caption: Option<String>,
pub(self) location: Token,
pub(self) kind: TexKind,
pub(self) env: String,
pub(self) tex: String,
pub(self) caption: Option<String>,
}

impl Tex {
fn new(
location: Token,
block: TexKind,
env: String,
tex: String,
caption: Option<String>,
) -> Self {
Self {
location,
block,
env,
tex,
caption,
}
}

fn format_latex(fontsize: &String, preamble: &String, tex: &String) -> FormattedTex {
FormattedTex(format!(
r"\documentclass[{}pt,preview]{{standalone}}
@@ -127,13 +131,9 @@ impl Cached for FormattedTex {
svg BLOB NOT NULL);"
}

fn sql_get_query() -> &'static str {
"SELECT svg FROM cached_tex WHERE digest = (?1)"
}
fn sql_get_query() -> &'static str { "SELECT svg FROM cached_tex WHERE digest = (?1)" }

fn sql_insert_query() -> &'static str {
"INSERT INTO cached_tex (digest, svg) VALUES (?1, ?2)"
}
fn sql_insert_query() -> &'static str { "INSERT INTO cached_tex (digest, svg) VALUES (?1, ?2)" }

fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new();
@@ -144,21 +144,13 @@ impl Cached for FormattedTex {
}

impl Element for Tex {
fn location(&self) -> &Token {
&self.location
}
fn location(&self) -> &Token { &self.location }

fn kind(&self) -> ElemKind {
(&self.block).into()
}
fn kind(&self) -> ElemKind { (&self.kind).into() }

fn element_name(&self) -> &'static str {
"LaTeX"
}
fn element_name(&self) -> &'static str { "LaTeX" }

fn to_string(&self) -> String {
format!("{self:#?}")
}
fn to_string(&self) -> String { format!("{self:#?}") }

fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() {
@@ -172,6 +164,8 @@ impl Element for Tex {
}
});

// TODO: Do something with the caption

let exec = document
.get_variable(format!("tex.{}.exec", self.env).as_str())
.map_or("latex2svg".to_string(), |var| var.to_string());
@@ -182,7 +176,7 @@ impl Element for Tex {
let preamble = document
.get_variable(format!("tex.{}.preamble", self.env).as_str())
.map_or("".to_string(), |var| var.to_string());
let prepend = if self.block == TexKind::Inline {
let prepend = if self.kind == TexKind::Inline {
"".to_string()
} else {
document
@@ -190,7 +184,7 @@ impl Element for Tex {
.map_or("".to_string(), |var| var.to_string() + "\n")
};

let latex = match self.block {
let latex = match self.kind {
TexKind::Inline => {
Tex::format_latex(&fontsize, &preamble, &format!("${{{}}}$", self.tex))
}
@@ -218,27 +212,82 @@ impl Element for Tex {

pub struct TexRule {
re: [Regex; 2],
properties: PropertyParser,
}

impl TexRule {
pub fn new() -> Self {
let mut props = HashMap::new();
props.insert(
"env".to_string(),
Property::new(
true,
"Tex environment".to_string(),
Some("main".to_string()),
),
);
props.insert(
"kind".to_string(),
Property::new(false, "Element display kind".to_string(), None),
);
props.insert(
"caption".to_string(),
Property::new(false, "Latex caption".to_string(), None),
);
Self {
re: [
Regex::new(r"\$\|(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\|\$)?").unwrap(),
Regex::new(r"\$(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
Regex::new(r"\$\|(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\|\$)?").unwrap(),
Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
],
properties: PropertyParser::new(props),
}
}

fn parse_properties(
&self,
colors: &ReportColors,
token: &Token,
m: &Option<Match>,
) -> Result<PropertyMap, Report<'_, (Rc<dyn Source>, Range<usize>)>> {
match m {
None => match self.properties.default() {
Ok(properties) => Ok(properties),
Err(e) => Err(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid Tex Properties")
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Tex is missing required property: {e}"))
.with_color(colors.error),
)
.finish(),
),
},
Some(props) => {
let processed =
util::process_escaped('\\', "]", props.as_str().trim_start().trim_end());
match self.properties.parse(processed.as_str()) {
Err(e) => Err(
Report::build(ReportKind::Error, token.source(), props.start())
.with_message("Invalid Tex Properties")
.with_label(
Label::new((token.source().clone(), props.range()))
.with_message(e)
.with_color(colors.error),
)
.finish(),
),
Ok(properties) => Ok(properties),
}
}
}
}
}

impl RegexRule for TexRule {
fn name(&self) -> &'static str {
"Tex"
}
fn name(&self) -> &'static str { "Tex" }

fn regexes(&self) -> &[regex::Regex] {
&self.re
}
fn regexes(&self) -> &[regex::Regex] { &self.re }

fn on_regex_match(
&self,
@@ -250,12 +299,6 @@ impl RegexRule for TexRule {
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];

let tex_env = matches
.get(1)
.and_then(|env| Some(env.as_str().trim_start().trim_end()))
.and_then(|env| (!env.is_empty()).then_some(env))
.unwrap_or("main");

let tex_content = match matches.get(2) {
// Unterminated `$`
None => {
@@ -298,28 +341,155 @@ impl RegexRule for TexRule {
}
};

// TODO: Caption
// Properties
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(1)) {
Ok(pm) => pm,
Err(report) => {
reports.push(report);
return reports;
}
};

parser.push(
document,
Box::new(Tex::new(
token,
// Tex kind
let tex_kind = match properties.get("kind", |prop, value| {
TexKind::from_str(value.as_str()).map_err(|e| (prop, e))
}) {
Ok((_prop, kind)) => kind,
Err(e) => match e {
PropertyMapError::ParseError((prop, err)) => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid Tex Property")
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `kind: {}` cannot be converted: {}",
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(parser.colors().warning),
)
.finish(),
);
return reports;
}
PropertyMapError::NotFoundError(err) => {
if index == 1 {
TexKind::Inline
} else {
TexKind::Block
}
}
},
tex_env.to_string(),
tex_content,
None,
)),
};

// Caption
let caption = properties
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok()
.and_then(|(_, value)| Some(value));

// Environ
let tex_env = properties
.get("env", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok()
.and_then(|(_, value)| Some(value))
.unwrap();

parser.push(
document,
Box::new(Tex {
location: token,
kind: tex_kind,
env: tex_env.to_string(),
tex: tex_content,
caption,
}),
);

reports
}

// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
vec![]
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::source::SourceFile;

use super::*;

#[test]
fn tex_block() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
$[kind=block, caption=Some\, text\\] 1+1=2 $
$|[env=another] Non Math \LaTeX|$
$[kind=block,env=another] e^{i\pi}=-1$
"#
.to_string(),
None,
));
let parser = LangParser::default();
let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);

let borrow = doc.content().borrow();
let found = borrow
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();

assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
}

#[test]
fn tex_inline() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
$[ caption=Some\, text\\] 1+1=2 $
$|[env=another, kind=inline , caption = Enclosed \]. ] Non Math \LaTeX|$
$[env=another] e^{i\pi}=-1$
"#
.to_string(),
None,
));
let parser = LangParser::default();
let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);

let borrow = doc.content().borrow();
let found = borrow
.first()
.unwrap()
.as_container()
.unwrap()
.contained()
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();

assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[1].caption, Some("Enclosed ].".to_string()));
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
}
}
@@ -10,6 +10,7 @@ use ariadne::Report;

use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::DocumentEnd;
use crate::document::element::ElemKind;
use crate::document::element::Element;
@@ -119,23 +120,13 @@ impl LangParser {
}

impl Parser for LangParser {
fn colors(&self) -> &ReportColors {
&self.colors
}
fn colors(&self) -> &ReportColors { &self.colors }

fn rules(&self) -> &Vec<Box<dyn Rule>> {
&self.rules
}
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> {
&mut self.rules
}
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }

fn state(&self) -> std::cell::Ref<'_, StateHolder> {
self.state.borrow()
}
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> {
self.state.borrow_mut()
}
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }

fn has_error(&self) -> bool { *self.err_flag.borrow() }

@@ -145,7 +136,10 @@ impl Parser for LangParser {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph::new(elem.location().clone())));
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();
@@ -228,9 +222,13 @@ impl Parser for LangParser {
.on_scope_end(self, &doc, super::state::Scope::DOCUMENT),
);

self.push(&doc, Box::new(DocumentEnd(
Token::new(doc.source().content().len()..doc.source().content().len(), doc.source())
)));
self.push(
&doc,
Box::new(DocumentEnd(Token::new(
doc.source().content().len()..doc.source().content().len(),
doc.source(),
))),
);

return Box::new(doc);
}
@@ -144,7 +144,7 @@ pub fn parse_paragraph<'a>(
if parsed.content().borrow().len() > 1 {
return Err("Parsed document contains more than a single paragraph");
} else if parsed.content().borrow().len() == 0 {
return Err("Parser document is empty");
return Err("Parsed document is empty");
} else if parsed.last_element::<Paragraph>().is_none() {
return Err("Parsed element is not a paragraph");
}
@@ -347,6 +347,7 @@ impl PropertyParser {
escaped = 0;
}
}
(0..escaped).for_each(|_| value.push('\\'));
if !in_name && value.trim_end().trim_start().is_empty() {
return Err("Expected a value after last `=`".to_string());
} else if name.is_empty() || value.is_empty() {
@@ -379,6 +380,7 @@ impl PropertyParser {
#[cfg(test)]
mod tests {
use super::*;
use crate::document::element::ContainerElement;
use crate::document::langdocument::LangDocument;
use crate::elements::comment::Comment;
use crate::elements::style::Style;
@@ -405,7 +407,10 @@ mod tests {
assert_eq!(process_text(&doc, "\na"), "a");

let tok = Token::new(0..0, source);
doc.push(Box::new(Paragraph::new(tok.clone())));
doc.push(Box::new(Paragraph {
location: tok.clone(),
content: Vec::new(),
}));

// Comments are ignored (kind => Invisible)
(&doc as &dyn Document)