Fix format for util.rs

ef3d0c3e 2024-07-24 09:09:28 +02:00
parent 5ccd8048c2
commit 12a4e956a9
2 changed files with 682 additions and 415 deletions

267
src/elements/tex.rs Normal file
View file

@@ -0,0 +1,267 @@
use std::{io::{Read, Write}, ops::Range, process::{Command, Stdio}, rc::Rc, sync::Once};
use ariadne::{Fmt, Label, Report, ReportKind};
use crypto::{digest::Digest, sha2::Sha512};
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use crate::{cache::cache::{Cached, CachedError}, compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util}};
#[derive(Debug, PartialEq, Eq)]
enum TexKind
{
Block,
Inline,
}
impl From<&TexKind> for ElemKind
{
fn from(value: &TexKind) -> Self {
match value {
TexKind::Inline => ElemKind::Inline,
_ => ElemKind::Block
}
}
}
#[derive(Debug)]
struct Tex
{
location: Token,
block: TexKind,
env: String,
tex: String,
caption: Option<String>,
}
impl Tex {
fn new(location: Token, block: TexKind, env: String, tex: String, caption: Option<String>) -> Self {
Self { location, block, env, tex, caption }
}
fn format_latex(fontsize: &String, preamble: &String, tex: &String) -> FormattedTex
{
FormattedTex(format!(r"\documentclass[{}pt,preview]{{standalone}}
{}
\begin{{document}}
\begin{{preview}}
{}
\end{{preview}}
\end{{document}}",
fontsize, preamble, tex))
}
}
struct FormattedTex(String);
impl FormattedTex
{
/// Renders latex to svg
fn latex_to_svg(&self, exec: &String, fontsize: &String) -> Result<String, String>
{
print!("Rendering LaTex `{}`... ", self.0);
let process = match Command::new(exec)
.arg("--fontsize").arg(fontsize)
.stdout(Stdio::piped())
.stdin(Stdio::piped())
.spawn()
{
Err(e) => return Err(format!("Could not spawn `{exec}`: {}", e)),
Ok(process) => process
};
if let Err(e) = process.stdin.unwrap().write_all(self.0.as_bytes())
{
panic!("Unable to write to `latex2svg`'s stdin: {}", e);
}
let mut result = String::new();
match process.stdout.unwrap().read_to_string(&mut result)
{
Err(e) => panic!("Unable to read `latex2svg` stdout: {}", e),
Ok(_) => {}
}
println!("Done!");
Ok(result)
}
}
impl Cached for FormattedTex
{
type Key = String;
type Value = String;
fn sql_table() -> &'static str {
"CREATE TABLE IF NOT EXISTS cached_tex (
digest TEXT PRIMARY KEY,
svg BLOB NOT NULL);"
}
fn sql_get_query() -> &'static str {
"SELECT svg FROM cached_tex WHERE digest = (?1)"
}
fn sql_insert_query() -> &'static str {
"INSERT INTO cached_tex (digest, svg) VALUES (?1, ?2)"
}
fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new();
hasher.input(self.0.as_bytes());
hasher.result_str()
}
}
impl Element for Tex {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { (&self.block).into() }
fn element_name(&self) -> &'static str { "LaTeX" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, document: &dyn Document)
-> Result<String, String> {
match compiler.target() {
Target::HTML => {
static CACHE_INIT : Once = Once::new();
CACHE_INIT.call_once(|| if let Some(mut con) = compiler.cache() {
if let Err(e) = FormattedTex::init(&mut con)
{
eprintln!("Unable to create cache table: {e}");
}
});
let exec = document.get_variable(format!("tex.{}.exec", self.env).as_str())
.map_or("latex2svg".to_string(), |var| var.to_string());
// FIXME: Because fontsize is passed as an arg, verify that it cannot be used to execute python/shell code
let fontsize = document.get_variable(format!("tex.{}.fontsize", self.env).as_str())
.map_or("12".to_string(), |var| var.to_string());
let preamble = document.get_variable(format!("tex.{}.preamble", self.env).as_str())
.map_or("".to_string(), |var| var.to_string());
let prepend = if self.block == TexKind::Inline { "".to_string() }
else
{
document.get_variable(format!("tex.{}.block_prepend", self.env).as_str())
.map_or("".to_string(), |var| var.to_string()+"\n")
};
let latex = match self.block
{
TexKind::Inline => Tex::format_latex(
&fontsize,
&preamble,
&format!("${{{}}}$", self.tex)),
_ => Tex::format_latex(
&fontsize,
&preamble,
&format!("{prepend}{}", self.tex))
};
if let Some(mut con) = compiler.cache()
{
match latex.cached(&mut con, |s| s.latex_to_svg(&exec, &fontsize))
{
Ok(s) => Ok(s),
Err(e) => match e
{
CachedError::SqlErr(e) => Err(format!("Querying the cache failed: {e}")),
CachedError::GenErr(e) => Err(e)
}
}
}
else
{
latex.latex_to_svg(&exec, &fontsize)
}
}
_ => todo!("Unimplemented")
}
}
}
pub struct TexRule {
re: [Regex; 2],
}
impl TexRule {
pub fn new() -> Self {
Self {
re: [
Regex::new(r"\$\|(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\|\$)?").unwrap(),
Regex::new(r"\$(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
],
}
}
}
impl RegexRule for TexRule
{
fn name(&self) -> &'static str { "Tex" }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(&self, index: usize, parser: &dyn Parser, document: &dyn Document, token: Token, matches: Captures)
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let tex_env = matches.get(1)
.and_then(|env| Some(env.as_str().trim_start().trim_end()))
.and_then(|env| (!env.is_empty()).then_some(env))
.unwrap_or("main");
let tex_content = match matches.get(2)
{
// Unterminated `$`
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated Tex Code")
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Missing terminating `{}` after first `{}`",
["|$", "$"][index].fg(parser.colors().info),
["$|", "$"][index].fg(parser.colors().info)))
.with_color(parser.colors().error))
.finish());
return reports;
}
Some(content) => {
let processed = util::process_escaped('\\', ["|$", "$"][index],
content.as_str().trim_start().trim_end());
if processed.is_empty()
{
reports.push(
Report::build(ReportKind::Warning, token.source(), content.start())
.with_message("Empty Tex Code")
.with_label(
Label::new((token.source().clone(), content.range()))
.with_message("Tex code is empty")
.with_color(parser.colors().warning))
.finish());
}
processed
}
};
// TODO: Caption
parser.push(document, Box::new(Tex::new(
token,
if index == 1 { TexKind::Inline } else { TexKind::Block },
tex_env.to_string(),
tex_content,
None,
)));
reports
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}
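For reference, here is a minimal sketch of how the inline rule built in `TexRule::new` is expected to capture its groups, using only the `regex` crate with the pattern copied verbatim from above; the test harness and the sample inputs are hypothetical and not part of this commit. Group 1 carries the optional `[env]` name and group 2 the Tex code, which `on_regex_match` then trims and unescapes with `util::process_escaped`.

```rust
// Standalone sketch of the inline rule (index 1) from TexRule::new above.
// Only the `regex` crate is assumed; the pattern is copied verbatim.
use regex::Regex;

fn main() {
    let inline = Regex::new(r"\$(?:\[(.*)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap();

    // `$[main] e^{i\pi} + 1 = 0 $` -> env "main" in group 1, Tex code in group 2
    let caps = inline.captures(r"$[main] e^{i\pi} + 1 = 0 $").unwrap();
    assert_eq!(caps.get(1).map(|m| m.as_str()), Some("main"));
    assert_eq!(
        caps.get(2).map(|m| m.as_str().trim()),
        Some(r"e^{i\pi} + 1 = 0")
    );

    // An unterminated `$...` leaves group 2 empty, which on_regex_match
    // reports as an "Unterminated Tex Code" error.
    let caps = inline.captures("$x^2").unwrap();
    assert!(caps.get(2).is_none());
}
```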

src/parser/util.rs
View file

@@ -3,87 +3,87 @@ use std::collections::HashMap;
use unicode_segmentation::UnicodeSegmentation;

use crate::{
    document::{
        document::{Document, DocumentAccessors},
        element::ElemKind,
    },
    elements::paragraph::Paragraph,
};

/// Processes text for escape characters and paragraphing
pub fn process_text(document: &dyn Document, content: &str) -> String {
    let mut escaped = false;
    let mut newlines = 0usize; // Consecutive newlines
    //println!("Processing: [{content}]");
    let processed = content
        .graphemes(true)
        .fold((String::new(), None), |(mut out, prev), g| {
            if newlines != 0 && g != "\n" {
                newlines = 0;
                // Add a whitespace if necessary
                match out.chars().last() {
                    Some(c) => {
                        // NOTE: \n is considered whitespace, so previous codepoint can be \n
                        // (Which can only be done by escaping it)
                        if !c.is_whitespace() || c == '\n' {
                            out += " ";
                        }
                    }
                    None => {
                        if document
                            .last_element::<Paragraph>()
                            .and_then(|par| {
                                par.find_back(|e| e.kind() != ElemKind::Invisible)
                                    .and_then(|e| Some(e.kind() == ElemKind::Inline))
                            })
                            .unwrap_or(false)
                        {
                            out += " ";
                        }
                    } // Don't output anything
                }
            }
            // Output grapheme literally when escaped
            if escaped {
                escaped = false;
                return (out + g, Some(g));
            }
            // Increment newlines counter
            else if g == "\n" {
                newlines += 1;
                return (out, Some(g));
            }
            // Determine if escaped
            else if g == "\\" {
                escaped = !escaped;
                return (out, Some(g));
            }
            // Whitespaces
            else if g.chars().count() == 1 && g.chars().last().unwrap().is_whitespace() {
                // Content begins with whitespace
                if prev.is_none() {
                    if document.last_element::<Paragraph>().is_some() {
                        return (out + g, Some(g));
                    } else {
                        return (out, Some(g));
                    }
                }
                // Consecutive whitespaces are converted to a single whitespace
                else if prev.unwrap().chars().count() == 1
                    && prev.unwrap().chars().last().unwrap().is_whitespace()
                {
                    return (out, Some(g));
                }
            }
            return (out + g, Some(g));
        })
        .0
        .to_string();

    return processed;
}
/// Processes a string and escapes a single token out of it
@@ -95,182 +95,182 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
/// "escaped: %, also escaped: \\%, untouched: \\a");
/// ```
pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content: S) -> String {
    let mut processed = String::new();
    let mut escaped = 0;
    let mut token_it = token.chars().peekable();
    for c in content
        .as_ref()
        .chars()
        .as_str()
        .trim_start()
        .trim_end()
        .chars()
    {
        if c == escape {
            escaped += 1;
        } else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
            let _ = token_it.next();
            if token_it.peek() == None {
                (0..(escaped / 2)).for_each(|_| processed.push(escape));
                escaped = 0;
                token_it = token.chars().peekable();
                processed.push_str(token);
            }
        } else {
            if escaped != 0 {
                // Add untouched escapes
                (0..escaped).for_each(|_| processed.push('\\'));
                token_it = token.chars().peekable();
                escaped = 0;
            }
            processed.push(c);
        }
    }
    // Add trailing escapes
    (0..escaped).for_each(|_| processed.push('\\'));
    processed
}

#[derive(Debug)]
pub struct Property {
    required: bool,
    description: String,
    default: Option<String>,
}

impl Property {
    pub fn new(required: bool, description: String, default: Option<String>) -> Self {
        Self {
            required,
            description,
            default,
        }
    }
}

impl core::fmt::Display for Property {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.default.as_ref() {
            None => write!(
                f,
                "{} {}",
                ["[Opt]", "[Req]"][self.required as usize],
                self.description
            ),
            Some(default) => write!(
                f,
"{} {} (Deafult: {})", "{} {} (Deafult: {})",
["[Opt]", "[Req]"][self.required as usize], ["[Opt]", "[Req]"][self.required as usize],
self.description, self.description,
default default
), ),
} }
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub enum PropertyMapError<E> { pub enum PropertyMapError<E> {
ParseError(E), ParseError(E),
NotFoundError(String), NotFoundError(String),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct PropertyMap<'a> { pub struct PropertyMap<'a> {
pub(crate) properties: HashMap<String, (&'a Property, String)>, pub(crate) properties: HashMap<String, (&'a Property, String)>,
} }
impl<'a> PropertyMap<'a> { impl<'a> PropertyMap<'a> {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
properties: HashMap::new(), properties: HashMap::new(),
} }
} }
pub fn get<T, Error, F: FnOnce(&'a Property, &String) -> Result<T, Error>>( pub fn get<T, Error, F: FnOnce(&'a Property, &String) -> Result<T, Error>>(
&self, &self,
name: &str, name: &str,
f: F, f: F,
) -> Result<(&'a Property, T), PropertyMapError<Error>> { ) -> Result<(&'a Property, T), PropertyMapError<Error>> {
let (prop, value) = match self.properties.get(name) { let (prop, value) = match self.properties.get(name) {
Some(found) => found, Some(found) => found,
None => { None => {
return Err(PropertyMapError::NotFoundError(format!( return Err(PropertyMapError::NotFoundError(format!(
"Property `{name}` not found" "Property `{name}` not found"
))) )))
} }
}; };
match f(prop, value) { match f(prop, value) {
Ok(parsed) => Ok((*prop, parsed)), Ok(parsed) => Ok((*prop, parsed)),
Err(err) => Err(PropertyMapError::ParseError(err)), Err(err) => Err(PropertyMapError::ParseError(err)),
} }
} }
} }
pub struct PropertyParser { pub struct PropertyParser {
properties: HashMap<String, Property>, properties: HashMap<String, Property>,
} }
impl PropertyParser { impl PropertyParser {
pub fn new(properties: HashMap<String, Property>) -> Self { pub fn new(properties: HashMap<String, Property>) -> Self {
Self { properties } Self { properties }
} }
/// Attempts to build a default propertymap /// Attempts to build a default propertymap
/// ///
/// Returns an error if at least one [`Property`] is required and doesn't provide a default /// Returns an error if at least one [`Property`] is required and doesn't provide a default
pub fn default(&self) -> Result<PropertyMap<'_>, String> { pub fn default(&self) -> Result<PropertyMap<'_>, String> {
let mut properties = PropertyMap::new(); let mut properties = PropertyMap::new();
for (name, prop) in &self.properties { for (name, prop) in &self.properties {
match (prop.required, prop.default.as_ref()) { match (prop.required, prop.default.as_ref()) {
(true, None) => return Err(format!("Missing property `{name}` {prop}")), (true, None) => return Err(format!("Missing property `{name}` {prop}")),
(false, None) => {} (false, None) => {}
(_, Some(default)) => { (_, Some(default)) => {
properties properties
.properties .properties
.insert(name.clone(), (prop, default.clone())); .insert(name.clone(), (prop, default.clone()));
} }
} }
} }
Ok(properties) Ok(properties)
} }
/// Parses properties string "prop1=value1, prop2 = val\,2" -> {prop1: value1, prop2: val,2} /// Parses properties string "prop1=value1, prop2 = val\,2" -> {prop1: value1, prop2: val,2}
/// ///
/// # Key-value pair /// # Key-value pair
/// ///
/// Property names/values are separated by a single '=' that cannot be escaped. /// Property names/values are separated by a single '=' that cannot be escaped.
/// Therefore names cannot contain the '=' character. /// Therefore names cannot contain the '=' character.
/// ///
/// # Example /// # Example
/// ///
/// ``` /// ```
/// let mut properties = HashMap::new(); /// let mut properties = HashMap::new();
/// properties.insert("width".to_string(), /// properties.insert("width".to_string(),
/// Property::new(true, "Width of the element in em".to_string(), None)); /// Property::new(true, "Width of the element in em".to_string(), None));
/// ///
/// let parser = PropertyParser::new(properties); /// let parser = PropertyParser::new(properties);
/// let pm = parser.parse("width=15").unwrap(); /// let pm = parser.parse("width=15").unwrap();
/// ///
/// assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15); /// assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15);
/// ``` /// ```
/// # Return value /// # Return value
/// ///
/// Returns the parsed property map, or an error if either: /// Returns the parsed property map, or an error if either:
/// * A required property is missing /// * A required property is missing
/// * An unknown property is present /// * An unknown property is present
/// * A duplicate property is present /// * A duplicate property is present
/// ///
/// Note: Only ',' inside values can be escaped, other '\' are treated literally /// Note: Only ',' inside values can be escaped, other '\' are treated literally
pub fn parse(&self, content: &str) -> Result<PropertyMap<'_>, String> { pub fn parse(&self, content: &str) -> Result<PropertyMap<'_>, String> {
let mut properties = PropertyMap::new(); let mut properties = PropertyMap::new();
let mut try_insert = |name: &String, value: &String| -> Result<(), String> { let mut try_insert = |name: &String, value: &String| -> Result<(), String> {
let trimmed_name = name.trim_end().trim_start(); let trimmed_name = name.trim_end().trim_start();
let trimmed_value = value.trim_end().trim_start(); let trimmed_value = value.trim_end().trim_start();
let prop = match self.properties.get(trimmed_name) let prop = match self.properties.get(trimmed_name)
{ {
None => return Err(format!("Unknown property name: `{trimmed_name}` (with value: `{trimmed_value}`). Valid properties are:\n{}", None => return Err(format!("Unknown property name: `{trimmed_name}` (with value: `{trimmed_value}`). Valid properties are:\n{}",
self.properties.iter().fold(String::new(), self.properties.iter().fold(String::new(),
@@ -278,226 +278,226 @@ impl PropertyParser {
                Some(prop) => prop
            };

            if let Some((_, previous)) = properties
                .properties
                .insert(trimmed_name.to_string(), (prop, trimmed_value.to_string()))
            {
                return Err(format!("Duplicate property `{trimmed_name}`, previous value: `{previous}` current value: `{trimmed_value}`"));
            }

            Ok(())
        };

        let mut in_name = true;
        let mut name = String::new();
        let mut value = String::new();
        let mut escaped = 0usize;
        for c in content.chars() {
            if c == '\\' {
                escaped += 1;
            } else if c == '=' && in_name {
                in_name = false;
                (0..escaped).for_each(|_| name.push('\\'));
                escaped = 0;
            } else if c == ',' && !in_name {
                if escaped % 2 == 0
                // Not escaped
                {
                    (0..escaped / 2).for_each(|_| value.push('\\'));
                    escaped = 0;
                    in_name = true;
                    if let Err(e) = try_insert(&name, &value) {
                        return Err(e);
                    }
                    name.clear();
                    value.clear();
                } else {
                    (0..(escaped - 1) / 2).for_each(|_| value.push('\\'));
                    value.push(',');
                    escaped = 0;
                }
            } else {
                if in_name {
                    (0..escaped).for_each(|_| name.push('\\'));
                    name.push(c)
                } else {
                    (0..escaped).for_each(|_| value.push('\\'));
                    value.push(c)
                }
                escaped = 0;
            }
        }

        if !in_name && value.trim_end().trim_start().is_empty() {
            return Err("Expected a value after last `=`".to_string());
        } else if name.is_empty() || value.is_empty() {
            return Err("Expected non empty property list.".to_string());
        }
        if let Err(e) = try_insert(&name, &value) {
            return Err(e);
        }

        if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| {
            if !properties.properties.contains_key(key) {
                if let Some(default) = &prop.default {
                    properties
                        .properties
                        .insert(key.clone(), (prop, default.clone()));
                } else if prop.required {
                    return Err(format!("Missing required property: {prop}"));
                }
            }
            Ok(())
        }) {
            Err(e)
        } else {
            Ok(properties)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        document::langdocument::LangDocument,
        elements::{comment::Comment, style::Style, text::Text},
        parser::source::{SourceFile, Token},
    };
    use std::rc::Rc;

    #[test]
    fn process_text_tests() {
        let source = Rc::new(SourceFile::with_content(
            "".to_string(),
            "".to_string(),
            None,
        ));
        let doc = LangDocument::new(source.clone(), None);

        assert_eq!(process_text(&doc, "a\nb"), "a b");
        assert_eq!(process_text(&doc, "a\n\nb"), "a b"); // Should never happen but why not
        assert_eq!(process_text(&doc, "a\\b"), "ab");
        assert_eq!(process_text(&doc, "a\\\nb"), "a\nb");
        assert_eq!(process_text(&doc, "a\\\\b"), "a\\b");
        assert_eq!(process_text(&doc, "a\\\\\nb"), "a\\ b");
        assert_eq!(process_text(&doc, "\na"), "a");

        let tok = Token::new(0..0, source);
        doc.push(Box::new(Paragraph::new(tok.clone())));

        // A space is appended as previous element is inline
        (&doc as &dyn Document)
            .last_element_mut::<Paragraph>()
            .unwrap()
            .push(Box::new(Text::new(tok.clone(), "TEXT".to_string())));
        assert_eq!(process_text(&doc, "\na"), " a");

        (&doc as &dyn Document)
            .last_element_mut::<Paragraph>()
            .unwrap()
            .push(Box::new(Style::new(tok.clone(), 0, false)));
        assert_eq!(process_text(&doc, "\na"), " a");

        // Comments are ignored (kind => Invisible)
        (&doc as &dyn Document)
            .last_element_mut::<Paragraph>()
            .unwrap()
            .push(Box::new(Comment::new(tok.clone(), "COMMENT".to_string())));
        assert_eq!(process_text(&doc, "\na"), " a");
    }

    #[test]
    fn process_escaped_tests() {
        assert_eq!(
            process_escaped(
                '\\',
                "%",
                "escaped: \\%, also escaped: \\\\\\%, untouched: \\a"
            ),
            "escaped: %, also escaped: \\%, untouched: \\a"
        );
        assert_eq!(
            process_escaped('"', "><)))°>", "Escaped fish: \"><)))°>"),
            "Escaped fish: ><)))°>".to_string()
        );
        assert_eq!(
            process_escaped('\\', "]", "Escaped \\]"),
            "Escaped ]".to_string()
        );
        assert_eq!(
            process_escaped('\\', "]", "Unescaped \\\\]"),
            "Unescaped \\\\]".to_string()
        );
        assert_eq!(
            process_escaped('\\', "]", "Escaped \\\\\\]"),
            "Escaped \\]".to_string()
        );
        assert_eq!(
            process_escaped('\\', "]", "Unescaped \\\\\\\\]"),
            "Unescaped \\\\\\\\]".to_string()
        );
    }

    #[test]
    fn property_parser_tests() {
        let mut properties = HashMap::new();
        properties.insert(
            "width".to_string(),
            Property::new(true, "Width of the element in em".to_string(), None),
        );
        properties.insert(
            "length".to_string(),
            Property::new(false, "Length in cm".to_string(), None),
        );
        properties.insert(
            "angle".to_string(),
            Property::new(
                true,
                "Angle in degrees".to_string(),
                Some("180".to_string()),
            ),
        );
        properties.insert(
            "weight".to_string(),
            Property::new(false, "Weight in %".to_string(), Some("0.42".to_string())),
        );
        let parser = PropertyParser::new(properties);
        let pm = parser.parse("width=15,length=-10").unwrap();

        // Ok
        assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15);
        assert_eq!(pm.get("length", |_, s| s.parse::<i32>()).unwrap().1, -10);
        assert_eq!(pm.get("angle", |_, s| s.parse::<f64>()).unwrap().1, 180f64);
        assert_eq!(pm.get("angle", |_, s| s.parse::<i32>()).unwrap().1, 180);
        assert_eq!(
            pm.get("weight", |_, s| s.parse::<f32>()).unwrap().1,
            0.42f32
        );
        assert_eq!(
            pm.get("weight", |_, s| s.parse::<f64>()).unwrap().1,
            0.42f64
        );

        // Error
        assert!(pm.get("length", |_, s| s.parse::<u32>()).is_err());
        assert!(pm.get("height", |_, s| s.parse::<f64>()).is_err());

        // Missing property
        assert!(parser.parse("length=15").is_err());

        // Defaults
        assert!(parser.parse("width=15").is_ok());
        assert_eq!(
            parser
                .parse("width=0,weight=0.15")
                .unwrap()
                .get("weight", |_, s| s.parse::<f32>())
                .unwrap()
                .1,
            0.15f32
        );
    }
}
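As a quick illustration of the property syntax documented above, here is a hypothetical extra test in the same style as the `tests` module (it is not part of this diff; the `caption` and `width` property names are invented, and it assumes it sits inside that module with `use std::collections::HashMap;` available). It shows that `\,` keeps a comma inside a value and that a missing optional property falls back to its default.

```rust
// Hypothetical addition to the `tests` module above; `caption`/`width` are
// invented property names used purely for illustration.
use std::collections::HashMap;

#[test]
fn escaped_comma_and_defaults() {
    let mut props = HashMap::new();
    props.insert(
        "caption".to_string(),
        Property::new(true, "Figure caption".to_string(), None),
    );
    props.insert(
        "width".to_string(),
        Property::new(false, "Width in em".to_string(), Some("15".to_string())),
    );
    let parser = PropertyParser::new(props);

    // `\,` keeps the comma inside the value; the missing `width` is filled from its default.
    let pm = parser.parse(r"caption=Sales\, 2024").unwrap();
    assert_eq!(
        pm.get("caption", |_, s| Ok::<_, ()>(s.clone())).unwrap().1,
        "Sales, 2024"
    );
    assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15);
}
```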