Format

This commit is contained in:
parent b8f4671657
commit 104e102177

9 changed files with 709 additions and 570 deletions

src/cache/cache.rs (vendored), 77 lines changed
@@ -1,29 +1,13 @@
use std::{error::Error, path::PathBuf};
use rusqlite::types::FromSql;
use rusqlite::Connection;
use rusqlite::ToSql;

use rusqlite::{types::FromSql, Connection, Params, ToSql};

struct Cache {
    con: Connection
}

impl Cache {
    fn new(file: PathBuf) -> Result<Self, String> {
        match Connection::open(file)
        {
            Err(e) => return Err(format!("Could not connect to cache database: {}", e.to_string())),
            Ok(con) => Ok(Self { con })
        }
    }
}

pub enum CachedError<E>
{
pub enum CachedError<E> {
    SqlErr(rusqlite::Error),
    GenErr(E)
    GenErr(E),
}

pub trait Cached
{
pub trait Cached {
    type Key;
    type Value;

@@ -39,10 +23,8 @@ pub trait Cached
    fn key(&self) -> <Self as Cached>::Key;

    fn init(con: &mut Connection) -> Result<(), rusqlite::Error>
    {
        con.execute(<Self as Cached>::sql_table(), ())
            .map(|_| ())
    fn init(con: &mut Connection) -> Result<(), rusqlite::Error> {
        con.execute(<Self as Cached>::sql_table(), ()).map(|_| ())
    }

    /// Attempts to retrieve a cached element from the compilation database

@@ -54,8 +36,11 @@ pub trait Cached
    /// or if not cached, an error from the generator [`f`]
    ///
    /// Note that on error, [`f`] may still have been called
    fn cached<E, F>(&self, con: &mut Connection, f: F)
        -> Result<<Self as Cached>::Value, CachedError<E>>
    fn cached<E, F>(
        &self,
        con: &mut Connection,
        f: F,
    ) -> Result<<Self as Cached>::Value, CachedError<E>>
    where
        <Self as Cached>::Key: ToSql,
        <Self as Cached>::Value: FromSql + ToSql,

@@ -64,42 +49,36 @@ pub trait Cached
        let key = self.key();

        // Find in cache
        let mut query = match con.prepare(<Self as Cached>::sql_get_query())
        {
        let mut query = match con.prepare(<Self as Cached>::sql_get_query()) {
            Ok(query) => query,
            Err(e) => return Err(CachedError::SqlErr(e))
            Err(e) => return Err(CachedError::SqlErr(e)),
        };

        let value = query.query_row([&key], |row|
        {
        let value = query
            .query_row([&key], |row| {
                Ok(row.get_unwrap::<_, <Self as Cached>::Value>(0))
            }).ok();
            })
            .ok();

        if let Some(value) = value
        {
        if let Some(value) = value {
            // Found in cache
            return Ok(value)
        }
        else
        {
            return Ok(value);
        } else {
            // Compute a value
            let value = match f(&self)
            {
            let value = match f(&self) {
                Ok(val) => val,
                Err(e) => return Err(CachedError::GenErr(e))
                Err(e) => return Err(CachedError::GenErr(e)),
            };

            // Try to insert
            let mut query = match con.prepare(<Self as Cached>::sql_insert_query())
            {
            let mut query = match con.prepare(<Self as Cached>::sql_insert_query()) {
                Ok(query) => query,
                Err(e) => return Err(CachedError::SqlErr(e))
                Err(e) => return Err(CachedError::SqlErr(e)),
            };

            match query.execute((&key, &value))
            {
            match query.execute((&key, &value)) {
                Ok(_) => Ok(value),
                Err(e) => Err(CachedError::SqlErr(e))
                Err(e) => Err(CachedError::SqlErr(e)),
            }
        }
    }
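The `Cached` trait above boils down to a get-or-compute pattern over a rusqlite connection: look the key up, and on a miss run the generator and insert the result. A rough standalone sketch of that pattern, not taken from this commit; the `cache` table, its column names and the `cached_or_compute` helper are invented for illustration:

use rusqlite::Connection;

fn cached_or_compute(con: &Connection, key: &str, compute: impl Fn() -> String) -> rusqlite::Result<String> {
    // Look the key up first; a miss (or any lookup error) falls through to the generator.
    let hit: Option<String> = con
        .query_row("SELECT value FROM cache WHERE key = ?1", [key], |row| row.get(0))
        .ok();
    if let Some(value) = hit {
        return Ok(value);
    }

    // Miss: compute, then insert so the next call is a hit.
    let value = compute();
    con.execute("INSERT INTO cache (key, value) VALUES (?1, ?2)", (key, &value))?;
    Ok(value)
}

fn main() -> rusqlite::Result<()> {
    let con = Connection::open_in_memory()?;
    con.execute("CREATE TABLE cache (key TEXT PRIMARY KEY, value TEXT)", ())?;

    let v = cached_or_compute(&con, "doc.tex", || "compiled output".to_string())?;
    assert_eq!(v, "compiled output");

    // Second call hits the cache instead of recomputing.
    let v2 = cached_or_compute(&con, "doc.tex", || unreachable!())?;
    assert_eq!(v2, "compiled output");
    Ok(())
}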
@@ -1,4 +1,6 @@
use std::cell::{Ref, RefCell, RefMut};
use std::cell::Ref;
use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::hash_map::HashMap;
use std::rc::Rc;

@@ -7,7 +9,6 @@ use crate::parser::source::Source;
use super::element::Element;
use super::variable::Variable;

// TODO: Referenceable rework
// Usize based referencing is not an acceptable method
// if we want to support deltas for the lsp
@ -27,38 +28,43 @@ impl Scope {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn merge(&mut self, other: &mut Scope, merge_as: &String, ref_offset: usize)
|
||||
{
|
||||
match merge_as.is_empty()
|
||||
{
|
||||
pub fn merge(&mut self, other: &mut Scope, merge_as: &String, ref_offset: usize) {
|
||||
match merge_as.is_empty() {
|
||||
true => {
|
||||
// References
|
||||
self.referenceable.extend(other.referenceable.drain()
|
||||
.map(|(name, idx)|
|
||||
(name, idx+ref_offset)));
|
||||
self.referenceable.extend(
|
||||
other
|
||||
.referenceable
|
||||
.drain()
|
||||
.map(|(name, idx)| (name, idx + ref_offset)),
|
||||
);
|
||||
|
||||
// Variables
|
||||
self.variables.extend(other.variables.drain()
|
||||
.map(|(name, var)|
|
||||
(name, var)));
|
||||
},
|
||||
self.variables
|
||||
.extend(other.variables.drain().map(|(name, var)| (name, var)));
|
||||
}
|
||||
false => {
|
||||
// References
|
||||
self.referenceable.extend(other.referenceable.drain()
|
||||
.map(|(name, idx)|
|
||||
(format!("{merge_as}.{name}"), idx+ref_offset)));
|
||||
self.referenceable.extend(
|
||||
other
|
||||
.referenceable
|
||||
.drain()
|
||||
.map(|(name, idx)| (format!("{merge_as}.{name}"), idx + ref_offset)),
|
||||
);
|
||||
|
||||
// Variables
|
||||
self.variables.extend(other.variables.drain()
|
||||
.map(|(name, var)|
|
||||
(format!("{merge_as}.{name}"), var)));
|
||||
self.variables.extend(
|
||||
other
|
||||
.variables
|
||||
.drain()
|
||||
.map(|(name, var)| (format!("{merge_as}.{name}"), var)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Document<'a>: core::fmt::Debug
|
||||
{
|
||||
pub trait Document<'a>: core::fmt::Debug {
|
||||
/// Gets the document [`Source`]
|
||||
fn source(&self) -> Rc<dyn Source>;
|
||||
|
||||
|
@ -73,16 +79,12 @@ pub trait Document<'a>: core::fmt::Debug
|
|||
fn scope(&self) -> &RefCell<Scope>;
|
||||
|
||||
/// Pushes a new element into the document's content
|
||||
fn push(&self, elem: Box<dyn Element>)
|
||||
{
|
||||
fn push(&self, elem: Box<dyn Element>) {
|
||||
// TODO: RefTable
|
||||
|
||||
self.content()
|
||||
.borrow_mut()
|
||||
.push(elem);
|
||||
self.content().borrow_mut().push(elem);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
fn last_element(&'a self, recurse: bool) -> Option<Ref<'_, dyn Element>>
|
||||
{
|
||||
|
@ -116,20 +118,18 @@ pub trait Document<'a>: core::fmt::Debug
|
|||
}
|
||||
*/
|
||||
|
||||
fn add_variable(&self, variable: Rc<dyn Variable>)
|
||||
{
|
||||
self.scope().borrow_mut().variables.insert(
|
||||
variable.name().to_string(),
|
||||
variable);
|
||||
fn add_variable(&self, variable: Rc<dyn Variable>) {
|
||||
self.scope()
|
||||
.borrow_mut()
|
||||
.variables
|
||||
.insert(variable.name().to_string(), variable);
|
||||
}
|
||||
|
||||
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>>
|
||||
{
|
||||
match self.scope().borrow().variables.get(name)
|
||||
{
|
||||
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
|
||||
match self.scope().borrow().variables.get(name) {
|
||||
Some(variable) => {
|
||||
return Some(variable.clone());
|
||||
},
|
||||
}
|
||||
|
||||
// Continue search recursively
|
||||
None => match self.parent() {
|
||||
|
@ -137,10 +137,11 @@ pub trait Document<'a>: core::fmt::Debug
|
|||
|
||||
// Not found
|
||||
None => return None,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
fn remove_variable(&self, name: &str) -> Option<Rc<dyn Variable>>
|
||||
{
|
||||
match self.scope().borrow_mut().variables.remove(name)
|
||||
|
@ -158,46 +159,48 @@ pub trait Document<'a>: core::fmt::Debug
|
|||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
/// Merges [`other`] into [`self`]
|
||||
fn merge(&self, content: &RefCell<Vec<Box<dyn Element>>>, scope: &RefCell<Scope>, merge_as: Option<&String>)
|
||||
{
|
||||
match merge_as
|
||||
{
|
||||
Some(merge_as) => self.scope().borrow_mut()
|
||||
.merge(
|
||||
fn merge(
|
||||
&self,
|
||||
content: &RefCell<Vec<Box<dyn Element>>>,
|
||||
scope: &RefCell<Scope>,
|
||||
merge_as: Option<&String>,
|
||||
) {
|
||||
match merge_as {
|
||||
Some(merge_as) => self.scope().borrow_mut().merge(
|
||||
&mut *scope.borrow_mut(),
|
||||
merge_as,
|
||||
self.content().borrow().len()+1),
|
||||
_ => {},
|
||||
self.content().borrow().len() + 1,
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Content
|
||||
self.content().borrow_mut().extend((content.borrow_mut())
|
||||
.drain(..)
|
||||
.map(|value| value));
|
||||
self.content()
|
||||
.borrow_mut()
|
||||
.extend((content.borrow_mut()).drain(..).map(|value| value));
|
||||
}
|
||||
}
|
||||
|
||||
pub trait DocumentAccessors<'a>
|
||||
{
|
||||
pub trait DocumentAccessors<'a> {
|
||||
fn last_element<T: Element>(&self) -> Option<Ref<'_, T>>;
|
||||
fn last_element_mut<T: Element>(&self) -> Option<RefMut<'_, T>>;
|
||||
}
|
||||
|
||||
impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_
|
||||
{
|
||||
fn last_element<T: Element>(&self) -> Option<Ref<'_, T>>
|
||||
{
|
||||
Ref::filter_map(self.content().borrow(),
|
||||
|content| content.last()
|
||||
.and_then(|last| last.downcast_ref::<T>())).ok()
|
||||
impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_ {
|
||||
fn last_element<T: Element>(&self) -> Option<Ref<'_, T>> {
|
||||
Ref::filter_map(self.content().borrow(), |content| {
|
||||
content.last().and_then(|last| last.downcast_ref::<T>())
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn last_element_mut<T: Element>(&self) -> Option<RefMut<'_, T>>
|
||||
{
|
||||
RefMut::filter_map(self.content().borrow_mut(),
|
||||
|content| content.last_mut()
|
||||
.and_then(|last| last.downcast_mut::<T>())).ok()
|
||||
fn last_element_mut<T: Element>(&self) -> Option<RefMut<'_, T>> {
|
||||
RefMut::filter_map(self.content().borrow_mut(), |content| {
|
||||
content.last_mut().and_then(|last| last.downcast_mut::<T>())
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
|
|
|
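`DocumentAccessors::last_element` above narrows a `RefCell` borrow of the whole content vector down to just its last element, via `Ref::filter_map` plus a downcast. A self-contained sketch of that borrow-narrowing pattern, using `dyn Any` here in place of the crate's `Element` trait:

use std::any::Any;
use std::cell::{Ref, RefCell};

fn main() {
    let content: RefCell<Vec<Box<dyn Any>>> =
        RefCell::new(vec![Box::new(42u32), Box::new(String::from("last"))]);

    // Borrow the whole Vec, then keep only a borrow of the last element if it is a String.
    let last: Option<Ref<String>> = Ref::filter_map(content.borrow(), |v| {
        v.last().and_then(|elem| elem.downcast_ref::<String>())
    })
    .ok();

    assert_eq!(last.as_deref().map(String::as_str), Some("last"));
}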
@@ -1,11 +1,6 @@
use std::collections::HashMap;
use std::io::Read;
use std::io::Write;
use std::ops::Range;
use std::process::Command;
use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Once;

use crate::parser::util::Property;

@@ -19,10 +14,7 @@ use crypto::digest::Digest;
use crypto::sha2::Sha512;
use graphviz_rust::cmd::Format;
use graphviz_rust::cmd::Layout;
use graphviz_rust::exec;
use graphviz_rust::exec_dot;
use graphviz_rust::parse;
use graphviz_rust::printer::PrinterContext;
use mlua::Function;
use mlua::Lua;
use regex::Captures;

@@ -129,6 +121,7 @@ impl Element for Graphviz {
                }
            }
        });
        // TODO: Format svg in a div

        if let Some(mut con) = compiler.cache() {
            match self.cached(&mut con, |s| s.dot_to_svg()) {
@@ -1,10 +1,23 @@
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use serde::{Deserialize, Serialize};
use crate::parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util};
use ariadne::{Report, Fmt, Label, ReportKind};
use crate::{compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}};
use std::{ops::Range, rc::Rc};
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;

#[derive(Debug)]
pub struct Link {
@ -13,34 +26,33 @@ pub struct Link {
|
|||
url: String, // Link url
|
||||
}
|
||||
|
||||
impl Link
|
||||
{
|
||||
impl Link {
|
||||
pub fn new(location: Token, name: String, url: String) -> Self {
|
||||
Self { location: location, name, url }
|
||||
Self {
|
||||
location: location,
|
||||
name,
|
||||
url,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Link
|
||||
{
|
||||
impl Element for Link {
|
||||
fn location(&self) -> &Token { &self.location }
|
||||
fn kind(&self) -> ElemKind { ElemKind::Inline }
|
||||
fn element_name(&self) -> &'static str { "Link" }
|
||||
fn to_string(&self) -> String { format!("{self:#?}") }
|
||||
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
|
||||
match compiler.target()
|
||||
{
|
||||
Target::HTML => {
|
||||
Ok(format!("<a href=\"{}\">{}</a>",
|
||||
match compiler.target() {
|
||||
Target::HTML => Ok(format!(
|
||||
"<a href=\"{}\">{}</a>",
|
||||
compiler.sanitize(self.url.as_str()),
|
||||
compiler.sanitize(self.name.as_str()),
|
||||
))
|
||||
},
|
||||
Target::LATEX => {
|
||||
Ok(format!("\\href{{{}}}{{{}}}",
|
||||
)),
|
||||
Target::LATEX => Ok(format!(
|
||||
"\\href{{{}}}{{{}}}",
|
||||
compiler.sanitize(self.url.as_str()),
|
||||
compiler.sanitize(self.name.as_str()),
|
||||
))
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -51,7 +63,9 @@ pub struct LinkRule {
|
|||
|
||||
impl LinkRule {
|
||||
pub fn new() -> Self {
|
||||
Self { re: [Regex::new(r"\[((?:\\.|[^\\\\])*?)\]\(((?:\\.|[^\\\\])*?)\)").unwrap()] }
|
||||
Self {
|
||||
re: [Regex::new(r"\[((?:\\.|[^\\\\])*?)\]\(((?:\\.|[^\\\\])*?)\)").unwrap()],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -60,91 +74,98 @@ impl RegexRule for LinkRule {
|
|||
|
||||
fn regexes(&self) -> &[Regex] { &self.re }
|
||||
|
||||
fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: Captures)
|
||||
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
document: &'a dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut result = vec![];
|
||||
let link_name = match matches.get(1)
|
||||
{
|
||||
let link_name = match matches.get(1) {
|
||||
Some(name) => {
|
||||
if name.as_str().is_empty()
|
||||
{
|
||||
if name.as_str().is_empty() {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), name.start())
|
||||
.with_message("Empty link name")
|
||||
.with_label(
|
||||
Label::new((token.source().clone(), name.range()))
|
||||
.with_message("Link name is empty")
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return result;
|
||||
}
|
||||
// TODO: process into separate document...
|
||||
let text_content = util::process_text(document, name.as_str());
|
||||
|
||||
if text_content.as_str().is_empty()
|
||||
{
|
||||
if text_content.as_str().is_empty() {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), name.start())
|
||||
.with_message("Empty link name")
|
||||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!("Link name is empty. Once processed, `{}` yields `{}`",
|
||||
.with_message(format!(
|
||||
"Link name is empty. Once processed, `{}` yields `{}`",
|
||||
name.as_str().fg(parser.colors().highlight),
|
||||
text_content.as_str().fg(parser.colors().highlight),
|
||||
))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return result;
|
||||
}
|
||||
text_content
|
||||
},
|
||||
}
|
||||
_ => panic!("Empty link name"),
|
||||
};
|
||||
|
||||
let link_url = match matches.get(2)
|
||||
{
|
||||
let link_url = match matches.get(2) {
|
||||
Some(url) => {
|
||||
if url.as_str().is_empty()
|
||||
{
|
||||
if url.as_str().is_empty() {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), url.start())
|
||||
.with_message("Empty link url")
|
||||
.with_label(
|
||||
Label::new((token.source(), url.range()))
|
||||
.with_message("Link url is empty")
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return result;
|
||||
}
|
||||
let text_content = util::process_text(document, url.as_str());
|
||||
|
||||
if text_content.as_str().is_empty()
|
||||
{
|
||||
if text_content.as_str().is_empty() {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), url.start())
|
||||
.with_message("Empty link url")
|
||||
.with_label(
|
||||
Label::new((token.source(), url.range()))
|
||||
.with_message(format!("Link url is empty. Once processed, `{}` yields `{}`",
|
||||
.with_message(format!(
|
||||
"Link url is empty. Once processed, `{}` yields `{}`",
|
||||
url.as_str().fg(parser.colors().highlight),
|
||||
text_content.as_str().fg(parser.colors().highlight),
|
||||
))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return result;
|
||||
}
|
||||
text_content
|
||||
},
|
||||
}
|
||||
_ => panic!("Empty link url"),
|
||||
};
|
||||
|
||||
parser.push(document, Box::new(
|
||||
Link::new(
|
||||
token.clone(),
|
||||
link_name,
|
||||
link_url
|
||||
)
|
||||
));
|
||||
parser.push(
|
||||
document,
|
||||
Box::new(Link::new(token.clone(), link_name, link_url)),
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
|
|
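For reference, the `LinkRule` pattern above captures a `[name](url)` pair while allowing backslash escapes inside either part. A quick standalone check of how its two capture groups behave (regex crate only; the sample text is made up):

use regex::Regex;

fn main() {
    let re = Regex::new(r"\[((?:\\.|[^\\\\])*?)\]\(((?:\\.|[^\\\\])*?)\)").unwrap();

    let caps = re.captures("See [the docs](https://example.com) for details.").unwrap();
    assert_eq!(&caps[1], "the docs"); // link name
    assert_eq!(&caps[2], "https://example.com"); // link url
}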
@@ -95,7 +95,7 @@ impl Element for List
            match_stack(&mut result, &ent.numbering);
            result.push_str("<li>");
            match ent.content.iter().enumerate()
                .try_for_each(|(idx, elem)| {
                .try_for_each(|(_idx, elem)| {
                    match elem.compile(compiler, document) {
                        Err(e) => Err(e),
                        Ok(s) => { result.push_str(s.as_str()); Ok(()) }
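The hunk above renames an unused index binding inside a `try_for_each` that concatenates each element's compiled output and short-circuits on the first error. A minimal standalone sketch of that pattern, where the `compile` closure stands in for `Element::compile`:

fn main() {
    let elements = ["a", "b", "c"];
    // Pretend compiler: fails on empty input, otherwise wraps the element in a tag.
    let compile = |s: &str| -> Result<String, String> {
        if s.is_empty() {
            Err("empty element".to_string())
        } else {
            Ok(format!("<li>{s}</li>"))
        }
    };

    let mut result = String::new();
    let outcome = elements
        .iter()
        .enumerate()
        .try_for_each(|(_idx, elem)| match compile(*elem) {
            Err(e) => Err(e),
            Ok(s) => {
                result.push_str(s.as_str());
                Ok(())
            }
        });

    assert!(outcome.is_ok());
    assert_eq!(result, "<li>a</li><li>b</li><li>c</li>");
}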
@@ -1,8 +1,30 @@
use mlua::{Error::BadArgument, Function, Lua};
use regex::{Captures, Regex};
use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element}}, lua::kernel::CTX, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, Property, PropertyMapError, PropertyParser}}};
use ariadne::{Fmt, Label, Report, ReportKind};
use std::{collections::HashMap, ops::Range, rc::Rc, str::FromStr, sync::Arc};
use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use crate::parser::util::{self};
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;

#[derive(Debug)]
struct Raw {
@ -13,7 +35,11 @@ struct Raw {
|
|||
|
||||
impl Raw {
|
||||
fn new(location: Token, kind: ElemKind, content: String) -> Self {
|
||||
Self { location, kind, content }
|
||||
Self {
|
||||
location,
|
||||
kind,
|
||||
content,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -25,7 +51,7 @@ impl Element for Raw {
|
|||
|
||||
fn to_string(&self) -> String { format!("{self:#?}") }
|
||||
|
||||
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
|
||||
fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
|
||||
Ok(self.content.clone())
|
||||
}
|
||||
}
|
||||
|
@ -38,32 +64,40 @@ pub struct RawRule {
|
|||
impl RawRule {
|
||||
pub fn new() -> Self {
|
||||
let mut props = HashMap::new();
|
||||
props.insert("kind".to_string(),
|
||||
props.insert(
|
||||
"kind".to_string(),
|
||||
Property::new(
|
||||
true,
|
||||
"Element display kind".to_string(),
|
||||
Some("inline".to_string())));
|
||||
Some("inline".to_string()),
|
||||
),
|
||||
);
|
||||
Self {
|
||||
re: [
|
||||
Regex::new(r"\{\?(?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)(\?\}))?").unwrap()
|
||||
Regex::new(r"\{\?(?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)(\?\}))?")
|
||||
.unwrap(),
|
||||
],
|
||||
properties: PropertyParser::new(props)
|
||||
properties: PropertyParser::new(props),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RegexRule for RawRule
|
||||
{
|
||||
impl RegexRule for RawRule {
|
||||
fn name(&self) -> &'static str { "Raw" }
|
||||
|
||||
fn regexes(&self) -> &[regex::Regex] { &self.re }
|
||||
|
||||
fn on_regex_match(&self, _index: usize, parser: &dyn Parser, document: &dyn Document, token: Token, matches: Captures)
|
||||
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
fn on_regex_match(
|
||||
&self,
|
||||
_index: usize,
|
||||
parser: &dyn Parser,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
||||
let raw_content = match matches.get(2)
|
||||
{
|
||||
let raw_content = match matches.get(2) {
|
||||
// Unterminated
|
||||
None => {
|
||||
reports.push(
|
||||
|
@ -71,34 +105,38 @@ impl RegexRule for RawRule
|
|||
.with_message("Unterminated Raw Code")
|
||||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Missing terminating `{}` after first `{}`",
|
||||
.with_message(format!(
|
||||
"Missing terminating `{}` after first `{}`",
|
||||
"?}".fg(parser.colors().info),
|
||||
"{?".fg(parser.colors().info)))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
"{?".fg(parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
Some(content) => {
|
||||
let processed = util::process_escaped('\\', "?}",
|
||||
content.as_str().trim_start().trim_end());
|
||||
let processed =
|
||||
util::process_escaped('\\', "?}", content.as_str().trim_start().trim_end());
|
||||
|
||||
if processed.is_empty()
|
||||
{
|
||||
if processed.is_empty() {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Warning, token.source(), content.start())
|
||||
.with_message("Empty Raw Code")
|
||||
.with_label(
|
||||
Label::new((token.source().clone(), content.range()))
|
||||
.with_message("Raw code is empty")
|
||||
.with_color(parser.colors().warning))
|
||||
.finish());
|
||||
.with_color(parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
processed
|
||||
}
|
||||
};
|
||||
|
||||
let properties = match matches.get(1)
|
||||
{
|
||||
let properties = match matches.get(1) {
|
||||
None => match self.properties.default() {
|
||||
Ok(properties) => properties,
|
||||
Err(e) => {
|
||||
|
@ -108,16 +146,17 @@ impl RegexRule for RawRule
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Raw code is missing properties: {e}"))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
},
|
||||
}
|
||||
},
|
||||
Some(props) => {
|
||||
let processed = util::process_escaped('\\', "]",
|
||||
props.as_str().trim_start().trim_end());
|
||||
match self.properties.parse(processed.as_str())
|
||||
{
|
||||
let processed =
|
||||
util::process_escaped('\\', "]", props.as_str().trim_start().trim_end());
|
||||
match self.properties.parse(processed.as_str()) {
|
||||
Err(e) => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, token.source(), props.start())
|
||||
|
@ -125,18 +164,20 @@ impl RegexRule for RawRule
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), props.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
Ok(properties) => properties
|
||||
Ok(properties) => properties,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let raw_kind : ElemKind = match properties.get("kind",
|
||||
|prop, value| ElemKind::from_str(value.as_str()).map_err(|e| (prop, e)))
|
||||
{
|
||||
let raw_kind: ElemKind = match properties.get("kind", |prop, value| {
|
||||
ElemKind::from_str(value.as_str()).map_err(|e| (prop, e))
|
||||
}) {
|
||||
Ok((_prop, kind)) => kind,
|
||||
Err(e) => match e {
|
||||
PropertyMapError::ParseError((prop, err)) => {
|
||||
|
@ -145,33 +186,47 @@ impl RegexRule for RawRule
|
|||
.with_message("Invalid Raw Code Property")
|
||||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Property `kind: {}` cannot be converted: {}",
|
||||
.with_message(format!(
|
||||
"Property `kind: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)))
|
||||
.with_color(parser.colors().warning))
|
||||
.finish());
|
||||
err.fg(parser.colors().error)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
},
|
||||
}
|
||||
PropertyMapError::NotFoundError(err) => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, token.source(), token.start())
|
||||
.with_message("Invalid Code Property")
|
||||
.with_label(
|
||||
Label::new((token.source().clone(), token.start()+1..token.end()))
|
||||
.with_message(format!("Property `{}` is missing",
|
||||
err.fg(parser.colors().info)))
|
||||
.with_color(parser.colors().warning))
|
||||
.finish());
|
||||
Label::new((
|
||||
token.source().clone(),
|
||||
token.start() + 1..token.end(),
|
||||
))
|
||||
.with_message(format!(
|
||||
"Property `{}` is missing",
|
||||
err.fg(parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
parser.push(document, Box::new(Raw {
|
||||
parser.push(
|
||||
document,
|
||||
Box::new(Raw {
|
||||
location: token.clone(),
|
||||
kind: raw_kind,
|
||||
content: raw_content
|
||||
}));
|
||||
content: raw_content,
|
||||
}),
|
||||
);
|
||||
|
||||
reports
|
||||
}
|
||||
|
@ -179,30 +234,41 @@ impl RegexRule for RawRule
|
|||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push(("push".to_string(), lua.create_function(
|
||||
|_, (kind, content): (String, String)| {
|
||||
bindings.push((
|
||||
"push".to_string(),
|
||||
lua.create_function(|_, (kind, content): (String, String)| {
|
||||
// Validate kind
|
||||
let kind = match ElemKind::from_str(kind.as_str())
|
||||
{
|
||||
let kind = match ElemKind::from_str(kind.as_str()) {
|
||||
Ok(kind) => kind,
|
||||
Err(e) => return Err(BadArgument {
|
||||
Err(e) => {
|
||||
return Err(BadArgument {
|
||||
to: Some("push".to_string()),
|
||||
pos: 1,
|
||||
name: Some("kind".to_string()),
|
||||
cause: Arc::new(mlua::Error::external(
|
||||
format!("Wrong section kind specified: {e}")))})
|
||||
cause: Arc::new(mlua::Error::external(format!(
|
||||
"Wrong section kind specified: {e}"
|
||||
))),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
CTX.with_borrow(|ctx| ctx.as_ref().map(|ctx| {
|
||||
ctx.parser.push(ctx.document, Box::new(Raw {
|
||||
CTX.with_borrow(|ctx| {
|
||||
ctx.as_ref().map(|ctx| {
|
||||
ctx.parser.push(
|
||||
ctx.document,
|
||||
Box::new(Raw {
|
||||
location: ctx.location.clone(),
|
||||
kind,
|
||||
content,
|
||||
}));
|
||||
}));
|
||||
}),
|
||||
);
|
||||
})
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}).unwrap()));
|
||||
})
|
||||
.unwrap(),
|
||||
));
|
||||
|
||||
bindings
|
||||
}
|
||||
|
|
|
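The `lua_bindings` change above reshapes a `push` binding created with `lua.create_function`. A rough, self-contained sketch of that binding style, with a printed message standing in for constructing a `Raw` element and handing it to the parser:

use mlua::{Lua, Result};

fn main() -> Result<()> {
    let lua = Lua::new();

    // Two string arguments, mirroring the (kind, content) pair taken by the real binding.
    let push = lua.create_function(|_, (kind, content): (String, String)| {
        println!("push called with kind={kind}, content={content}");
        Ok(())
    })?;
    lua.globals().set("push", push)?;

    lua.load(r#"push("inline", "raw content")"#).exec()?;
    Ok(())
}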
@@ -17,20 +17,20 @@ use super::variable::VariableRule;
use super::variable::VariableSubstitutionRule;

pub fn register<P: Parser>(parser: &mut P) {
    parser.add_rule(Box::new(CommentRule::new()), None);
    parser.add_rule(Box::new(ParagraphRule::new()), None);
    parser.add_rule(Box::new(ImportRule::new()), None);
    parser.add_rule(Box::new(ScriptRule::new()), None);
    parser.add_rule(Box::new(VariableRule::new()), None);
    parser.add_rule(Box::new(VariableSubstitutionRule::new()), None);
    parser.add_rule(Box::new(RawRule::new()), None);
    parser.add_rule(Box::new(ListRule::new()), None);
    parser.add_rule(Box::new(CodeRule::new()), None);
    parser.add_rule(Box::new(TexRule::new()), None);
    parser.add_rule(Box::new(GraphRule::new()), None);
    parser.add_rule(Box::new(CommentRule::new()), None).unwrap();
    parser.add_rule(Box::new(ParagraphRule::new()), None).unwrap();
    parser.add_rule(Box::new(ImportRule::new()), None).unwrap();
    parser.add_rule(Box::new(ScriptRule::new()), None).unwrap();
    parser.add_rule(Box::new(VariableRule::new()), None).unwrap();
    parser.add_rule(Box::new(VariableSubstitutionRule::new()), None).unwrap();
    parser.add_rule(Box::new(RawRule::new()), None).unwrap();
    parser.add_rule(Box::new(ListRule::new()), None).unwrap();
    parser.add_rule(Box::new(CodeRule::new()), None).unwrap();
    parser.add_rule(Box::new(TexRule::new()), None).unwrap();
    parser.add_rule(Box::new(GraphRule::new()), None).unwrap();

    parser.add_rule(Box::new(StyleRule::new()), None);
    parser.add_rule(Box::new(SectionRule::new()), None);
    parser.add_rule(Box::new(LinkRule::new()), None);
    parser.add_rule(Box::new(TextRule::default()), None);
    parser.add_rule(Box::new(StyleRule::new()), None).unwrap();
    parser.add_rule(Box::new(SectionRule::new()), None).unwrap();
    parser.add_rule(Box::new(LinkRule::new()), None).unwrap();
    parser.add_rule(Box::new(TextRule::default()), None).unwrap();
}
@@ -1,15 +1,29 @@
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use crate::{document::document::Document, lua::kernel::{Kernel, KernelContext, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token, VirtualSource}, util}};
use ariadne::{Fmt, Label, Report, ReportKind};
use std::{ops::Range, rc::Rc};
use crate::document::document::Document;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelContext;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;

use super::text::Text;

pub struct ScriptRule
{
pub struct ScriptRule {
    re: [Regex; 2],
    eval_kinds: [(&'static str, &'static str); 3]
    eval_kinds: [(&'static str, &'static str); 3],
}

impl ScriptRule {
@ -17,57 +31,69 @@ impl ScriptRule {
|
|||
Self {
|
||||
re: [
|
||||
Regex::new(r"(?:^|\n)@<(?:(.*)\n?)((?:\\.|[^\\\\])*?)(?:\n?)>@").unwrap(),
|
||||
Regex::new(r"%<(?:\[(.*?)\])?([^\s[:alpha:]])?((?:\\.|[^\\\\])*?)(?:\n?)>%").unwrap()
|
||||
Regex::new(r"%<(?:\[(.*?)\])?([^\s[:alpha:]])?((?:\\.|[^\\\\])*?)(?:\n?)>%")
|
||||
.unwrap(),
|
||||
],
|
||||
eval_kinds: [
|
||||
("", "Eval"),
|
||||
("\"", "Eval to text"),
|
||||
("!", "Eval and parse"),
|
||||
]
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_kernel_name(colors: &ReportColors, name: &str)
|
||||
-> Result<String, String> {
|
||||
fn validate_kernel_name(colors: &ReportColors, name: &str) -> Result<String, String> {
|
||||
let trimmed = name.trim_end().trim_start();
|
||||
if trimmed.is_empty() { return Ok("main".to_string()) }
|
||||
else if trimmed.find(|c: char| c.is_whitespace()).is_some() {
|
||||
return Err(format!("Kernel name `{}` contains whitespaces",
|
||||
trimmed.fg(colors.highlight)))
|
||||
if trimmed.is_empty() {
|
||||
return Ok("main".to_string());
|
||||
} else if trimmed.find(|c: char| c.is_whitespace()).is_some() {
|
||||
return Err(format!(
|
||||
"Kernel name `{}` contains whitespaces",
|
||||
trimmed.fg(colors.highlight)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(trimmed.to_string())
|
||||
}
|
||||
|
||||
fn validate_kind(&self, colors: &ReportColors, kind: &str)
|
||||
-> Result<usize, String> {
|
||||
match self.eval_kinds.iter().position(|(kind_symbol, _)| kind == *kind_symbol)
|
||||
fn validate_kind(&self, colors: &ReportColors, kind: &str) -> Result<usize, String> {
|
||||
match self
|
||||
.eval_kinds
|
||||
.iter()
|
||||
.position(|(kind_symbol, _)| kind == *kind_symbol)
|
||||
{
|
||||
Some(id) => Ok(id),
|
||||
None => Err(format!("Unable to find eval kind `{}`. Available kinds:{}",
|
||||
None => Err(format!(
|
||||
"Unable to find eval kind `{}`. Available kinds:{}",
|
||||
kind.fg(colors.highlight),
|
||||
self.eval_kinds.iter().fold(String::new(), |out, (symbol, name)| {
|
||||
self.eval_kinds
|
||||
.iter()
|
||||
.fold(String::new(), |out, (symbol, name)| {
|
||||
out + format!("\n - '{symbol}' => {name}").as_str()
|
||||
})))
|
||||
})
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RegexRule for ScriptRule
|
||||
{
|
||||
impl RegexRule for ScriptRule {
|
||||
fn name(&self) -> &'static str { "Script" }
|
||||
|
||||
fn regexes(&self) -> &[regex::Regex] { &self.re }
|
||||
|
||||
fn on_regex_match<'a>(&self, index: usize, parser: &dyn Parser, document: &'a dyn Document<'a>, token: Token, matches: Captures)
|
||||
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
document: &'a dyn Document<'a>,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
||||
let kernel_name = match matches.get(1) {
|
||||
None => "main".to_string(),
|
||||
Some(name) => {
|
||||
match ScriptRule::validate_kernel_name(parser.colors(), name.as_str())
|
||||
{
|
||||
Some(name) => match ScriptRule::validate_kernel_name(parser.colors(), name.as_str()) {
|
||||
Ok(name) => name,
|
||||
Err(e) => {
|
||||
reports.push(
|
||||
|
@ -76,71 +102,83 @@ impl RegexRule for ScriptRule
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
let kernel = parser.get_kernel(kernel_name.as_str()).unwrap_or_else(|| {
|
||||
parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser))
|
||||
});
|
||||
let kernel = parser
|
||||
.get_kernel(kernel_name.as_str())
|
||||
.unwrap_or_else(|| parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser)));
|
||||
|
||||
let kernel_data = matches.get(if index == 0 {2} else {3})
|
||||
let kernel_data = matches
|
||||
.get(if index == 0 { 2 } else { 3 })
|
||||
.and_then(|code| {
|
||||
let trimmed = code.as_str().trim_start().trim_end();
|
||||
(!trimmed.is_empty()).then_some((trimmed, code.range()))
|
||||
}).or_else(|| {
|
||||
})
|
||||
.or_else(|| {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Warning, token.source(), token.start())
|
||||
.with_message("Invalid kernel code")
|
||||
.with_label(
|
||||
Label::new((token.source(), token.start()+1..token.end()))
|
||||
Label::new((token.source(), token.start() + 1..token.end()))
|
||||
.with_message("Kernel code is empty")
|
||||
.with_color(parser.colors().warning))
|
||||
.finish());
|
||||
.with_color(parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
||||
None
|
||||
});
|
||||
|
||||
if kernel_data.is_none() { return reports; }
|
||||
if kernel_data.is_none() {
|
||||
return reports;
|
||||
}
|
||||
|
||||
let (kernel_content, kernel_range) = kernel_data.unwrap();
|
||||
let source = Rc::new(VirtualSource::new(
|
||||
Token::new(kernel_range, token.source()),
|
||||
format!("{}#{}:lua_kernel@{kernel_name}", token.source().name(), matches.get(0).unwrap().start()),
|
||||
util::process_escaped('\\', ">@", kernel_content)
|
||||
format!(
|
||||
"{}#{}:lua_kernel@{kernel_name}",
|
||||
token.source().name(),
|
||||
matches.get(0).unwrap().start()
|
||||
),
|
||||
util::process_escaped('\\', ">@", kernel_content),
|
||||
)) as Rc<dyn Source>;
|
||||
|
||||
let execute = |lua: &Lua|
|
||||
{
|
||||
let chunk = lua.load(source.content())
|
||||
.set_name(kernel_name);
|
||||
let execute = |lua: &Lua| {
|
||||
let chunk = lua.load(source.content()).set_name(kernel_name);
|
||||
|
||||
if index == 0 // Exec
|
||||
{
|
||||
if let Err(e) = chunk.exec()
|
||||
if index == 0
|
||||
// Exec
|
||||
{
|
||||
if let Err(e) = chunk.exec() {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, source.clone(), 0)
|
||||
.with_message("Invalid kernel code")
|
||||
.with_label(
|
||||
Label::new((source.clone(), 0..source.content().len()))
|
||||
.with_message(format!("Kernel execution failed:\n{}", e.to_string()))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_message(format!(
|
||||
"Kernel execution failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
}
|
||||
else // Eval
|
||||
} else
|
||||
// Eval
|
||||
{
|
||||
// Validate kind
|
||||
let kind = match matches.get(2) {
|
||||
None => 0,
|
||||
Some(kind) => {
|
||||
match self.validate_kind(parser.colors(), kind.as_str())
|
||||
{
|
||||
Some(kind) => match self.validate_kind(parser.colors(), kind.as_str()) {
|
||||
Ok(kind) => kind,
|
||||
Err(msg) => {
|
||||
reports.push(
|
||||
|
@ -149,63 +187,76 @@ impl RegexRule for ScriptRule
|
|||
.with_label(
|
||||
Label::new((token.source(), kind.range()))
|
||||
.with_message(msg)
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
return reports;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
if kind == 0 // Eval
|
||||
{
|
||||
if let Err(e) = chunk.eval::<()>()
|
||||
if kind == 0
|
||||
// Eval
|
||||
{
|
||||
if let Err(e) = chunk.eval::<()>() {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, source.clone(), 0)
|
||||
.with_message("Invalid kernel code")
|
||||
.with_label(
|
||||
Label::new((source.clone(), 0..source.content().len()))
|
||||
.with_message(format!("Kernel evaluation failed:\n{}", e.to_string()))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_message(format!(
|
||||
"Kernel evaluation failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
}
|
||||
else // Eval to string
|
||||
{
|
||||
match chunk.eval::<String>()
|
||||
} else
|
||||
// Eval to string
|
||||
{
|
||||
match chunk.eval::<String>() {
|
||||
Ok(result) => {
|
||||
if kind == 1 // Eval to text
|
||||
if kind == 1
|
||||
// Eval to text
|
||||
{
|
||||
if !result.is_empty()
|
||||
{
|
||||
parser.push(document, Box::new(Text::new(
|
||||
if !result.is_empty() {
|
||||
parser.push(
|
||||
document,
|
||||
Box::new(Text::new(
|
||||
Token::new(1..source.content().len(), source.clone()),
|
||||
util::process_text(document, result.as_str()),
|
||||
)));
|
||||
)),
|
||||
);
|
||||
}
|
||||
}
|
||||
else if kind == 2 // Eval and Parse
|
||||
} else if kind == 2
|
||||
// Eval and Parse
|
||||
{
|
||||
let parse_source = Rc::new(VirtualSource::new(
|
||||
Token::new(0..source.content().len(), source.clone()),
|
||||
format!("parse({})", source.name()),
|
||||
result
|
||||
result,
|
||||
)) as Rc<dyn Source>;
|
||||
|
||||
parser.parse_into(parse_source, document);
|
||||
}
|
||||
},
|
||||
}
|
||||
Err(e) => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, source.clone(), 0)
|
||||
.with_message("Invalid kernel code")
|
||||
.with_label(
|
||||
Label::new((source.clone(), 0..source.content().len()))
|
||||
.with_message(format!("Kernel evaluation failed:\n{}", e.to_string()))
|
||||
.with_color(parser.colors().error))
|
||||
.finish());
|
||||
.with_message(format!(
|
||||
"Kernel evaluation failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -217,7 +268,7 @@ impl RegexRule for ScriptRule
|
|||
let ctx = KernelContext {
|
||||
location: Token::new(0..source.content().len(), source.clone()),
|
||||
parser,
|
||||
document
|
||||
document,
|
||||
};
|
||||
|
||||
kernel.run_with_context(ctx, execute)
|
||||
|
|
|
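The `ScriptRule` above distinguishes exec-style kernels, which run only for their side effects, from eval-style kernels, whose return value is captured (optionally as text). A minimal sketch of those two paths with mlua; the chunk sources here are made up:

use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();

    // Exec: run a chunk purely for its side effects.
    lua.load("x = 10").exec()?;

    // Eval: run a chunk and capture its value, e.g. the "eval to text" case.
    let text: String = lua.load(r#"return "2 + 2 = " .. (2 + 2)"#).eval()?;
    assert_eq!(text, "2 + 2 = 4");

    let x: i64 = lua.load("return x").eval()?;
    assert_eq!(x, 10);
    Ok(())
}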
@@ -1,27 +1,38 @@
use mlua::{Function, Lua};
use std::any::Any;
use std::ops::Range;
use std::rc::Rc;

use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element}}, lua::kernel::CTX, parser::{parser::Parser, rule::Rule, source::Token}};
use ariadne::Report;
use mlua::Function;
use mlua::Lua;

use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::Parser;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;

#[derive(Debug)]
pub struct Text
{
pub struct Text {
    pub(self) location: Token,
    pub(self) content: String,
}

impl Text
{
    pub fn new(location: Token, content: String) -> Text
    {
impl Text {
    pub fn new(location: Token, content: String) -> Text {
        Text {
            location: location,
            content: content
            content: content,
        }
    }
}

impl Element for Text
{
impl Element for Text {
    fn location(&self) -> &Token { &self.location }
    fn kind(&self) -> ElemKind { ElemKind::Inline }
    fn element_name(&self) -> &'static str { "Text" }

@@ -35,27 +46,42 @@ impl Element for Text
#[derive(Default)]
pub struct TextRule;

impl Rule for TextRule
{
impl Rule for TextRule {
    fn name(&self) -> &'static str { "Text" }

    fn next_match(&self, cursor: &crate::parser::source::Cursor) -> Option<(usize, Box<dyn std::any::Any>)> { None }
    fn next_match(&self, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { None }

    fn on_match(&self, parser: &dyn Parser, document: &dyn Document, cursor: crate::parser::source::Cursor, match_data: Option<Box<dyn std::any::Any>>) -> (crate::parser::source::Cursor, Vec<ariadne::Report<'_, (std::rc::Rc<dyn crate::parser::source::Source>, std::ops::Range<usize>)>>) { panic!("Text canno match"); }
    fn on_match(
        &self,
        _parser: &dyn Parser,
        _document: &dyn Document,
        _cursor: Cursor,
        _match_data: Option<Box<dyn Any>>,
    ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
        panic!("Text cannot match");
    }

    fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
        let mut bindings = vec![];
        bindings.push(("push".to_string(), lua.create_function(
            |_, content: String| {
                CTX.with_borrow(|ctx| ctx.as_ref().map(|ctx| {
                    ctx.parser.push(ctx.document, Box::new(Text {
        bindings.push((
            "push".to_string(),
            lua.create_function(|_, content: String| {
                CTX.with_borrow(|ctx| {
                    ctx.as_ref().map(|ctx| {
                        ctx.parser.push(
                            ctx.document,
                            Box::new(Text {
                                location: ctx.location.clone(),
                                content,
                            }));
                    }));
                            }),
                        );
                    })
                });

                Ok(())
            }).unwrap()));
            })
            .unwrap(),
        ));

        bindings
    }